{
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "EPU_V8M5aGiK"
      },
      "source": [
        "# 10-cifar"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 1,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:32.363026Z",
          "start_time": "2025-06-26T01:43:29.447990Z"
        },
        "id": "We_D_UbfaGiM"
      },
      "outputs": [],
      "source": [
        "import torch\n",
        "import torchvision\n",
        "import numpy as np\n",
        "import matplotlib.pyplot as plt\n",
        "from torchvision import datasets, transforms\n",
        "from deeplearning_train import EarlyStopping, ModelSaver,train_classification_model,plot_learning_curves\n",
        "from deeplearning_train import evaluate_classification_model as evaluate_model\n",
        "import torchvision.models as models\n",
        "import torch.nn as nn"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 2,
      "metadata": {
        "id": "D7lUvbSNaGiQ"
      },
      "outputs": [],
      "source": [
        "# # 理解AdaptiveAvgPool2d\n",
        "# # 创建一个随机输入张量，模拟特征图\n",
        "# # 形状为 [1, 3, 6, 6]，表示1个样本，3个通道，6x6的特征图\n",
        "# input_tensor = torch.randn(1, 3, 6, 6)\n",
        "# print(\"输入张量的形状:\", input_tensor.shape)\n",
        "\n",
        "# # 创建 AdaptiveAvgPool2d 层，指定输出大小为 2x2，\n",
        "# adaptive_pool = nn.AdaptiveAvgPool2d(output_size=(2, 2))  # 创建一个自适应平均池化层，将输入特征图池化为2x2的输出\n",
        "\n",
        "# # 对输入张量进行自适应平均池化\n",
        "# output_tensor = adaptive_pool(input_tensor) # 对输入张量进行自适应平均池化，输出形状为[1,3,2,2]\n",
        "# print(\"输出张量的形状:\", output_tensor.shape)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 3,
      "metadata": {
        "id": "e-MNsvcJaGiR"
      },
      "outputs": [],
      "source": [
        "import json\n",
        "token = {\"username\":\"zhangyudataset\",\"key\":\"6ae9a985be19950353520e31297702b4\"}\n",
        "with open('/content/kaggle.json', 'w') as file:\n",
        "  json.dump(token, file)  # json.dump类似于write，直接把字典类型数据变为字符串写入文件\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 4,
      "metadata": {
        "id": "KhuXgNFMaGiR",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "5ad96970-fb5c-49f2-854d-b6e81b7d9672"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "- path is now set to: /content\n"
          ]
        }
      ],
      "source": [
        "!mkdir -p ~/.kaggle\n",
        "!cp /content/kaggle.json ~/.kaggle/\n",
        "!chmod 600 ~/.kaggle/kaggle.json\n",
        "!kaggle config set -n path -v /content"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 5,
      "metadata": {
        "id": "oWc-GrCraGiR",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "07837fb0-01bf-46a1-f046-65f133bd763e"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Downloading cifar-10.zip to /content/competitions/cifar-10\n",
            " 99% 708M/715M [00:11<00:00, 64.7MB/s]\n",
            "100% 715M/715M [00:11<00:00, 68.0MB/s]\n"
          ]
        }
      ],
      "source": [
        "# 需要先参加比赛才能下载数据集\n",
        "!kaggle competitions download -c cifar-10"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 6,
      "metadata": {
        "id": "qPJdBpjXaGiR",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "89bfca40-fe35-4f0d-8cb7-58d04fb94e50"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Archive:  /content/competitions/cifar-10/cifar-10.zip\n",
            "  inflating: sampleSubmission.csv    \n",
            "  inflating: test.7z                 \n",
            "  inflating: train.7z                \n",
            "  inflating: trainLabels.csv         \n"
          ]
        }
      ],
      "source": [
        "!unzip /content/competitions/cifar-10/cifar-10.zip"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 7,
      "metadata": {
        "id": "QJbEjrVHaGiR",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "6ea6ebc1-8712-4386-9c8c-9da7b8720b33"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Collecting py7zr\n",
            "  Downloading py7zr-1.0.0-py3-none-any.whl.metadata (17 kB)\n",
            "Collecting texttable (from py7zr)\n",
            "  Downloading texttable-1.7.0-py2.py3-none-any.whl.metadata (9.8 kB)\n",
            "Requirement already satisfied: pycryptodomex>=3.20.0 in /usr/local/lib/python3.11/dist-packages (from py7zr) (3.23.0)\n",
            "Collecting brotli>=1.1.0 (from py7zr)\n",
            "  Downloading Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.5 kB)\n",
            "Requirement already satisfied: psutil in /usr/local/lib/python3.11/dist-packages (from py7zr) (5.9.5)\n",
            "Collecting pyzstd>=0.16.1 (from py7zr)\n",
            "  Downloading pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.5 kB)\n",
            "Collecting pyppmd<1.3.0,>=1.1.0 (from py7zr)\n",
            "  Downloading pyppmd-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.4 kB)\n",
            "Collecting pybcj<1.1.0,>=1.0.0 (from py7zr)\n",
            "  Downloading pybcj-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (3.7 kB)\n",
            "Collecting multivolumefile>=0.2.3 (from py7zr)\n",
            "  Downloading multivolumefile-0.2.3-py3-none-any.whl.metadata (6.3 kB)\n",
            "Collecting inflate64<1.1.0,>=1.0.0 (from py7zr)\n",
            "  Downloading inflate64-1.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (4.4 kB)\n",
            "Requirement already satisfied: typing-extensions>=4.13.2 in /usr/local/lib/python3.11/dist-packages (from pyzstd>=0.16.1->py7zr) (4.14.0)\n",
            "Downloading py7zr-1.0.0-py3-none-any.whl (69 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m69.7/69.7 kB\u001b[0m \u001b[31m3.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.9 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.9/2.9 MB\u001b[0m \u001b[31m38.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading inflate64-1.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (96 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m96.4/96.4 kB\u001b[0m \u001b[31m7.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading multivolumefile-0.2.3-py3-none-any.whl (17 kB)\n",
            "Downloading pybcj-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (50 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m50.7/50.7 kB\u001b[0m \u001b[31m3.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading pyppmd-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (141 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m141.3/141.3 kB\u001b[0m \u001b[31m10.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (412 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m412.9/412.9 kB\u001b[0m \u001b[31m23.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading texttable-1.7.0-py2.py3-none-any.whl (10 kB)\n",
            "Installing collected packages: texttable, brotli, pyzstd, pyppmd, pybcj, multivolumefile, inflate64, py7zr\n",
            "Successfully installed brotli-1.1.0 inflate64-1.0.3 multivolumefile-0.2.3 py7zr-1.0.0 pybcj-1.0.6 pyppmd-1.2.0 pyzstd-0.17.0 texttable-1.7.0\n"
          ]
        }
      ],
      "source": [
        "# 安装py7zr库，用于解压7z格式的压缩包\n",
        "%pip install py7zr  # 在Jupyter环境下安装py7zr库\n",
        "\n",
        "# 导入py7zr库\n",
        "import py7zr  # 导入py7zr模块以便后续解压操作\n",
        "\n",
        "# 创建一个SevenZipFile对象，打开'./train.7z'文件，模式为只读\n",
        "a = py7zr.SevenZipFile(r'./train.7z', 'r')  # 用于读取7z压缩包\n",
        "\n",
        "# 将压缩包中的内容全部解压到指定目录'./competitions/cifar-10/'\n",
        "a.extractall(path=r'./competitions/cifar-10/')  # 解压所有文件到目标文件夹\n",
        "\n",
        "# 关闭SevenZipFile对象，释放资源\n",
        "a.close()  # 关闭文件，完成解压流程"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "1FQvU7owaGiT"
      },
      "source": [
        "# 把数据集划分为训练集55000和验证集5000，并给DataLoader"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 8,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.144223Z",
          "start_time": "2025-06-26T01:43:33.135368Z"
        },
        "id": "TBDumxASaGiT",
        "outputId": "44547b57-101a-431c-81e0-e86f9f7869e6",
        "colab": {
          "base_uri": "https://localhost:8080/"
        }
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "完整数据集大小: 50000\n",
            "训练集大小: 45000\n",
            "验证集大小: 5000\n"
          ]
        }
      ],
      "source": [
        "# 加载CIFAR-10数据集相关库\n",
        "import os  # 导入os模块用于文件路径操作\n",
        "import pandas as pd  # 导入pandas用于读取csv文件\n",
        "from PIL import Image  # 导入PIL库用于图片处理\n",
        "from torch.utils.data import Dataset  # 导入PyTorch的数据集基类\n",
        "\n",
        "# 定义CIFAR-10数据集类，继承自Dataset\n",
        "class CIFAR10Dataset(Dataset):\n",
        "    def __init__(self, img_dir, labels_file, transform=None):  # 构造函数，接收图片文件夹、标签文件和预处理方法\n",
        "        self.img_dir = img_dir  # 保存图片文件夹路径\n",
        "        self.transform = transform  # 保存预处理方法\n",
        "\n",
        "        # 读取标签文件，read_csv默认第一行为列名\n",
        "        self.labels_df = pd.read_csv(labels_file)  # 读取csv标签文件\n",
        "        self.img_names = self.labels_df.iloc[:, 0].values.astype(str)  # 获取第一列图片名，转为字符串数组\n",
        "\n",
        "        # 定义类别名称到数字的映射字典\n",
        "        self.class_names_dict = {'airplane': 0, 'automobile': 1, 'bird': 2, 'cat': 3,\n",
        "                                 'deer': 4, 'dog': 5, 'frog': 6, 'horse': 7, 'ship': 8, 'truck': 9}  # 类别映射字典\n",
        "        # 将文本标签转换为数字ID\n",
        "        self.labels = [self.class_names_dict[label] for label in self.labels_df.iloc[:, 1].values]  # 标签转数字\n",
        "\n",
        "    def __len__(self):  # 返回数据集大小\n",
        "        return len(self.labels)  # 返回标签数量\n",
        "\n",
        "    def __getitem__(self, idx):  # 获取指定索引的数据\n",
        "        img_path = os.path.join(self.img_dir, self.img_names[idx] + '.png')  # 拼接图片路径\n",
        "        image = Image.open(img_path)  # 打开图片\n",
        "        label = self.labels[idx]  # 获取标签\n",
        "\n",
        "        if self.transform:  # 如果有预处理\n",
        "            image_tensor = self.transform(image)  # 对图片做预处理\n",
        "\n",
        "        return image_tensor, label  # 返回图片张量和标签\n",
        "\n",
        "# 定义数据预处理流程\n",
        "transform = transforms.Compose([  # 使用Compose组合多种预处理\n",
        "    transforms.ToTensor(),  # 转为张量\n",
        "    transforms.Normalize((0.4917, 0.4823, 0.4467), (0.2024, 0.1995, 0.2010))  # 标准化\n",
        "])\n",
        "\n",
        "# 加载CIFAR-10数据集\n",
        "img_dir = r\"competitions/cifar-10/train\"  # 图片文件夹路径\n",
        "labels_file = r\"./trainLabels.csv\"  # 标签文件路径\n",
        "full_dataset = CIFAR10Dataset(img_dir=img_dir, labels_file=labels_file, transform=transform)  # 创建完整数据集对象\n",
        "\n",
        "# 定义类别名称列表\n",
        "class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']  # 类别名称\n",
        "\n",
        "# 划分训练集和验证集\n",
        "train_size = 45000  # 训练集大小\n",
        "val_size = 5000  # 验证集大小\n",
        "generator = torch.Generator().manual_seed(42)  # 固定随机种子，保证可复现\n",
        "train_dataset, val_dataset = torch.utils.data.random_split(  # 随机划分数据集\n",
        "    full_dataset,  # 完整数据集\n",
        "    [train_size, val_size],  # 划分比例\n",
        "    generator=generator  # 随机数生成器\n",
        ")\n",
        "\n",
        "# 查看数据集基本信息\n",
        "print(f\"完整数据集大小: {len(full_dataset)}\")  # 打印完整数据集大小\n",
        "print(f\"训练集大小: {len(train_dataset)}\")  # 打印训练集大小\n",
        "print(f\"验证集大小: {len(val_dataset)}\")  # 打印验证集大小\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 9,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.148120Z",
          "start_time": "2025-06-26T01:43:33.145230Z"
        },
        "id": "dwLmD6WyaGiU"
      },
      "outputs": [],
      "source": [
        "def cal_mean_std(ds):\n",
        "    mean = 0.\n",
        "    std = 0.\n",
        "    for img, _ in ds:\n",
        "        mean += img.mean(dim=(1, 2)) #dim=(1, 2)表示在通道维度上求平均\n",
        "        std += img.std(dim=(1, 2))  #dim=(1, 2)表示在通道维度上求标准差\n",
        "    mean /= len(ds)\n",
        "    std /= len(ds)\n",
        "    return mean, std\n",
        "# cal_mean_std(train_dataset)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 10,
      "metadata": {
        "id": "gytNKcXdaGiU"
      },
      "outputs": [],
      "source": [
        "# 将划分好的45000训练集和5000验证集给DataLoader\n",
        "# 创建数据加载器\n",
        "batch_size = 64\n",
        "train_loader = torch.utils.data.DataLoader(\n",
        "    train_dataset,\n",
        "    batch_size=batch_size,\n",
        "    shuffle=True #打乱数据集，每次迭代时，数据集的顺序都会被打乱\n",
        ")\n",
        "\n",
        "val_loader = torch.utils.data.DataLoader(\n",
        "    val_dataset,\n",
        "    batch_size=batch_size,\n",
        "    shuffle=False\n",
        ")\n"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "fllOhr1daGiU"
      },
      "source": [
        "# 复现简单版vgg11"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 11,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.152657Z",
          "start_time": "2025-06-26T01:43:33.148120Z"
        },
        "id": "cu8KLUamaGiU"
      },
      "outputs": [],
      "source": [
        "import torch.nn as nn  # 导入PyTorch的神经网络模块\n",
        "import torch.nn.functional as F  # 导入PyTorch的函数式API\n",
        "\n",
        "class VGG11_CIFAR10(nn.Module):  # 定义VGG11结构的模型，适用于CIFAR10\n",
        "    def __init__(self):\n",
        "        super().__init__()  # 调用父类初始化方法\n",
        "\n",
        "        # nn.Sequential的作用是将多个层组合成一个有序的容器，按顺序依次执行每一层\n",
        "        # 这样可以让代码更加简洁，方便管理和调用一组网络层\n",
        "        # 例如：nn.Sequential(卷积层, 批归一化, 激活函数, 池化层)会依次执行这些操作\n",
        "\n",
        "        # 第一组卷积层，输入3通道，输出64通道\n",
        "        self.conv1 = nn.Sequential(\n",
        "            nn.Conv2d(3, 64, kernel_size=3, padding=1),  # 卷积层，3->64\n",
        "            nn.BatchNorm2d(64),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)  # 最大池化\n",
        "        )\n",
        "\n",
        "        # 第二组卷积层，输入64通道，输出128通道\n",
        "        self.conv2 = nn.Sequential(\n",
        "            nn.Conv2d(64, 128, kernel_size=3, padding=1),  # 卷积层，64->128\n",
        "            nn.BatchNorm2d(128),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)  # 最大池化\n",
        "        )\n",
        "\n",
        "        # 第三组卷积层，输入128通道，输出256通道\n",
        "        self.conv3 = nn.Sequential(\n",
        "            nn.Conv2d(128, 256, kernel_size=3, padding=1),  # 卷积层，128->256\n",
        "            nn.BatchNorm2d(256),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.Conv2d(256, 256, kernel_size=3, padding=1),  # 卷积层，256->256\n",
        "            nn.BatchNorm2d(256),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)  # 最大池化\n",
        "        )\n",
        "\n",
        "        # 第四组卷积层，输入256通道，输出512通道\n",
        "        self.conv4 = nn.Sequential(\n",
        "            nn.Conv2d(256, 512, kernel_size=3, padding=1),  # 卷积层，256->512\n",
        "            nn.BatchNorm2d(512),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.Conv2d(512, 512, kernel_size=3, padding=1),  # 卷积层，512->512\n",
        "            nn.BatchNorm2d(512),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)  # 最大池化\n",
        "        )\n",
        "\n",
        "        # 第五组卷积层，输入512通道，输出512通道\n",
        "        self.conv5 = nn.Sequential(\n",
        "            nn.Conv2d(512, 512, kernel_size=3, padding=1),  # 卷积层，512->512\n",
        "            nn.BatchNorm2d(512),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.Conv2d(512, 512, kernel_size=3, padding=1),  # 卷积层，512->512\n",
        "            nn.BatchNorm2d(512),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)  # 最大池化\n",
        "        )\n",
        "\n",
        "        # 计算展平后的特征维度\n",
        "        # CIFAR10输入32x32，经过5次2x2池化后为1x1\n",
        "        self.classifier = nn.Sequential(\n",
        "            nn.Linear(512 * 1 * 1, 4096),  # 全连接层，输入512，输出4096\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.Dropout(0.5),  # Dropout防止过拟合\n",
        "            nn.Linear(4096, 4096),  # 全连接层，4096->4096\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.Dropout(0.5),  # Dropout防止过拟合\n",
        "            nn.Linear(4096, 10)  # 输出层，10分类\n",
        "        )\n",
        "\n",
        "        self._initialize_weights()  # 初始化权重\n",
        "\n",
        "    def _initialize_weights(self):\n",
        "        # 使用xavier初始化卷积层和全连接层权重\n",
        "        for m in self.modules():\n",
        "            if isinstance(m, nn.Conv2d):  # 如果是卷积层\n",
        "                nn.init.xavier_uniform_(m.weight)  # xavier均匀初始化\n",
        "                if m.bias is not None:  # 如果有偏置\n",
        "                    nn.init.zeros_(m.bias)  # 偏置初始化为0\n",
        "            elif isinstance(m, nn.Linear):  # 如果是全连接层\n",
        "                nn.init.xavier_uniform_(m.weight)  # xavier均匀初始化\n",
        "                if m.bias is not None:  # 如果有偏置\n",
        "                    nn.init.zeros_(m.bias)  # 偏置初始化为0\n",
        "\n",
        "    def forward(self, x):\n",
        "        x = self.conv1(x)  # 第一组卷积\n",
        "        x = self.conv2(x)  # 第二组卷积\n",
        "        x = self.conv3(x)  # 第三组卷积\n",
        "        x = self.conv4(x)  # 第四组卷积\n",
        "        x = self.conv5(x)  # 第五组卷积\n",
        "        x = x.view(x.size(0), -1)  # 展平成一维\n",
        "        x = self.classifier(x)  # 全连接分类器\n",
        "        return x  # 返回输出\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 12,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.185031Z",
          "start_time": "2025-06-26T01:43:33.152657Z"
        },
        "id": "87txlJ0AaGiV",
        "outputId": "276ab0bc-a845-4fa9-b44d-6778f34b720a",
        "colab": {
          "base_uri": "https://localhost:8080/"
        }
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "批次图像形状: torch.Size([64, 3, 32, 32])\n",
            "批次标签形状: torch.Size([64])\n",
            "----------------------------------------------------------------------------------------------------\n",
            "torch.Size([64, 10])\n"
          ]
        }
      ],
      "source": [
        "# 实例化模型\n",
        "model = VGG11_CIFAR10()\n",
        "\n",
        "# 从train_loader获取第一个批次的数据\n",
        "dataiter = iter(train_loader)\n",
        "images, labels = next(dataiter)\n",
        "\n",
        "# 查看批次数据的形状\n",
        "print(\"批次图像形状:\", images.shape)\n",
        "print(\"批次标签形状:\", labels.shape)\n",
        "\n",
        "\n",
        "print('-'*100)\n",
        "# 进行前向传播\n",
        "with torch.no_grad():  # 不需要计算梯度\n",
        "    outputs = model(images)\n",
        "\n",
        "\n",
        "print(outputs.shape)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 13,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.203053Z",
          "start_time": "2025-06-26T01:43:33.199532Z"
        },
        "id": "C8o7E58BaGiV",
        "outputId": "f8ea1fc2-5840-446a-9da7-a4362a2ceca5",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "collapsed": true
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "需要求梯度的参数总量: 28149514\n",
            "模型总参数量: 28149514\n",
            "\n",
            "各层参数量明细:\n",
            "conv1.0.weight: 1728 参数\n",
            "conv1.0.bias: 64 参数\n",
            "conv1.1.weight: 64 参数\n",
            "conv1.1.bias: 64 参数\n",
            "conv2.0.weight: 73728 参数\n",
            "conv2.0.bias: 128 参数\n",
            "conv2.1.weight: 128 参数\n",
            "conv2.1.bias: 128 参数\n",
            "conv3.0.weight: 294912 参数\n",
            "conv3.0.bias: 256 参数\n",
            "conv3.1.weight: 256 参数\n",
            "conv3.1.bias: 256 参数\n",
            "conv3.3.weight: 589824 参数\n",
            "conv3.3.bias: 256 参数\n",
            "conv3.4.weight: 256 参数\n",
            "conv3.4.bias: 256 参数\n",
            "conv4.0.weight: 1179648 参数\n",
            "conv4.0.bias: 512 参数\n",
            "conv4.1.weight: 512 参数\n",
            "conv4.1.bias: 512 参数\n",
            "conv4.3.weight: 2359296 参数\n",
            "conv4.3.bias: 512 参数\n",
            "conv4.4.weight: 512 参数\n",
            "conv4.4.bias: 512 参数\n",
            "conv5.0.weight: 2359296 参数\n",
            "conv5.0.bias: 512 参数\n",
            "conv5.1.weight: 512 参数\n",
            "conv5.1.bias: 512 参数\n",
            "conv5.3.weight: 2359296 参数\n",
            "conv5.3.bias: 512 参数\n",
            "conv5.4.weight: 512 参数\n",
            "conv5.4.bias: 512 参数\n",
            "classifier.0.weight: 2097152 参数\n",
            "classifier.0.bias: 4096 参数\n",
            "classifier.3.weight: 16777216 参数\n",
            "classifier.3.bias: 4096 参数\n",
            "classifier.6.weight: 40960 参数\n",
            "classifier.6.bias: 10 参数\n"
          ]
        }
      ],
      "source": [
        "# 计算模型的总参数量\n",
        "# 统计需要求梯度的参数总量\n",
        "total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
        "print(f\"需要求梯度的参数总量: {total_params}\")\n",
        "\n",
        "# 统计所有参数总量\n",
        "all_params = sum(p.numel() for p in model.parameters())\n",
        "print(f\"模型总参数量: {all_params}\")\n",
        "\n",
        "# 查看每层参数量明细\n",
        "print(\"\\n各层参数量明细:\")\n",
        "for name, param in model.named_parameters():\n",
        "    print(f\"{name}: {param.numel()} 参数\")\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 14,
      "metadata": {
        "id": "T0J8iiHDaGiV",
        "outputId": "685e8376-0950-42f2-a9f3-9926a5b568d6",
        "colab": {
          "base_uri": "https://localhost:8080/"
        }
      },
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "294912"
            ]
          },
          "metadata": {},
          "execution_count": 14
        }
      ],
      "source": [
        "128*3*3*256"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "pRqqzwCUaGiV"
      },
      "source": [
        "# 各层参数量明细:\n",
        "conv1.weight: 288 参数 3*3*1*32\n",
        "conv1.bias: 32 参数\n",
        "conv2.weight: 9216 参数 3*3*32*32\n",
        "conv2.bias: 32 参数  \n",
        "conv3.weight: 18432 参数 3*3*32*64\n",
        "conv3.bias: 64 参数\n",
        "conv4.weight: 36864 参数  3*3*64*64\n",
        "conv4.bias: 64 参数\n",
        "conv5.weight: 73728 参数\n",
        "conv5.bias: 128 参数\n",
        "conv6.weight: 147456 参数\n",
        "conv6.bias: 128 参数\n",
        "fc1.weight: 294912 参数 128*3*3*256\n",
        "fc1.bias: 256 参数\n",
        "fc2.weight: 2560 参数\n",
        "fc2.bias: 10 参数"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 15,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.217395Z",
          "start_time": "2025-06-26T01:43:33.203561Z"
        },
        "id": "u5fYYs4RaGiW",
        "outputId": "060310ca-92ea-4879-ed87-0340d360a703",
        "collapsed": true,
        "colab": {
          "base_uri": "https://localhost:8080/"
        }
      },
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "OrderedDict([('conv1.0.weight',\n",
              "              tensor([[[[-0.0693, -0.0058, -0.0160],\n",
              "                        [-0.0442, -0.0657, -0.0296],\n",
              "                        [-0.0115,  0.0343, -0.0542]],\n",
              "              \n",
              "                       [[-0.0861, -0.0533,  0.0476],\n",
              "                        [ 0.0245, -0.0738,  0.0857],\n",
              "                        [ 0.0347,  0.0746,  0.0861]],\n",
              "              \n",
              "                       [[ 0.0801,  0.0073,  0.0088],\n",
              "                        [ 0.0126, -0.0483, -0.0541],\n",
              "                        [ 0.0897, -0.0943, -0.0262]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0086,  0.0328, -0.0926],\n",
              "                        [-0.0438, -0.0941,  0.0288],\n",
              "                        [-0.0045,  0.0761, -0.0326]],\n",
              "              \n",
              "                       [[-0.0149, -0.0778,  0.0809],\n",
              "                        [-0.0183, -0.0387,  0.0592],\n",
              "                        [ 0.0643, -0.0397, -0.0363]],\n",
              "              \n",
              "                       [[-0.0365, -0.0391,  0.0186],\n",
              "                        [ 0.0212, -0.0265, -0.0991],\n",
              "                        [-0.0803,  0.0993, -0.0911]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0742, -0.0062,  0.0736],\n",
              "                        [-0.0030, -0.0543, -0.0226],\n",
              "                        [ 0.0910,  0.0684, -0.0908]],\n",
              "              \n",
              "                       [[-0.0849,  0.0588,  0.0627],\n",
              "                        [ 0.0029, -0.0951,  0.0786],\n",
              "                        [-0.0858, -0.0513,  0.0898]],\n",
              "              \n",
              "                       [[-0.0062,  0.0948, -0.0733],\n",
              "                        [-0.0632, -0.0952, -0.0221],\n",
              "                        [-0.0284,  0.0395, -0.0520]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0901, -0.0434, -0.0496],\n",
              "                        [-0.0898, -0.0278,  0.0292],\n",
              "                        [ 0.0909, -0.0901,  0.0391]],\n",
              "              \n",
              "                       [[-0.0943,  0.0503, -0.0917],\n",
              "                        [-0.0412, -0.0977,  0.0407],\n",
              "                        [ 0.0295, -0.0526, -0.0076]],\n",
              "              \n",
              "                       [[-0.0478, -0.0156, -0.0144],\n",
              "                        [ 0.0198,  0.0897, -0.0488],\n",
              "                        [-0.0930,  0.0476, -0.0004]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0489,  0.0615, -0.0030],\n",
              "                        [-0.0531, -0.0975, -0.0014],\n",
              "                        [ 0.0899, -0.0474, -0.0125]],\n",
              "              \n",
              "                       [[-0.0792, -0.0896, -0.0201],\n",
              "                        [-0.0901,  0.0738,  0.0978],\n",
              "                        [ 0.0590, -0.0685,  0.0523]],\n",
              "              \n",
              "                       [[ 0.0679, -0.0939,  0.0448],\n",
              "                        [-0.0159,  0.0512,  0.0671],\n",
              "                        [ 0.0066, -0.0942, -0.0812]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0537, -0.0625,  0.0833],\n",
              "                        [ 0.0506, -0.0287,  0.0133],\n",
              "                        [-0.0976,  0.0474, -0.0866]],\n",
              "              \n",
              "                       [[-0.0059, -0.0440,  0.0733],\n",
              "                        [-0.0737,  0.0516, -0.0253],\n",
              "                        [ 0.0724, -0.0231,  0.0263]],\n",
              "              \n",
              "                       [[-0.0622,  0.0009, -0.0306],\n",
              "                        [ 0.0777, -0.0082, -0.0709],\n",
              "                        [-0.0825,  0.0091, -0.0265]]]])),\n",
              "             ('conv1.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv1.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv1.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv1.1.running_mean',\n",
              "              tensor([-2.3274e-04, -1.4844e-03, -1.4400e-03, -9.8876e-05,  2.9185e-03,\n",
              "                      -1.3086e-03, -2.3754e-03,  1.9371e-03, -6.0460e-04,  4.3198e-04,\n",
              "                       1.5147e-03,  1.8800e-03,  1.9238e-03, -7.2353e-04,  5.4567e-05,\n",
              "                      -2.4093e-03, -1.5727e-03, -4.4082e-04,  2.0637e-03, -7.7941e-04,\n",
              "                      -8.7657e-04, -5.8293e-04, -3.1361e-04, -6.2441e-04,  8.1948e-04,\n",
              "                       4.2904e-04,  2.8580e-03, -2.7672e-04,  3.2294e-04,  1.8616e-03,\n",
              "                      -4.5145e-04,  2.6687e-04,  1.1822e-03, -7.2869e-04, -2.5656e-03,\n",
              "                       9.0086e-04,  2.1580e-03, -2.4748e-04,  4.5803e-04, -7.0906e-04,\n",
              "                       1.1520e-03,  9.0643e-04,  2.0603e-03,  6.3267e-04, -7.9993e-04,\n",
              "                       7.4006e-04,  7.9624e-04, -2.7847e-03,  2.2547e-04,  2.3495e-04,\n",
              "                      -1.4154e-03, -1.0610e-03, -2.3052e-03, -5.7984e-04, -3.7209e-04,\n",
              "                      -8.7150e-04, -6.6358e-04, -2.1810e-03,  1.7014e-04,  7.3054e-04,\n",
              "                      -2.9591e-03,  1.5117e-04, -3.2756e-05, -1.3517e-03])),\n",
              "             ('conv1.1.running_var',\n",
              "              tensor([0.9054, 0.9176, 0.9106, 0.9059, 0.9353, 0.9193, 0.9111, 0.9031, 0.9077,\n",
              "                      0.9028, 0.9032, 0.9071, 0.9100, 0.9133, 0.9043, 0.9183, 0.9217, 0.9036,\n",
              "                      0.9110, 0.9257, 0.9058, 0.9092, 0.9075, 0.9050, 0.9064, 0.9034, 0.9356,\n",
              "                      0.9421, 0.9029, 0.9128, 0.9081, 0.9031, 0.9081, 0.9139, 0.9178, 0.9045,\n",
              "                      0.9061, 0.9037, 0.9106, 0.9059, 0.9080, 0.9461, 0.9252, 0.9039, 0.9089,\n",
              "                      0.9038, 0.9092, 0.9080, 0.9053, 0.9078, 0.9023, 0.9090, 0.9184, 0.9035,\n",
              "                      0.9356, 0.9053, 0.9096, 0.9255, 0.9049, 0.9008, 0.9319, 0.9180, 0.9034,\n",
              "                      0.9044])),\n",
              "             ('conv1.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv2.0.weight',\n",
              "              tensor([[[[ 0.0296, -0.0568,  0.0503],\n",
              "                        [-0.0353, -0.0483, -0.0151],\n",
              "                        [-0.0234, -0.0518,  0.0462]],\n",
              "              \n",
              "                       [[ 0.0511,  0.0009,  0.0326],\n",
              "                        [-0.0237, -0.0521,  0.0280],\n",
              "                        [ 0.0447,  0.0510,  0.0160]],\n",
              "              \n",
              "                       [[ 0.0299, -0.0074, -0.0482],\n",
              "                        [-0.0346, -0.0471, -0.0334],\n",
              "                        [ 0.0204, -0.0461,  0.0071]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0235, -0.0321, -0.0012],\n",
              "                        [-0.0450, -0.0580,  0.0393],\n",
              "                        [-0.0315,  0.0005, -0.0547]],\n",
              "              \n",
              "                       [[-0.0029,  0.0420,  0.0348],\n",
              "                        [ 0.0542, -0.0527,  0.0527],\n",
              "                        [ 0.0120,  0.0166, -0.0558]],\n",
              "              \n",
              "                       [[ 0.0211, -0.0008,  0.0297],\n",
              "                        [ 0.0472, -0.0090,  0.0462],\n",
              "                        [ 0.0556, -0.0208, -0.0425]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0221, -0.0291, -0.0136],\n",
              "                        [ 0.0473, -0.0311,  0.0354],\n",
              "                        [ 0.0250,  0.0474, -0.0203]],\n",
              "              \n",
              "                       [[-0.0076,  0.0540,  0.0435],\n",
              "                        [ 0.0319,  0.0151,  0.0520],\n",
              "                        [ 0.0038, -0.0570,  0.0402]],\n",
              "              \n",
              "                       [[ 0.0327, -0.0281,  0.0302],\n",
              "                        [-0.0154, -0.0254, -0.0078],\n",
              "                        [-0.0268,  0.0355,  0.0235]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0501,  0.0370,  0.0273],\n",
              "                        [-0.0531, -0.0070,  0.0255],\n",
              "                        [ 0.0206,  0.0297, -0.0411]],\n",
              "              \n",
              "                       [[-0.0131, -0.0173,  0.0074],\n",
              "                        [-0.0252,  0.0009,  0.0384],\n",
              "                        [-0.0586, -0.0171,  0.0349]],\n",
              "              \n",
              "                       [[ 0.0476, -0.0357,  0.0383],\n",
              "                        [ 0.0020, -0.0334,  0.0499],\n",
              "                        [ 0.0503, -0.0350,  0.0470]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0293,  0.0153, -0.0516],\n",
              "                        [-0.0083,  0.0244,  0.0278],\n",
              "                        [-0.0475, -0.0049,  0.0278]],\n",
              "              \n",
              "                       [[-0.0090, -0.0207, -0.0102],\n",
              "                        [ 0.0136, -0.0241,  0.0119],\n",
              "                        [-0.0327, -0.0098, -0.0425]],\n",
              "              \n",
              "                       [[ 0.0269,  0.0221, -0.0199],\n",
              "                        [ 0.0270,  0.0426,  0.0437],\n",
              "                        [-0.0376,  0.0489, -0.0576]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0346,  0.0169, -0.0506],\n",
              "                        [-0.0550, -0.0107,  0.0586],\n",
              "                        [ 0.0150, -0.0487, -0.0338]],\n",
              "              \n",
              "                       [[-0.0059,  0.0264,  0.0449],\n",
              "                        [ 0.0528, -0.0078,  0.0060],\n",
              "                        [-0.0274, -0.0483, -0.0123]],\n",
              "              \n",
              "                       [[ 0.0551,  0.0046, -0.0506],\n",
              "                        [-0.0211, -0.0353, -0.0495],\n",
              "                        [-0.0197, -0.0012,  0.0242]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-0.0558, -0.0573,  0.0407],\n",
              "                        [ 0.0444, -0.0037,  0.0445],\n",
              "                        [ 0.0092,  0.0205, -0.0040]],\n",
              "              \n",
              "                       [[ 0.0053, -0.0387, -0.0139],\n",
              "                        [ 0.0303, -0.0512, -0.0558],\n",
              "                        [ 0.0129,  0.0029, -0.0210]],\n",
              "              \n",
              "                       [[-0.0541,  0.0075, -0.0369],\n",
              "                        [-0.0044, -0.0492, -0.0587],\n",
              "                        [-0.0495, -0.0144,  0.0244]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0083,  0.0063, -0.0458],\n",
              "                        [ 0.0024,  0.0040,  0.0025],\n",
              "                        [ 0.0090, -0.0188,  0.0370]],\n",
              "              \n",
              "                       [[-0.0471,  0.0540, -0.0389],\n",
              "                        [-0.0568,  0.0427,  0.0502],\n",
              "                        [-0.0141, -0.0254, -0.0146]],\n",
              "              \n",
              "                       [[-0.0443,  0.0390, -0.0398],\n",
              "                        [-0.0487, -0.0336, -0.0146],\n",
              "                        [ 0.0013, -0.0222, -0.0170]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0346, -0.0058,  0.0066],\n",
              "                        [-0.0423, -0.0077,  0.0493],\n",
              "                        [-0.0577,  0.0522,  0.0116]],\n",
              "              \n",
              "                       [[ 0.0077,  0.0459,  0.0521],\n",
              "                        [-0.0126,  0.0567,  0.0550],\n",
              "                        [ 0.0514,  0.0295,  0.0095]],\n",
              "              \n",
              "                       [[ 0.0512,  0.0326,  0.0326],\n",
              "                        [ 0.0101, -0.0387,  0.0138],\n",
              "                        [ 0.0352,  0.0320, -0.0135]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0527, -0.0010,  0.0389],\n",
              "                        [-0.0582,  0.0100, -0.0030],\n",
              "                        [-0.0440,  0.0398, -0.0420]],\n",
              "              \n",
              "                       [[ 0.0414, -0.0337, -0.0084],\n",
              "                        [-0.0071,  0.0332,  0.0356],\n",
              "                        [ 0.0150,  0.0156, -0.0475]],\n",
              "              \n",
              "                       [[ 0.0321, -0.0051,  0.0134],\n",
              "                        [ 0.0585, -0.0327, -0.0421],\n",
              "                        [-0.0335, -0.0423,  0.0390]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0551,  0.0284, -0.0534],\n",
              "                        [-0.0418, -0.0142, -0.0579],\n",
              "                        [ 0.0523,  0.0365, -0.0127]],\n",
              "              \n",
              "                       [[-0.0516,  0.0190, -0.0145],\n",
              "                        [-0.0084, -0.0588,  0.0277],\n",
              "                        [ 0.0143, -0.0231, -0.0447]],\n",
              "              \n",
              "                       [[-0.0178, -0.0254, -0.0139],\n",
              "                        [ 0.0168,  0.0057, -0.0016],\n",
              "                        [ 0.0326,  0.0488,  0.0113]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0038, -0.0224, -0.0508],\n",
              "                        [-0.0257, -0.0549,  0.0082],\n",
              "                        [-0.0139, -0.0334,  0.0464]],\n",
              "              \n",
              "                       [[ 0.0081, -0.0483, -0.0062],\n",
              "                        [ 0.0062,  0.0115,  0.0096],\n",
              "                        [-0.0374,  0.0017,  0.0280]],\n",
              "              \n",
              "                       [[-0.0377, -0.0511,  0.0046],\n",
              "                        [ 0.0245,  0.0073,  0.0003],\n",
              "                        [-0.0108,  0.0101,  0.0507]]]])),\n",
              "             ('conv2.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv2.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1.])),\n",
              "             ('conv2.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv2.1.running_mean',\n",
              "              tensor([-0.0271,  0.0098, -0.0070,  0.0352, -0.0175, -0.0135,  0.0009,  0.0159,\n",
              "                      -0.0089,  0.0473,  0.0165, -0.0383, -0.0142, -0.0598, -0.0283, -0.0415,\n",
              "                      -0.0038, -0.0087,  0.0488, -0.0512, -0.0556, -0.0085, -0.0163, -0.0021,\n",
              "                       0.0152, -0.0091,  0.0350,  0.0243, -0.0175,  0.0044, -0.0176, -0.0602,\n",
              "                      -0.0136,  0.0225,  0.0175, -0.0132,  0.0227,  0.0230,  0.0010, -0.0155,\n",
              "                       0.0464,  0.0177,  0.0465,  0.0478,  0.0087, -0.0862, -0.0122, -0.0644,\n",
              "                      -0.0628,  0.0393,  0.0346,  0.0484,  0.0293, -0.0120,  0.0466, -0.0722,\n",
              "                      -0.0059, -0.1098, -0.0436,  0.0783,  0.0064, -0.0574,  0.0282,  0.0306,\n",
              "                       0.0223,  0.0258, -0.0127,  0.0937,  0.0198,  0.0012, -0.0124,  0.0307,\n",
              "                       0.0184,  0.0010,  0.0646,  0.0040, -0.0519,  0.0166,  0.0162,  0.0103,\n",
              "                      -0.0193, -0.0309,  0.0381, -0.0857,  0.0125,  0.0333,  0.0376,  0.0555,\n",
              "                      -0.0014,  0.0171, -0.0205,  0.0419, -0.0647, -0.0119, -0.0174, -0.0201,\n",
              "                      -0.0529, -0.0617,  0.0074,  0.0473, -0.0575, -0.0311, -0.0427, -0.0490,\n",
              "                      -0.0746,  0.0664, -0.0383,  0.0048,  0.0362,  0.0100,  0.0402, -0.0528,\n",
              "                       0.0077, -0.0248, -0.0275,  0.1005,  0.0145,  0.0015, -0.0330,  0.0121,\n",
              "                      -0.0452, -0.0197,  0.0190,  0.0713,  0.0602, -0.0336, -0.0109,  0.0015])),\n",
              "             ('conv2.1.running_var',\n",
              "              tensor([0.9297, 0.9175, 0.9358, 0.9384, 0.9265, 0.9297, 0.9556, 0.9151, 0.9334,\n",
              "                      0.9159, 0.9255, 0.9212, 0.9263, 0.9250, 0.9239, 0.9332, 0.9162, 0.9112,\n",
              "                      0.9239, 0.9415, 0.9187, 0.9290, 0.9426, 0.9124, 0.9231, 0.9125, 0.9209,\n",
              "                      0.9258, 0.9412, 0.9337, 0.9369, 0.9334, 0.9324, 0.9153, 0.9169, 0.9850,\n",
              "                      0.9592, 0.9304, 0.9192, 0.9160, 0.9326, 0.9167, 0.9239, 0.9209, 0.9295,\n",
              "                      0.9266, 0.9467, 0.9301, 0.9492, 0.9480, 0.9562, 0.9355, 0.9422, 0.9332,\n",
              "                      0.9297, 0.9258, 0.9194, 0.9369, 0.9204, 0.9404, 0.9264, 0.9468, 0.9244,\n",
              "                      0.9248, 0.9351, 0.9198, 0.9549, 0.9433, 1.0146, 0.9643, 0.9241, 0.9166,\n",
              "                      0.9726, 0.9125, 0.9242, 0.9320, 0.9386, 0.9109, 0.9149, 0.9237, 0.9186,\n",
              "                      0.9254, 0.9192, 0.9580, 0.9296, 0.9259, 0.9496, 0.9282, 0.9372, 0.9271,\n",
              "                      0.9129, 0.9285, 0.9275, 0.9454, 0.9475, 0.9337, 0.9203, 0.9320, 0.9160,\n",
              "                      0.9280, 0.9397, 0.9323, 0.9261, 0.9398, 0.9214, 0.9283, 0.9211, 0.9388,\n",
              "                      0.9179, 0.9262, 0.9502, 0.9139, 0.9376, 0.9161, 0.9328, 0.9313, 0.9353,\n",
              "                      0.9173, 0.9386, 0.9171, 0.9378, 0.9265, 0.9312, 0.9418, 0.9343, 0.9366,\n",
              "                      0.9331, 0.9521])),\n",
              "             ('conv2.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv3.0.weight',\n",
              "              tensor([[[[ 3.8621e-02, -2.7824e-02,  2.2123e-02],\n",
              "                        [-3.2021e-02, -3.6062e-02, -2.5665e-02],\n",
              "                        [ 2.2287e-02,  4.0214e-02,  1.7857e-02]],\n",
              "              \n",
              "                       [[-1.0877e-02, -2.9280e-02,  1.5010e-02],\n",
              "                        [-3.9415e-02, -2.6740e-02,  1.3953e-02],\n",
              "                        [-3.5244e-02,  2.4062e-03, -2.7031e-04]],\n",
              "              \n",
              "                       [[ 2.9673e-02,  4.5099e-03,  2.0695e-02],\n",
              "                        [ 3.9045e-02,  3.9402e-02, -4.6587e-03],\n",
              "                        [ 3.7207e-02, -1.6442e-02,  1.4255e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-4.0654e-02, -2.8997e-02, -2.2564e-02],\n",
              "                        [ 2.6576e-02,  3.8427e-02, -8.9441e-03],\n",
              "                        [ 4.0066e-02,  3.2432e-02, -2.3882e-02]],\n",
              "              \n",
              "                       [[ 2.7712e-02,  1.1711e-03,  2.9749e-02],\n",
              "                        [ 8.6428e-03,  2.6483e-02,  1.7212e-02],\n",
              "                        [-8.3408e-03, -1.7157e-02,  5.9925e-03]],\n",
              "              \n",
              "                       [[-1.2019e-02,  1.1690e-02, -3.6181e-02],\n",
              "                        [-1.3975e-02,  1.9243e-02, -3.6635e-02],\n",
              "                        [-3.4398e-02,  1.4173e-02, -4.0212e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 2.7532e-02,  1.7848e-02, -2.0501e-02],\n",
              "                        [-2.8268e-02, -1.2601e-02,  2.2186e-02],\n",
              "                        [ 9.5471e-04,  2.7724e-02,  2.4116e-03]],\n",
              "              \n",
              "                       [[-5.8239e-03, -2.2381e-02,  3.4375e-02],\n",
              "                        [-1.7114e-02,  3.8148e-02,  1.3002e-02],\n",
              "                        [-2.3866e-02, -1.5096e-02,  2.1307e-02]],\n",
              "              \n",
              "                       [[ 3.2165e-02, -3.5550e-02, -2.1782e-02],\n",
              "                        [-1.5153e-02, -2.3857e-02,  9.7629e-03],\n",
              "                        [-2.4772e-02,  3.9022e-02, -1.5969e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-6.4174e-05,  3.8416e-03, -1.1138e-03],\n",
              "                        [ 3.6520e-02,  1.8413e-02,  2.0269e-02],\n",
              "                        [-2.3852e-02,  2.5513e-02, -9.9792e-04]],\n",
              "              \n",
              "                       [[-8.8884e-03,  2.5206e-02, -7.7972e-03],\n",
              "                        [-1.3707e-02,  7.6069e-03, -1.0338e-02],\n",
              "                        [-3.6617e-02,  2.1292e-03,  1.3840e-03]],\n",
              "              \n",
              "                       [[ 2.8284e-03, -4.9626e-03,  2.3243e-02],\n",
              "                        [ 7.0093e-03,  9.9673e-03,  4.8649e-03],\n",
              "                        [ 3.2656e-02, -3.6209e-03,  7.0167e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[-3.6087e-02,  2.2928e-02, -5.2008e-03],\n",
              "                        [ 1.2616e-02, -8.1497e-03, -3.0358e-02],\n",
              "                        [-1.6885e-02, -9.5882e-03, -2.3266e-02]],\n",
              "              \n",
              "                       [[-1.2334e-02,  3.0745e-02,  9.6677e-04],\n",
              "                        [-9.4245e-03,  2.4040e-02, -2.9229e-02],\n",
              "                        [ 6.3873e-03, -1.1973e-02, -1.6101e-02]],\n",
              "              \n",
              "                       [[ 1.4492e-02, -1.1165e-04, -6.7393e-03],\n",
              "                        [-1.7521e-02, -3.0487e-02,  3.9192e-02],\n",
              "                        [ 1.7187e-02,  3.1720e-02,  9.3303e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.2223e-02,  3.7554e-02, -7.9771e-03],\n",
              "                        [ 2.5698e-02,  2.9796e-03, -1.1188e-03],\n",
              "                        [ 6.8078e-03, -5.0762e-03,  3.5506e-02]],\n",
              "              \n",
              "                       [[-3.5527e-02, -2.6959e-02,  2.2115e-02],\n",
              "                        [-1.9866e-02, -3.0956e-02, -5.6853e-03],\n",
              "                        [-1.9415e-02,  2.1180e-02, -3.3281e-02]],\n",
              "              \n",
              "                       [[ 3.4033e-02,  4.0351e-02,  2.5887e-02],\n",
              "                        [ 2.7711e-02, -2.5147e-02, -3.0033e-02],\n",
              "                        [ 1.7166e-03, -1.5505e-02, -5.2824e-03]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 1.5500e-02,  2.9028e-02, -2.9742e-02],\n",
              "                        [ 1.9933e-02,  6.5251e-03, -6.8362e-04],\n",
              "                        [ 1.3499e-02,  2.1973e-02,  1.2010e-02]],\n",
              "              \n",
              "                       [[-5.4725e-03,  1.1096e-02,  5.0066e-04],\n",
              "                        [-3.7518e-02, -3.2229e-02, -1.2125e-02],\n",
              "                        [ 3.7102e-02,  1.7351e-02,  1.7019e-02]],\n",
              "              \n",
              "                       [[-4.0902e-02, -3.5078e-02,  3.9082e-02],\n",
              "                        [ 3.8812e-02, -4.1407e-02,  9.1757e-03],\n",
              "                        [ 2.7802e-02,  9.1154e-03,  1.7449e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-2.4461e-02,  1.8390e-02, -1.2623e-02],\n",
              "                        [-4.0653e-02,  4.1297e-02, -3.2775e-02],\n",
              "                        [ 3.2111e-02, -2.2804e-02,  4.1061e-02]],\n",
              "              \n",
              "                       [[-3.6738e-02, -3.9497e-02, -6.7330e-03],\n",
              "                        [-4.4540e-03,  8.7664e-03,  2.1736e-02],\n",
              "                        [-1.2560e-02,  3.3292e-02, -3.8898e-02]],\n",
              "              \n",
              "                       [[ 1.9723e-02,  2.0692e-02, -4.0084e-02],\n",
              "                        [-3.7656e-02,  4.6693e-03,  3.6676e-02],\n",
              "                        [-1.4553e-02, -3.2576e-02, -6.4189e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 3.3784e-02, -3.8139e-02, -2.3499e-02],\n",
              "                        [-2.4527e-02, -3.8838e-02, -1.6833e-02],\n",
              "                        [ 1.5743e-02,  2.9932e-02, -3.5710e-02]],\n",
              "              \n",
              "                       [[-2.3302e-02,  1.4633e-02, -3.9022e-02],\n",
              "                        [-8.0351e-03, -1.1806e-02,  2.3217e-02],\n",
              "                        [ 3.1700e-02,  2.2690e-02,  3.8728e-02]],\n",
              "              \n",
              "                       [[-2.0522e-02, -1.5043e-02,  7.0111e-03],\n",
              "                        [ 2.5687e-02, -1.9048e-02, -2.6401e-02],\n",
              "                        [ 1.7329e-02, -4.6940e-03,  3.2784e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.2183e-02, -6.6613e-03,  2.3428e-02],\n",
              "                        [ 6.2098e-03, -6.9217e-03,  1.2931e-02],\n",
              "                        [ 2.1736e-02,  4.3785e-03, -2.0968e-03]],\n",
              "              \n",
              "                       [[-3.7215e-02, -2.6705e-03,  9.1357e-03],\n",
              "                        [-4.5577e-03, -3.6352e-02, -2.2042e-02],\n",
              "                        [-4.6472e-03,  1.6238e-02,  1.6625e-02]],\n",
              "              \n",
              "                       [[-2.2478e-02, -1.9583e-02, -3.6685e-02],\n",
              "                        [-5.0883e-03,  3.6957e-02,  4.0073e-03],\n",
              "                        [ 3.5373e-02,  1.3281e-02,  4.3298e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[-4.1598e-02, -2.2154e-02,  3.5781e-02],\n",
              "                        [ 3.0973e-02, -1.8801e-02,  1.3645e-02],\n",
              "                        [ 2.5019e-02, -3.2006e-02, -1.6253e-03]],\n",
              "              \n",
              "                       [[-1.8960e-02, -1.1672e-02,  3.2341e-02],\n",
              "                        [ 2.0896e-02,  2.7512e-02,  4.0390e-02],\n",
              "                        [-1.9821e-02,  2.2641e-02,  1.1561e-02]],\n",
              "              \n",
              "                       [[ 3.6723e-02,  3.0800e-02,  1.5834e-02],\n",
              "                        [ 1.1747e-02,  7.8004e-03, -2.5783e-02],\n",
              "                        [ 2.7398e-02, -3.6730e-02, -3.8737e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-2.0275e-02, -9.5166e-03, -1.6609e-02],\n",
              "                        [-2.4869e-02, -1.3778e-02,  1.3485e-02],\n",
              "                        [-2.1941e-02, -1.9125e-02, -4.0917e-02]],\n",
              "              \n",
              "                       [[-3.8607e-03, -3.0798e-02, -3.4429e-02],\n",
              "                        [-2.0771e-03, -3.4037e-03, -9.2214e-03],\n",
              "                        [-3.9834e-02,  2.6221e-03,  1.0897e-03]],\n",
              "              \n",
              "                       [[ 3.7357e-02,  1.9254e-02, -1.5152e-02],\n",
              "                        [ 1.4679e-02,  3.5143e-03,  2.4153e-02],\n",
              "                        [-3.9876e-02,  3.8836e-02, -2.4097e-04]]]])),\n",
              "             ('conv3.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv3.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1.])),\n",
              "             ('conv3.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv3.1.running_mean',\n",
              "              tensor([ 9.9117e-03, -2.5096e-02, -3.2369e-02, -3.2492e-02,  7.5739e-02,\n",
              "                       2.1318e-03,  3.3311e-02,  2.3547e-02,  3.2195e-02,  1.1507e-02,\n",
              "                       1.9825e-02, -6.7222e-02, -4.9115e-02,  1.1075e-02, -1.8200e-02,\n",
              "                      -5.4628e-02, -8.4768e-02,  8.2641e-02,  5.4117e-02, -1.3007e-03,\n",
              "                      -2.5025e-02,  8.7367e-02,  4.3339e-03, -1.1743e-02,  5.7421e-02,\n",
              "                      -1.0161e-02,  1.1581e-01, -3.4391e-02,  1.1982e-01,  1.1999e-02,\n",
              "                       1.0743e-01, -3.0071e-02, -1.6587e-02, -5.9401e-03, -5.5079e-02,\n",
              "                       3.7756e-02, -2.5789e-02, -1.7916e-02, -8.8055e-03, -1.1381e-03,\n",
              "                      -9.9448e-02, -1.8435e-02,  6.0751e-02, -4.0239e-02,  9.6288e-02,\n",
              "                      -2.7036e-02,  4.5660e-02,  3.4062e-02,  7.3525e-02,  3.8369e-02,\n",
              "                       5.1991e-02, -1.0366e-01,  3.3566e-02,  4.1712e-02,  7.2512e-02,\n",
              "                      -7.4802e-02, -1.9283e-02, -7.8943e-02, -9.2100e-03,  8.5722e-02,\n",
              "                       2.0369e-02, -1.0614e-02,  7.3965e-03,  3.6787e-02, -1.6606e-02,\n",
              "                      -4.3174e-02, -4.0536e-03,  6.9885e-02,  1.2657e-01, -3.7290e-02,\n",
              "                       1.2630e-02, -5.2501e-02, -2.4095e-02,  1.4260e-02, -5.0637e-02,\n",
              "                      -4.8815e-02,  1.9160e-02, -4.2611e-03, -1.7253e-02,  1.3941e-02,\n",
              "                      -2.5013e-02,  5.2958e-02, -2.0970e-02,  2.0960e-02,  8.0546e-03,\n",
              "                      -1.8592e-02,  7.1392e-04, -5.6077e-02, -5.6469e-03,  5.4138e-02,\n",
              "                      -1.9101e-02,  6.1442e-02, -3.8191e-03,  2.5389e-02,  9.8686e-02,\n",
              "                      -6.0143e-02,  3.4732e-02, -2.7979e-02, -2.2135e-02, -3.9297e-02,\n",
              "                      -1.6605e-03,  2.2251e-03, -1.0257e-01,  7.6765e-02, -1.1577e-02,\n",
              "                       2.5852e-02, -4.3493e-02, -6.4327e-02,  2.0170e-03, -9.6331e-02,\n",
              "                      -1.4664e-02, -7.4611e-02,  2.1924e-02, -4.1019e-02,  2.7342e-02,\n",
              "                       6.6433e-02, -2.6805e-02,  7.4460e-02, -2.6294e-02,  4.6573e-03,\n",
              "                       2.1294e-02, -7.7519e-02,  4.8035e-02,  7.1753e-02,  6.3350e-02,\n",
              "                       4.3602e-02,  1.5945e-02, -3.2136e-02, -3.6214e-02,  4.6586e-02,\n",
              "                      -1.7678e-02, -1.8266e-02, -5.5376e-02, -2.8239e-02, -8.6353e-02,\n",
              "                      -1.1278e-01, -3.7044e-02,  4.6530e-02, -9.8151e-02,  1.5851e-02,\n",
              "                      -9.5689e-03,  4.4195e-03, -6.3743e-02, -4.5523e-02,  4.7461e-02,\n",
              "                       4.6196e-03,  2.0387e-02,  1.3034e-03,  8.1869e-02,  5.3007e-03,\n",
              "                       1.2695e-02,  1.5955e-02,  1.3368e-02, -2.8079e-02, -7.0277e-03,\n",
              "                      -7.6972e-03, -1.0579e-01, -3.9203e-02,  1.0951e-02, -3.2238e-02,\n",
              "                       2.5898e-02, -4.5496e-02,  1.7381e-02, -2.0692e-03,  3.1112e-02,\n",
              "                       8.1627e-03, -7.2625e-02,  2.2584e-02,  1.2219e-02, -4.6911e-02,\n",
              "                      -1.4549e-02,  9.6170e-02, -3.1354e-03,  3.4871e-02,  7.8826e-03,\n",
              "                      -1.1128e-02,  6.8564e-03, -4.5751e-02, -5.5856e-03,  3.0614e-03,\n",
              "                      -1.3176e-02,  1.4430e-02,  5.2694e-02, -3.8205e-02, -2.5238e-02,\n",
              "                      -6.6114e-02, -2.1686e-02,  1.0820e-01,  7.4660e-03,  1.2737e-02,\n",
              "                       2.4543e-03,  6.0691e-03,  1.9965e-02, -3.3472e-02, -1.5794e-02,\n",
              "                      -3.8955e-02,  9.4638e-02, -2.3413e-02,  1.1375e-01,  8.2497e-05,\n",
              "                      -1.2631e-01,  3.9967e-02,  2.7983e-02, -4.4370e-02,  3.1179e-02,\n",
              "                       5.6220e-02,  3.7923e-04,  6.1128e-02, -2.9720e-02, -4.2863e-02,\n",
              "                      -4.1345e-02, -6.2592e-02,  3.4932e-02,  2.0657e-03, -2.8379e-02,\n",
              "                      -1.6454e-02, -1.8091e-02, -3.9013e-03, -9.0937e-02,  1.9878e-02,\n",
              "                       1.2100e-01, -1.0240e-02, -1.7758e-02, -4.6908e-03, -3.5649e-02,\n",
              "                      -1.6457e-03, -7.6292e-02,  8.3850e-02, -1.1721e-02,  6.9631e-03,\n",
              "                       3.9699e-02,  2.6835e-02,  8.2228e-03, -8.2674e-02, -3.6648e-02,\n",
              "                      -1.0792e-01,  5.8649e-02,  1.9012e-02, -5.0148e-02,  8.3150e-02,\n",
              "                      -6.4226e-02, -2.6958e-02,  2.8703e-02,  1.4378e-02, -2.2789e-02,\n",
              "                      -1.8122e-02, -3.7439e-02,  5.4907e-02, -1.0722e-01,  9.2946e-02,\n",
              "                      -2.4475e-02,  1.6185e-02,  2.2985e-02,  3.0681e-02,  1.9063e-02,\n",
              "                       2.5892e-02])),\n",
              "             ('conv3.1.running_var',\n",
              "              tensor([0.9257, 0.9338, 0.9310, 0.9540, 0.9354, 0.9296, 0.9456, 0.9243, 0.9218,\n",
              "                      0.9244, 0.9219, 0.9289, 0.9435, 0.9393, 0.9466, 0.9256, 0.9396, 0.9474,\n",
              "                      0.9225, 0.9275, 0.9293, 0.9454, 0.9340, 0.9346, 0.9525, 0.9265, 0.9628,\n",
              "                      0.9620, 0.9607, 0.9314, 0.9403, 0.9362, 0.9436, 0.9285, 0.9272, 0.9215,\n",
              "                      0.9352, 0.9459, 0.9353, 0.9236, 0.9457, 0.9267, 0.9299, 0.9240, 0.9501,\n",
              "                      0.9299, 0.9300, 0.9217, 0.9316, 0.9288, 0.9372, 0.9447, 0.9345, 0.9249,\n",
              "                      0.9246, 0.9371, 0.9426, 0.9409, 0.9294, 0.9446, 0.9343, 0.9340, 0.9363,\n",
              "                      0.9228, 0.9262, 0.9381, 0.9385, 0.9591, 0.9471, 0.9344, 0.9331, 0.9497,\n",
              "                      0.9394, 0.9267, 0.9325, 0.9290, 0.9270, 0.9414, 0.9267, 0.9252, 0.9353,\n",
              "                      0.9356, 0.9298, 0.9295, 0.9217, 0.9308, 0.9255, 0.9471, 0.9363, 0.9456,\n",
              "                      0.9342, 0.9455, 0.9257, 0.9336, 0.9385, 0.9428, 0.9381, 0.9368, 0.9272,\n",
              "                      0.9240, 0.9170, 0.9336, 0.9490, 0.9493, 0.9270, 0.9271, 0.9295, 0.9318,\n",
              "                      0.9379, 0.9451, 0.9482, 0.9537, 0.9486, 0.9287, 0.9369, 0.9341, 0.9382,\n",
              "                      0.9352, 0.9418, 0.9266, 0.9315, 0.9574, 0.9333, 0.9312, 0.9524, 0.9418,\n",
              "                      0.9282, 0.9322, 0.9299, 0.9264, 0.9292, 0.9478, 0.9212, 0.9310, 0.9562,\n",
              "                      0.9701, 0.9352, 0.9499, 0.9605, 0.9418, 0.9252, 0.9289, 0.9390, 0.9272,\n",
              "                      0.9305, 0.9256, 0.9362, 0.9218, 0.9315, 0.9219, 0.9240, 0.9433, 0.9421,\n",
              "                      0.9450, 0.9182, 0.9377, 0.9525, 0.9313, 0.9499, 0.9333, 0.9335, 0.9228,\n",
              "                      0.9206, 0.9267, 0.9385, 0.9261, 0.9335, 0.9335, 0.9276, 0.9294, 0.9311,\n",
              "                      0.9320, 0.9682, 0.9243, 0.9306, 0.9210, 0.9145, 0.9306, 0.9319, 0.9415,\n",
              "                      0.9266, 0.9316, 0.9206, 0.9429, 0.9223, 0.9499, 0.9287, 0.9426, 0.9335,\n",
              "                      0.9261, 0.9271, 0.9362, 0.9221, 0.9246, 0.9441, 0.9298, 0.9249, 0.9302,\n",
              "                      0.9376, 0.9261, 0.9454, 0.9324, 0.9248, 0.9271, 0.9342, 0.9376, 0.9252,\n",
              "                      0.9347, 0.9300, 0.9273, 0.9285, 0.9525, 0.9214, 0.9282, 0.9346, 0.9252,\n",
              "                      0.9356, 0.9276, 0.9380, 0.9267, 0.9383, 0.9509, 0.9379, 0.9308, 0.9355,\n",
              "                      0.9279, 0.9510, 0.9316, 0.9259, 0.9339, 0.9279, 0.9280, 0.9479, 0.9364,\n",
              "                      0.9399, 0.9535, 0.9296, 0.9232, 0.9310, 0.9536, 0.9385, 0.9311, 0.9235,\n",
              "                      0.9269, 0.9428, 0.9328, 0.9503, 0.9344, 0.9459, 0.9463, 0.9312, 0.9386,\n",
              "                      0.9324, 0.9305, 0.9314, 0.9304])),\n",
              "             ('conv3.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv3.3.weight',\n",
              "              tensor([[[[-1.1801e-02, -1.8566e-02, -2.0518e-02],\n",
              "                        [ 3.4084e-02, -1.4742e-02, -3.7706e-03],\n",
              "                        [-2.4387e-02,  2.3085e-02, -3.2309e-02]],\n",
              "              \n",
              "                       [[ 3.2193e-02,  1.2367e-02, -1.7977e-04],\n",
              "                        [ 2.3295e-02, -2.8499e-02, -2.2526e-04],\n",
              "                        [-2.4065e-02, -2.6750e-02, -8.3513e-03]],\n",
              "              \n",
              "                       [[ 1.0618e-02,  1.3486e-02, -3.3297e-02],\n",
              "                        [ 9.8456e-03, -2.5330e-02,  3.4022e-02],\n",
              "                        [ 2.3341e-02, -3.6040e-02, -1.9472e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-8.5821e-03, -1.2550e-02,  1.6778e-02],\n",
              "                        [-2.0753e-02,  3.0645e-02,  1.2705e-04],\n",
              "                        [-2.4521e-02,  1.2416e-02,  4.8663e-03]],\n",
              "              \n",
              "                       [[-2.8582e-02, -8.6755e-04,  1.1497e-02],\n",
              "                        [ 1.0592e-03, -1.4117e-03,  3.0422e-02],\n",
              "                        [ 1.4651e-02, -6.5788e-03, -3.3262e-02]],\n",
              "              \n",
              "                       [[ 3.0765e-03, -3.4707e-02,  6.9260e-03],\n",
              "                        [ 4.5600e-03,  1.1359e-02, -2.3952e-02],\n",
              "                        [-1.9620e-04, -2.5369e-02, -6.8784e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 2.8116e-02, -1.2206e-02, -2.7449e-02],\n",
              "                        [ 3.2775e-02, -2.5241e-02, -3.5659e-02],\n",
              "                        [ 4.4952e-03, -1.6844e-02,  2.6901e-02]],\n",
              "              \n",
              "                       [[ 2.8347e-02, -1.0532e-02, -2.4475e-02],\n",
              "                        [-2.8071e-02,  2.6922e-02, -5.9030e-03],\n",
              "                        [-2.7166e-02,  2.3242e-02, -2.5145e-02]],\n",
              "              \n",
              "                       [[-2.7196e-03,  2.6698e-02, -3.1200e-02],\n",
              "                        [-3.9093e-03, -7.7282e-03, -2.4778e-02],\n",
              "                        [ 2.1994e-02, -1.2991e-02, -1.3803e-04]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 3.2318e-02, -8.5007e-03,  8.4185e-03],\n",
              "                        [-2.1794e-02, -2.1329e-02, -1.4207e-02],\n",
              "                        [ 2.6270e-02, -2.3510e-02, -1.6654e-02]],\n",
              "              \n",
              "                       [[ 3.3468e-02, -3.3628e-02, -2.0496e-02],\n",
              "                        [ 4.2442e-03,  8.0035e-03, -1.9632e-02],\n",
              "                        [-3.5140e-02, -1.5762e-02,  1.1135e-02]],\n",
              "              \n",
              "                       [[-6.8253e-05,  5.8067e-03,  3.5831e-02],\n",
              "                        [-2.4246e-02, -1.2301e-02, -2.8000e-02],\n",
              "                        [-3.1483e-02, -3.2705e-02, -2.5694e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[-4.8159e-03, -1.3287e-02,  2.9488e-02],\n",
              "                        [-3.3841e-02,  1.3516e-03,  1.3587e-02],\n",
              "                        [ 2.2713e-02, -7.9660e-03,  1.5223e-02]],\n",
              "              \n",
              "                       [[ 1.9806e-02, -2.5199e-02,  1.7789e-02],\n",
              "                        [-1.3737e-02,  1.1508e-02, -1.5387e-02],\n",
              "                        [-2.3618e-02, -1.4316e-03, -4.8083e-05]],\n",
              "              \n",
              "                       [[-1.5982e-02, -2.2382e-02, -2.2428e-02],\n",
              "                        [ 1.2865e-02, -1.7948e-02, -3.2740e-02],\n",
              "                        [ 2.7875e-02,  2.3556e-02, -8.7440e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 4.2847e-03,  3.4053e-02,  1.5725e-02],\n",
              "                        [ 6.0067e-03,  1.5909e-02,  4.3288e-03],\n",
              "                        [ 2.9989e-02, -1.2190e-02, -3.2101e-02]],\n",
              "              \n",
              "                       [[-1.4007e-02, -3.0508e-02,  9.2489e-03],\n",
              "                        [ 1.2317e-02, -1.8913e-02, -2.4196e-03],\n",
              "                        [-1.6396e-02,  5.9529e-03, -1.3279e-02]],\n",
              "              \n",
              "                       [[-3.0266e-02, -3.0879e-02,  2.5237e-03],\n",
              "                        [-2.0458e-02,  8.1010e-03,  1.7234e-02],\n",
              "                        [ 1.2128e-02,  2.9787e-02,  3.0448e-02]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-2.8148e-02,  1.1723e-02, -1.5456e-02],\n",
              "                        [ 2.3434e-02, -1.6641e-02, -2.8771e-02],\n",
              "                        [ 7.1998e-03,  5.1079e-03,  1.8796e-02]],\n",
              "              \n",
              "                       [[ 1.0659e-02,  6.4994e-03,  1.7711e-02],\n",
              "                        [ 5.2211e-03, -2.1817e-02,  1.2504e-02],\n",
              "                        [ 2.6612e-02, -3.1603e-03, -2.4236e-02]],\n",
              "              \n",
              "                       [[ 1.3236e-02, -1.4733e-02, -1.9298e-02],\n",
              "                        [ 3.0949e-02, -7.4100e-03, -1.3477e-02],\n",
              "                        [-8.1741e-03, -1.7241e-02,  1.7690e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 2.2140e-02, -3.1315e-03, -1.9793e-03],\n",
              "                        [ 3.0220e-02,  1.2481e-02, -9.9032e-03],\n",
              "                        [ 3.9079e-04,  1.4453e-02, -1.7191e-02]],\n",
              "              \n",
              "                       [[ 3.4217e-02,  2.0893e-02, -2.9737e-02],\n",
              "                        [ 3.2030e-02,  2.8839e-02, -8.0403e-03],\n",
              "                        [ 2.1559e-02,  3.3802e-02,  2.8311e-02]],\n",
              "              \n",
              "                       [[-2.7507e-02, -5.5090e-03, -1.1744e-02],\n",
              "                        [-1.5808e-02,  3.2903e-02, -3.0254e-03],\n",
              "                        [ 3.1779e-02, -3.2069e-02,  1.2336e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[-2.6207e-02,  2.9092e-02,  6.5706e-03],\n",
              "                        [ 7.3376e-03,  2.7283e-02, -2.8272e-02],\n",
              "                        [-3.8648e-03, -2.6444e-02, -2.6856e-02]],\n",
              "              \n",
              "                       [[-1.6434e-02, -8.3997e-03, -8.2514e-03],\n",
              "                        [ 2.6541e-02, -1.4715e-03, -1.8015e-02],\n",
              "                        [ 1.6723e-02,  3.2902e-02, -1.5796e-02]],\n",
              "              \n",
              "                       [[-9.3623e-03, -3.0507e-02, -1.9195e-02],\n",
              "                        [-3.1205e-02,  1.5665e-03, -2.4454e-02],\n",
              "                        [ 1.3591e-02, -2.5780e-02,  1.0376e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-3.6453e-03,  1.7877e-04,  2.9545e-02],\n",
              "                        [ 1.2255e-02,  5.6074e-03, -8.3476e-03],\n",
              "                        [ 2.1352e-02,  1.3331e-02, -1.0337e-02]],\n",
              "              \n",
              "                       [[-2.0825e-02,  1.5773e-02,  3.1210e-02],\n",
              "                        [-1.2119e-02, -2.2064e-02,  2.8351e-02],\n",
              "                        [-3.0509e-02, -9.1071e-03, -1.0386e-02]],\n",
              "              \n",
              "                       [[ 1.2385e-02,  2.9461e-02,  4.4217e-03],\n",
              "                        [ 1.1733e-02, -3.0231e-02, -6.3601e-03],\n",
              "                        [ 2.3248e-02,  2.4174e-02,  3.2337e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[-1.2556e-03,  4.1480e-03,  1.1616e-02],\n",
              "                        [-3.2443e-02,  1.5289e-02, -2.0155e-02],\n",
              "                        [ 3.3635e-03,  3.0201e-02,  2.3028e-03]],\n",
              "              \n",
              "                       [[-1.3376e-02, -2.9219e-02, -1.1128e-02],\n",
              "                        [-2.5241e-02, -5.7242e-03, -2.2741e-03],\n",
              "                        [ 2.6626e-02,  3.1556e-02,  1.1855e-02]],\n",
              "              \n",
              "                       [[ 2.6970e-02,  9.6790e-04, -1.2816e-02],\n",
              "                        [-1.3653e-02,  7.1195e-03,  3.5845e-02],\n",
              "                        [-1.7342e-02,  1.1922e-02, -7.9555e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.9355e-02, -1.0242e-02,  3.2915e-02],\n",
              "                        [ 5.5672e-03,  7.2323e-05, -2.7569e-02],\n",
              "                        [ 2.5387e-02, -2.6023e-02, -2.1138e-02]],\n",
              "              \n",
              "                       [[-8.6626e-03, -2.9822e-02,  1.9251e-02],\n",
              "                        [ 1.1897e-02, -5.8474e-03, -1.7657e-02],\n",
              "                        [-1.6904e-03, -3.5286e-02, -2.5773e-02]],\n",
              "              \n",
              "                       [[-2.6663e-03,  2.4770e-02, -1.4246e-02],\n",
              "                        [-1.8043e-02, -1.5063e-02, -2.5553e-02],\n",
              "                        [ 1.2001e-02, -2.1010e-02,  6.8660e-03]]]])),\n",
              "             ('conv3.3.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv3.4.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1.])),\n",
              "             ('conv3.4.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv3.4.running_mean',\n",
              "              tensor([-0.0300, -0.0328,  0.0287, -0.0448, -0.0159,  0.0405, -0.0106, -0.0174,\n",
              "                       0.0051,  0.0226,  0.0240,  0.0140,  0.0096,  0.0047,  0.0173, -0.0459,\n",
              "                       0.0055,  0.0084, -0.0108, -0.0033,  0.0365,  0.0096,  0.0212, -0.0394,\n",
              "                      -0.0503, -0.0255, -0.0536, -0.0416, -0.0492,  0.0075,  0.0363,  0.0162,\n",
              "                      -0.0207,  0.0471, -0.0544,  0.0264, -0.0402, -0.0322,  0.0495, -0.0156,\n",
              "                       0.0134, -0.0270,  0.0314,  0.0233,  0.0062,  0.0370,  0.0379,  0.0214,\n",
              "                       0.0243, -0.0232,  0.0477, -0.0581, -0.0177, -0.0084, -0.0420, -0.0539,\n",
              "                       0.0080,  0.0366, -0.0052, -0.0018,  0.0250, -0.0364,  0.0299, -0.0011,\n",
              "                       0.0126,  0.0054,  0.0066, -0.0227,  0.0501,  0.0256,  0.0577, -0.0058,\n",
              "                      -0.0457,  0.0069, -0.0076, -0.0206, -0.0053, -0.0378, -0.0184,  0.0006,\n",
              "                       0.0123, -0.0127,  0.0299,  0.0211,  0.0131,  0.0590, -0.0256, -0.0496,\n",
              "                       0.0641, -0.0064,  0.0023,  0.0159, -0.0534,  0.0379, -0.0344, -0.0469,\n",
              "                       0.0177,  0.0646,  0.0289, -0.0006, -0.0190,  0.0364,  0.0299,  0.0450,\n",
              "                      -0.0367,  0.0004, -0.0184,  0.0195,  0.0511,  0.0290,  0.0114,  0.0275,\n",
              "                      -0.0697,  0.0299, -0.0152, -0.0156, -0.0510,  0.0349,  0.0115,  0.0009,\n",
              "                      -0.0204, -0.0087, -0.0175,  0.0166, -0.0150, -0.0228,  0.0341, -0.0049,\n",
              "                      -0.0490, -0.0326,  0.0018, -0.0221, -0.0807,  0.0632,  0.0272,  0.0577,\n",
              "                       0.0226, -0.0620,  0.0127,  0.0650,  0.0078, -0.0014,  0.0219,  0.0086,\n",
              "                      -0.0061, -0.0342, -0.0233, -0.0112,  0.0761,  0.0320,  0.0287,  0.0172,\n",
              "                       0.0659, -0.0319,  0.0156,  0.0528, -0.0335,  0.0141,  0.0359,  0.0677,\n",
              "                      -0.0852, -0.0453, -0.0314, -0.0252,  0.0425,  0.0075, -0.0257,  0.0285,\n",
              "                       0.0196, -0.0349, -0.0165, -0.0484, -0.0141, -0.0087,  0.0097, -0.0245,\n",
              "                      -0.0222, -0.0511, -0.0428, -0.0112,  0.0264, -0.0552, -0.0223, -0.0245,\n",
              "                       0.0432,  0.0397,  0.0115, -0.0135,  0.0277, -0.0109, -0.0307,  0.0326,\n",
              "                      -0.0531, -0.0275,  0.0629, -0.0505,  0.0146,  0.0218,  0.0027,  0.0757,\n",
              "                       0.0114, -0.0355,  0.0099,  0.0581, -0.0014,  0.0523, -0.0361, -0.0706,\n",
              "                      -0.0612,  0.0319, -0.0036,  0.0327,  0.0095,  0.0245, -0.0509,  0.0370,\n",
              "                      -0.0195, -0.0104, -0.0031,  0.0345,  0.0205, -0.0084,  0.0608,  0.0289,\n",
              "                      -0.0686, -0.0350,  0.0381, -0.0039,  0.0253, -0.0508, -0.0182,  0.0096,\n",
              "                      -0.0362, -0.0592, -0.0016, -0.0125, -0.0227,  0.0260,  0.0105,  0.0428,\n",
              "                       0.0218, -0.0282, -0.0229,  0.0067, -0.0485,  0.0086, -0.0033, -0.0137,\n",
              "                      -0.0337,  0.0423,  0.0117,  0.0160,  0.0271,  0.0030, -0.0236,  0.0049])),\n",
              "             ('conv3.4.running_var',\n",
              "              tensor([0.9312, 0.9259, 0.9243, 0.9367, 0.9291, 0.9252, 0.9271, 0.9398, 0.9263,\n",
              "                      0.9322, 0.9312, 0.9268, 0.9391, 0.9359, 0.9288, 0.9478, 0.9432, 0.9255,\n",
              "                      0.9293, 0.9317, 0.9242, 0.9285, 0.9413, 0.9394, 0.9271, 0.9361, 0.9400,\n",
              "                      0.9259, 0.9274, 0.9333, 0.9281, 0.9286, 0.9378, 0.9371, 0.9344, 0.9289,\n",
              "                      0.9408, 0.9488, 0.9406, 0.9322, 0.9409, 0.9427, 0.9296, 0.9278, 0.9327,\n",
              "                      0.9302, 0.9265, 0.9249, 0.9300, 0.9334, 0.9260, 0.9259, 0.9312, 0.9397,\n",
              "                      0.9278, 0.9289, 0.9263, 0.9307, 0.9286, 0.9283, 0.9386, 0.9300, 0.9421,\n",
              "                      0.9308, 0.9342, 0.9276, 0.9235, 0.9277, 0.9446, 0.9286, 0.9479, 0.9390,\n",
              "                      0.9369, 0.9296, 0.9286, 0.9262, 0.9318, 0.9396, 0.9321, 0.9312, 0.9299,\n",
              "                      0.9244, 0.9324, 0.9302, 0.9384, 0.9383, 0.9358, 0.9354, 0.9294, 0.9414,\n",
              "                      0.9416, 0.9283, 0.9333, 0.9322, 0.9406, 0.9326, 0.9312, 0.9381, 0.9421,\n",
              "                      0.9245, 0.9333, 0.9319, 0.9272, 0.9287, 0.9358, 0.9353, 0.9274, 0.9262,\n",
              "                      0.9296, 0.9381, 0.9268, 0.9291, 0.9402, 0.9228, 0.9278, 0.9244, 0.9300,\n",
              "                      0.9299, 0.9315, 0.9256, 0.9207, 0.9317, 0.9296, 0.9324, 0.9418, 0.9394,\n",
              "                      0.9226, 0.9313, 0.9265, 0.9346, 0.9250, 0.9307, 0.9467, 0.9349, 0.9225,\n",
              "                      0.9373, 0.9441, 0.9489, 0.9437, 0.9349, 0.9429, 0.9245, 0.9219, 0.9315,\n",
              "                      0.9330, 0.9281, 0.9515, 0.9250, 0.9478, 0.9279, 0.9260, 0.9289, 0.9530,\n",
              "                      0.9317, 0.9328, 0.9489, 0.9255, 0.9290, 0.9387, 0.9375, 0.9272, 0.9270,\n",
              "                      0.9501, 0.9432, 0.9298, 0.9251, 0.9362, 0.9294, 0.9254, 0.9237, 0.9309,\n",
              "                      0.9344, 0.9286, 0.9345, 0.9368, 0.9314, 0.9267, 0.9422, 0.9231, 0.9364,\n",
              "                      0.9340, 0.9285, 0.9261, 0.9265, 0.9278, 0.9273, 0.9237, 0.9268, 0.9378,\n",
              "                      0.9391, 0.9268, 0.9400, 0.9305, 0.9359, 0.9277, 0.9338, 0.9297, 0.9277,\n",
              "                      0.9248, 0.9312, 0.9384, 0.9245, 0.9286, 0.9382, 0.9369, 0.9422, 0.9346,\n",
              "                      0.9336, 0.9343, 0.9274, 0.9232, 0.9237, 0.9298, 0.9322, 0.9291, 0.9332,\n",
              "                      0.9276, 0.9393, 0.9270, 0.9263, 0.9487, 0.9387, 0.9384, 0.9300, 0.9372,\n",
              "                      0.9291, 0.9392, 0.9257, 0.9287, 0.9412, 0.9396, 0.9473, 0.9273, 0.9350,\n",
              "                      0.9373, 0.9358, 0.9306, 0.9380, 0.9232, 0.9308, 0.9292, 0.9315, 0.9273,\n",
              "                      0.9251, 0.9377, 0.9278, 0.9268, 0.9272, 0.9292, 0.9325, 0.9325, 0.9304,\n",
              "                      0.9212, 0.9190, 0.9305, 0.9260])),\n",
              "             ('conv3.4.num_batches_tracked', tensor(1)),\n",
              "             ('conv4.0.weight',\n",
              "              tensor([[[[ 1.1870e-02, -2.3632e-02, -2.1206e-02],\n",
              "                        [ 2.8865e-03, -2.3039e-02, -1.4795e-02],\n",
              "                        [-2.1754e-02, -1.8500e-02,  2.5636e-02]],\n",
              "              \n",
              "                       [[-1.4464e-02, -1.5994e-02, -2.7145e-02],\n",
              "                        [-1.5400e-03, -1.1719e-02,  6.0710e-03],\n",
              "                        [ 8.6157e-03, -7.1288e-03,  1.6260e-02]],\n",
              "              \n",
              "                       [[-1.2678e-02, -2.4073e-02,  7.6286e-03],\n",
              "                        [ 1.3460e-02, -2.4804e-02,  2.5792e-03],\n",
              "                        [ 2.2700e-02,  2.7090e-02, -3.3621e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.2597e-02, -2.3032e-02,  1.2096e-02],\n",
              "                        [ 2.1164e-02, -1.8593e-02,  2.5949e-02],\n",
              "                        [-8.7400e-03,  2.3150e-02, -2.9070e-02]],\n",
              "              \n",
              "                       [[-7.8367e-03,  2.5247e-04,  1.5878e-02],\n",
              "                        [ 7.6483e-03,  2.4356e-02,  6.0904e-03],\n",
              "                        [ 9.1219e-04, -2.8893e-03,  9.5072e-03]],\n",
              "              \n",
              "                       [[-1.5627e-02,  1.4074e-02,  2.6346e-03],\n",
              "                        [-1.8786e-02,  2.8517e-02,  6.4775e-03],\n",
              "                        [ 2.7414e-02,  1.4643e-03, -1.1185e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[-1.3064e-02, -7.2018e-03, -2.8415e-03],\n",
              "                        [ 1.3460e-02, -1.6683e-03, -1.1596e-02],\n",
              "                        [-1.1947e-02,  2.5139e-02,  8.3720e-03]],\n",
              "              \n",
              "                       [[ 2.5835e-02, -1.7518e-02,  3.7864e-03],\n",
              "                        [-6.2796e-03,  1.8213e-02,  4.3587e-03],\n",
              "                        [ 1.7394e-02, -2.1759e-02, -2.3320e-02]],\n",
              "              \n",
              "                       [[-1.0374e-02, -1.7852e-02,  7.8955e-03],\n",
              "                        [ 1.9507e-02, -1.7713e-03, -3.7573e-03],\n",
              "                        [ 1.4796e-02,  2.5120e-02, -2.0139e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-8.6738e-03, -1.6160e-02, -2.4226e-02],\n",
              "                        [-9.4150e-03,  4.4910e-03, -2.8102e-02],\n",
              "                        [-2.8100e-03,  8.3147e-03,  2.4080e-02]],\n",
              "              \n",
              "                       [[-1.9529e-02, -2.9833e-03,  1.3442e-02],\n",
              "                        [-7.7581e-03, -1.5992e-02, -1.7922e-02],\n",
              "                        [ 5.0636e-03,  2.5356e-02,  9.1152e-03]],\n",
              "              \n",
              "                       [[ 1.4062e-02, -2.5419e-04,  2.3879e-02],\n",
              "                        [ 2.1871e-02, -1.9573e-02,  5.3285e-03],\n",
              "                        [ 2.8893e-02,  1.2684e-02,  2.7846e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 1.3884e-02,  2.8570e-02, -2.0256e-03],\n",
              "                        [ 1.0826e-02, -2.6466e-02, -1.6389e-02],\n",
              "                        [ 1.7591e-02,  2.4078e-02,  1.3643e-02]],\n",
              "              \n",
              "                       [[ 1.6922e-02, -9.1330e-03,  1.6638e-02],\n",
              "                        [ 2.8235e-02, -6.1361e-03,  1.4983e-02],\n",
              "                        [-1.6756e-02,  7.1569e-03, -2.8004e-02]],\n",
              "              \n",
              "                       [[-3.5095e-03, -9.2463e-04,  1.4096e-02],\n",
              "                        [-2.6870e-02, -1.5119e-03,  1.7721e-02],\n",
              "                        [-1.5906e-02, -2.8005e-02, -2.2231e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.8477e-02, -6.6543e-03, -1.7209e-02],\n",
              "                        [ 1.7794e-02,  2.6781e-02,  1.9055e-02],\n",
              "                        [-5.9438e-03,  1.3490e-02, -1.0357e-02]],\n",
              "              \n",
              "                       [[ 1.0470e-02, -1.3191e-02,  3.8999e-03],\n",
              "                        [ 1.0086e-02, -1.8533e-02, -1.2288e-02],\n",
              "                        [ 5.4370e-03,  2.6697e-02, -8.8504e-03]],\n",
              "              \n",
              "                       [[-2.5871e-02,  1.8247e-02, -1.9422e-03],\n",
              "                        [ 3.6499e-03, -1.0120e-02, -2.1828e-02],\n",
              "                        [-1.8887e-03, -1.2618e-02, -2.6003e-02]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 2.5706e-02, -2.2316e-03,  3.7277e-04],\n",
              "                        [-2.3299e-02,  2.4396e-02, -1.1977e-02],\n",
              "                        [ 1.8771e-02, -2.9349e-02,  1.6217e-02]],\n",
              "              \n",
              "                       [[-4.7302e-03,  1.2701e-02, -1.8587e-02],\n",
              "                        [ 2.9525e-03,  4.6274e-03, -2.2440e-03],\n",
              "                        [ 1.7900e-02,  5.9170e-03,  6.2119e-03]],\n",
              "              \n",
              "                       [[-3.9327e-03, -2.3813e-02,  4.4851e-04],\n",
              "                        [ 1.9064e-02, -3.0755e-03, -1.2892e-02],\n",
              "                        [-1.3286e-02,  2.0363e-02, -7.7654e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 2.5647e-02, -1.5206e-02,  2.3282e-02],\n",
              "                        [ 2.8918e-02,  2.4993e-02,  2.4000e-02],\n",
              "                        [ 1.1846e-02, -3.4186e-03, -1.8661e-02]],\n",
              "              \n",
              "                       [[-2.2789e-02,  3.0692e-03, -1.9019e-02],\n",
              "                        [-4.5988e-03, -1.2456e-02, -1.5034e-02],\n",
              "                        [-1.9463e-02, -9.7795e-03,  2.3152e-02]],\n",
              "              \n",
              "                       [[-2.6517e-02, -2.5024e-02,  1.3549e-02],\n",
              "                        [ 1.6278e-02,  1.3228e-02,  1.6698e-02],\n",
              "                        [-1.4791e-02,  1.9213e-02, -1.1155e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 2.8336e-02,  5.6402e-03,  4.7603e-03],\n",
              "                        [-9.8050e-03, -2.3494e-02,  2.4853e-02],\n",
              "                        [-1.8806e-02,  2.7416e-02,  2.7892e-02]],\n",
              "              \n",
              "                       [[-2.9278e-02, -1.8035e-02, -3.9400e-03],\n",
              "                        [-2.3553e-02, -1.2135e-02, -1.1560e-02],\n",
              "                        [ 2.6257e-02,  1.7537e-02, -9.9361e-05]],\n",
              "              \n",
              "                       [[ 5.6129e-03, -1.9704e-02,  8.3896e-03],\n",
              "                        [ 1.1953e-03,  2.1385e-02,  1.7664e-02],\n",
              "                        [-9.8949e-03, -6.8290e-03, -4.3497e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.9027e-02,  2.5682e-02,  7.6616e-03],\n",
              "                        [-2.5857e-02, -1.2927e-02,  1.5581e-02],\n",
              "                        [-1.4251e-02, -2.5444e-02, -1.7148e-02]],\n",
              "              \n",
              "                       [[-2.3971e-02, -6.7889e-03,  1.2537e-02],\n",
              "                        [-8.9771e-03,  2.3685e-02,  2.0425e-02],\n",
              "                        [-1.1507e-02, -2.8644e-03,  1.1179e-02]],\n",
              "              \n",
              "                       [[-1.6889e-02, -6.2995e-03,  3.0366e-03],\n",
              "                        [-4.4609e-03, -8.5101e-03,  1.4392e-02],\n",
              "                        [ 2.5765e-02,  2.5809e-02,  1.4017e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 1.0390e-02, -2.8261e-03, -1.8914e-02],\n",
              "                        [-2.3653e-02, -2.4662e-02, -1.9766e-02],\n",
              "                        [-1.8885e-02, -7.5116e-03, -1.8077e-02]],\n",
              "              \n",
              "                       [[-2.0087e-02, -8.7840e-04, -2.1486e-02],\n",
              "                        [-5.3079e-03,  1.8885e-02,  2.8821e-03],\n",
              "                        [ 4.0082e-03,  2.8081e-02,  1.6936e-02]],\n",
              "              \n",
              "                       [[-8.2539e-03, -2.3739e-02, -9.7699e-03],\n",
              "                        [ 2.2157e-02, -5.7497e-03,  9.0995e-05],\n",
              "                        [ 3.8487e-03,  2.9755e-03,  2.7415e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.3887e-02, -7.5935e-04,  6.9715e-03],\n",
              "                        [-2.8211e-02, -2.8741e-03, -2.2405e-02],\n",
              "                        [-9.8141e-03,  2.7067e-02, -1.9940e-02]],\n",
              "              \n",
              "                       [[-6.8661e-03, -8.9321e-03,  1.6840e-03],\n",
              "                        [ 2.2412e-04, -1.5373e-02,  1.2724e-02],\n",
              "                        [-1.9952e-02,  2.9451e-02,  2.8993e-02]],\n",
              "              \n",
              "                       [[ 1.7032e-02, -2.8915e-02,  1.9650e-02],\n",
              "                        [ 1.7652e-02,  2.5167e-03, -1.1129e-02],\n",
              "                        [-8.7456e-03, -1.9558e-03,  2.5063e-02]]]])),\n",
              "             ('conv4.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv4.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv4.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv4.1.running_mean',\n",
              "              tensor([-0.0388, -0.0322, -0.0428,  0.0142,  0.0523,  0.0069, -0.0496,  0.0160,\n",
              "                       0.0072,  0.0062, -0.0165,  0.0205, -0.0988, -0.0054, -0.0845, -0.0177,\n",
              "                      -0.0074, -0.0149, -0.0402,  0.0398, -0.0206, -0.0377,  0.0171,  0.0549,\n",
              "                       0.0249, -0.0392, -0.0871, -0.0736,  0.0462,  0.0166, -0.0675, -0.0108,\n",
              "                      -0.0664,  0.0768,  0.0220, -0.0122,  0.0102, -0.0062,  0.0545,  0.0359,\n",
              "                       0.0151, -0.0147,  0.0276,  0.0580,  0.0847,  0.0889, -0.0292, -0.0344,\n",
              "                       0.0407,  0.0212, -0.0272, -0.0157,  0.0310,  0.0346,  0.0243, -0.0206,\n",
              "                      -0.0261,  0.0286, -0.0505,  0.0051, -0.0505, -0.0531,  0.0187, -0.0295,\n",
              "                       0.0192, -0.0708, -0.0415, -0.0677, -0.0912, -0.0789, -0.0477, -0.0073,\n",
              "                      -0.0901,  0.0343,  0.0179, -0.0582, -0.0905, -0.0233, -0.0197,  0.0513,\n",
              "                       0.0314, -0.0386, -0.0082, -0.0498,  0.0281, -0.0349, -0.0055, -0.0336,\n",
              "                       0.0588, -0.0047,  0.0406, -0.0484, -0.1159,  0.0026,  0.0307,  0.0190,\n",
              "                       0.1074, -0.0535,  0.0024,  0.0414,  0.0364, -0.0471,  0.0519,  0.1161,\n",
              "                       0.0044,  0.0219,  0.0091, -0.0038, -0.0457, -0.0464, -0.0361,  0.0096,\n",
              "                       0.0104, -0.0831, -0.0724, -0.1092,  0.0151, -0.0360,  0.0466, -0.1108,\n",
              "                      -0.0293,  0.0015,  0.0393,  0.0364,  0.0860,  0.1393, -0.0083, -0.0172,\n",
              "                      -0.0089, -0.0323, -0.0111, -0.0216,  0.0347,  0.0204,  0.0182, -0.0020,\n",
              "                       0.0865, -0.0317, -0.0376,  0.0582,  0.0421, -0.0683,  0.1228, -0.0220,\n",
              "                      -0.0912, -0.0283, -0.0683,  0.0490,  0.0594, -0.1354, -0.0688, -0.0115,\n",
              "                      -0.0093,  0.0215,  0.0156,  0.0370,  0.0031, -0.0085, -0.0279, -0.0184,\n",
              "                      -0.0057, -0.0507,  0.0389,  0.0351,  0.0829,  0.0014, -0.0764, -0.0989,\n",
              "                       0.0320,  0.0126,  0.0110,  0.0554,  0.0969, -0.1333, -0.0011, -0.0091,\n",
              "                       0.0083, -0.0393, -0.0320, -0.0165, -0.1073,  0.0461,  0.0356,  0.0293,\n",
              "                      -0.0090,  0.0159, -0.0073, -0.0040, -0.0476,  0.0021,  0.0685, -0.0343,\n",
              "                      -0.0208, -0.0690,  0.0017,  0.0357, -0.0122,  0.0480, -0.0331, -0.0145,\n",
              "                       0.0304,  0.0029, -0.0893,  0.0400, -0.0918,  0.0037, -0.0182, -0.0159,\n",
              "                      -0.0482,  0.0628,  0.0453,  0.0239, -0.0402,  0.0569, -0.0322,  0.0494,\n",
              "                      -0.0479, -0.0279,  0.0137, -0.0312,  0.0183, -0.0221, -0.0232, -0.0175,\n",
              "                       0.0931,  0.0006, -0.0129, -0.0496,  0.0073,  0.0855, -0.0016,  0.0320,\n",
              "                       0.0190,  0.0204,  0.0243,  0.0072,  0.0553, -0.0372, -0.0121, -0.0400,\n",
              "                      -0.0197, -0.0735,  0.0120, -0.0306, -0.0458,  0.0016, -0.0717, -0.0231,\n",
              "                       0.1479,  0.0061,  0.0649, -0.0258, -0.0259,  0.0037, -0.0351,  0.0273,\n",
              "                      -0.0677,  0.0309,  0.0371, -0.0130,  0.0734,  0.0064,  0.1369,  0.0591,\n",
              "                      -0.0029,  0.0023, -0.0062,  0.0356, -0.0391,  0.0348,  0.0202,  0.0225,\n",
              "                      -0.0663, -0.0372,  0.0091,  0.0351, -0.0701, -0.0940,  0.0058,  0.0424,\n",
              "                       0.0641, -0.0034, -0.0695, -0.0444,  0.0685, -0.0911, -0.0119,  0.0561,\n",
              "                       0.0546, -0.0199, -0.0263, -0.0513,  0.0105, -0.0506,  0.0517, -0.0052,\n",
              "                       0.0104,  0.0105, -0.0194,  0.0166, -0.0522,  0.0592,  0.0459, -0.0149,\n",
              "                      -0.0148,  0.0206,  0.0483, -0.0737, -0.0621, -0.1252, -0.0599,  0.0516,\n",
              "                      -0.0196,  0.0123, -0.1025,  0.0028,  0.0686, -0.0294, -0.0118, -0.0077,\n",
              "                       0.0713, -0.0013, -0.0883,  0.0611, -0.0455,  0.0434, -0.0816,  0.0682,\n",
              "                       0.0505,  0.0558,  0.0328, -0.0361,  0.0496,  0.0994,  0.0613, -0.0712,\n",
              "                       0.0258,  0.0432, -0.0190,  0.0537, -0.0328, -0.0384, -0.0573,  0.0269,\n",
              "                       0.0827,  0.0250, -0.0107,  0.0269,  0.0281,  0.0248, -0.0063, -0.0134,\n",
              "                       0.0181,  0.0560,  0.0506,  0.0912,  0.0019, -0.0160, -0.0317, -0.0690,\n",
              "                      -0.0219, -0.0231,  0.0742, -0.0182, -0.0288, -0.0380, -0.0226,  0.0288,\n",
              "                       0.0082,  0.0244,  0.0267,  0.0173, -0.0241, -0.0400,  0.0422, -0.0139,\n",
              "                      -0.0027, -0.0356,  0.0168,  0.0576, -0.0303,  0.0022, -0.0663, -0.0212,\n",
              "                      -0.0583, -0.0170, -0.0306, -0.0583,  0.0447,  0.0291,  0.0481,  0.0457,\n",
              "                      -0.1180, -0.0319, -0.0331,  0.1192,  0.0082,  0.0696,  0.0059, -0.0074,\n",
              "                       0.0235, -0.0672, -0.0325, -0.0239,  0.0452, -0.0086,  0.0130, -0.1281,\n",
              "                       0.0063, -0.0636, -0.0463, -0.0121,  0.0580,  0.0044,  0.0167,  0.1002,\n",
              "                      -0.0023,  0.0308,  0.0165,  0.0011, -0.0531, -0.0607,  0.0136,  0.0387,\n",
              "                       0.0637,  0.0458,  0.0925,  0.0166, -0.0569, -0.0304,  0.0047,  0.0081,\n",
              "                       0.0255,  0.0595,  0.0748, -0.0897,  0.0488,  0.0628, -0.0014, -0.0389,\n",
              "                       0.0317,  0.0829, -0.0506, -0.0598,  0.0586, -0.0856,  0.0040, -0.0575,\n",
              "                      -0.0300, -0.0510, -0.0139, -0.0280, -0.0081, -0.0569,  0.0331,  0.1183,\n",
              "                      -0.0515,  0.0690, -0.0284,  0.0149, -0.0072,  0.0621, -0.0264,  0.0158,\n",
              "                       0.0158, -0.1315, -0.0383, -0.0132,  0.1085,  0.0515,  0.0069, -0.0041,\n",
              "                      -0.0574,  0.0398,  0.0143, -0.0656,  0.1039,  0.1139, -0.0435,  0.0197,\n",
              "                       0.0306, -0.0025,  0.0691,  0.0631, -0.0141, -0.0076,  0.0855, -0.0192,\n",
              "                       0.0105, -0.0573, -0.0092, -0.0190,  0.0045,  0.0379,  0.0294, -0.0520,\n",
              "                      -0.0255, -0.0571, -0.0133, -0.0053,  0.0313, -0.0276, -0.0500,  0.0612,\n",
              "                      -0.0621,  0.0197, -0.0243,  0.1078,  0.0286, -0.0406, -0.0389,  0.1083])),\n",
              "             ('conv4.1.running_var',\n",
              "              tensor([0.9379, 0.9317, 0.9349, 0.9491, 0.9266, 0.9247, 0.9289, 0.9340, 0.9293,\n",
              "                      0.9217, 0.9301, 0.9353, 0.9385, 0.9255, 0.9693, 0.9263, 0.9310, 0.9345,\n",
              "                      0.9293, 0.9484, 0.9286, 0.9429, 0.9368, 0.9348, 0.9263, 0.9443, 0.9288,\n",
              "                      0.9268, 0.9574, 0.9573, 0.9261, 0.9303, 0.9317, 0.9489, 0.9261, 0.9293,\n",
              "                      0.9284, 0.9470, 0.9275, 0.9551, 0.9297, 0.9233, 0.9384, 0.9346, 0.9316,\n",
              "                      0.9444, 0.9362, 0.9368, 0.9281, 0.9371, 0.9268, 0.9261, 0.9253, 0.9394,\n",
              "                      0.9505, 0.9415, 0.9288, 0.9446, 0.9350, 0.9319, 0.9549, 0.9294, 0.9271,\n",
              "                      0.9221, 0.9302, 0.9445, 0.9529, 0.9290, 0.9390, 0.9490, 0.9370, 0.9223,\n",
              "                      0.9603, 0.9282, 0.9253, 0.9393, 0.9560, 0.9229, 0.9434, 0.9428, 0.9236,\n",
              "                      0.9306, 0.9284, 0.9540, 0.9280, 0.9244, 0.9287, 0.9266, 0.9542, 0.9368,\n",
              "                      0.9397, 0.9285, 0.9660, 0.9335, 0.9305, 0.9430, 0.9465, 0.9187, 0.9289,\n",
              "                      0.9441, 0.9336, 0.9426, 0.9316, 0.9499, 0.9222, 0.9280, 0.9225, 0.9294,\n",
              "                      0.9504, 0.9408, 0.9324, 0.9261, 0.9461, 0.9598, 0.9291, 0.9380, 0.9510,\n",
              "                      0.9297, 0.9435, 0.9416, 0.9476, 0.9269, 0.9237, 0.9336, 0.9361, 0.9535,\n",
              "                      0.9346, 0.9364, 0.9402, 0.9450, 0.9555, 0.9312, 0.9289, 0.9550, 0.9312,\n",
              "                      0.9270, 0.9490, 0.9291, 0.9366, 0.9584, 0.9441, 0.9340, 0.9690, 0.9375,\n",
              "                      0.9597, 0.9221, 0.9385, 0.9320, 0.9398, 0.9400, 0.9434, 0.9278, 0.9277,\n",
              "                      0.9484, 0.9301, 0.9273, 0.9192, 0.9390, 0.9204, 0.9442, 0.9344, 0.9445,\n",
              "                      0.9487, 0.9270, 0.9443, 0.9382, 0.9483, 0.9435, 0.9248, 0.9407, 0.9314,\n",
              "                      0.9437, 0.9292, 0.9470, 0.9358, 0.9302, 0.9260, 0.9398, 0.9218, 0.9299,\n",
              "                      0.9467, 0.9443, 0.9283, 0.9216, 0.9326, 0.9353, 0.9517, 0.9476, 0.9331,\n",
              "                      0.9254, 0.9493, 0.9456, 0.9296, 0.9399, 0.9309, 0.9307, 0.9347, 0.9309,\n",
              "                      0.9314, 0.9287, 0.9389, 0.9225, 0.9351, 0.9361, 0.9481, 0.9312, 0.9346,\n",
              "                      0.9402, 0.9347, 0.9548, 0.9378, 0.9419, 0.9232, 0.9317, 0.9511, 0.9340,\n",
              "                      0.9288, 0.9254, 0.9324, 0.9275, 0.9267, 0.9582, 0.9217, 0.9355, 0.9429,\n",
              "                      0.9272, 0.9254, 0.9272, 0.9209, 0.9447, 0.9237, 0.9215, 0.9596, 0.9254,\n",
              "                      0.9242, 0.9281, 0.9455, 0.9332, 0.9325, 0.9238, 0.9348, 0.9537, 0.9418,\n",
              "                      0.9329, 0.9342, 0.9374, 0.9310, 0.9357, 0.9523, 0.9581, 0.9552, 0.9266,\n",
              "                      0.9260, 0.9339, 0.9303, 0.9322, 0.9356, 0.9231, 0.9322, 0.9289, 0.9411,\n",
              "                      0.9236, 0.9450, 0.9314, 0.9300, 0.9351, 0.9426, 0.9474, 0.9430, 0.9284,\n",
              "                      0.9388, 0.9388, 0.9553, 0.9288, 0.9222, 0.9422, 0.9551, 0.9339, 0.9273,\n",
              "                      0.9317, 0.9381, 0.9288, 0.9320, 0.9406, 0.9276, 0.9484, 0.9230, 0.9647,\n",
              "                      0.9700, 0.9432, 0.9362, 0.9239, 0.9297, 0.9501, 0.9277, 0.9383, 0.9952,\n",
              "                      0.9313, 0.9292, 0.9369, 0.9506, 0.9440, 0.9298, 0.9230, 0.9251, 0.9392,\n",
              "                      0.9352, 0.9355, 0.9402, 0.9579, 0.9332, 0.9371, 0.9295, 0.9333, 0.9430,\n",
              "                      0.9312, 0.9268, 0.9369, 0.9205, 0.9246, 0.9275, 0.9282, 0.9383, 0.9322,\n",
              "                      0.9454, 0.9452, 0.9302, 0.9353, 0.9473, 0.9342, 0.9361, 0.9399, 0.9534,\n",
              "                      0.9430, 0.9268, 0.9214, 0.9357, 0.9277, 0.9273, 0.9324, 0.9284, 0.9331,\n",
              "                      0.9408, 0.9278, 0.9444, 0.9259, 0.9268, 0.9319, 0.9574, 0.9343, 0.9250,\n",
              "                      0.9291, 0.9340, 0.9269, 0.9331, 0.9324, 0.9349, 0.9321, 0.9312, 0.9560,\n",
              "                      0.9267, 0.9264, 0.9639, 0.9358, 0.9288, 0.9331, 0.9254, 0.9313, 0.9316,\n",
              "                      0.9278, 0.9427, 0.9386, 0.9258, 0.9502, 0.9309, 0.9295, 0.9517, 0.9326,\n",
              "                      0.9359, 0.9330, 0.9397, 0.9309, 0.9427, 0.9511, 0.9616, 0.9410, 0.9269,\n",
              "                      0.9324, 0.9336, 0.9302, 0.9290, 0.9323, 0.9409, 0.9437, 0.9347, 0.9502,\n",
              "                      0.9420, 0.9270, 0.9328, 0.9426, 0.9289, 0.9335, 0.9286, 0.9532, 0.9274,\n",
              "                      0.9354, 0.9295, 0.9459, 0.9249, 0.9327, 0.9321, 0.9244, 0.9203, 0.9241,\n",
              "                      0.9297, 0.9422, 0.9308, 0.9231, 0.9316, 0.9322, 0.9419, 0.9572, 0.9255,\n",
              "                      0.9318, 0.9498, 0.9356, 0.9300, 0.9491, 0.9346, 0.9273, 0.9408, 0.9198,\n",
              "                      0.9446, 0.9318, 0.9224, 0.9507, 0.9202, 0.9359, 0.9457, 0.9349, 0.9316,\n",
              "                      0.9698, 0.9269, 0.9293, 0.9301, 0.9679, 0.9292, 0.9374, 0.9298, 0.9388,\n",
              "                      0.9291, 0.9252, 0.9321, 0.9423, 0.9270, 0.9369, 0.9285, 0.9390, 0.9398,\n",
              "                      0.9381, 0.9308, 0.9411, 0.9538, 0.9217, 0.9356, 0.9745, 0.9308, 0.9323,\n",
              "                      0.9409, 0.9435, 0.9240, 0.9270, 0.9357, 0.9526, 0.9255, 0.9642, 0.9574,\n",
              "                      0.9353, 0.9278, 0.9255, 0.9409, 0.9241, 0.9279, 0.9498, 0.9315, 0.9273,\n",
              "                      0.9498, 0.9522, 0.9381, 0.9365, 0.9269, 0.9221, 0.9500, 0.9381, 0.9266,\n",
              "                      0.9342, 0.9334, 0.9330, 0.9285, 0.9257, 0.9312, 0.9396, 0.9359, 0.9411,\n",
              "                      0.9541, 0.9274, 0.9234, 0.9366, 0.9284, 0.9396, 0.9404, 0.9476])),\n",
              "             ('conv4.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv4.3.weight',\n",
              "              tensor([[[[ 0.0141,  0.0038, -0.0247],\n",
              "                        [ 0.0028,  0.0022, -0.0153],\n",
              "                        [ 0.0182, -0.0029,  0.0008]],\n",
              "              \n",
              "                       [[ 0.0072, -0.0244,  0.0222],\n",
              "                        [-0.0113, -0.0174,  0.0225],\n",
              "                        [ 0.0093, -0.0085,  0.0007]],\n",
              "              \n",
              "                       [[ 0.0029, -0.0091, -0.0086],\n",
              "                        [ 0.0012, -0.0143, -0.0190],\n",
              "                        [ 0.0093, -0.0024, -0.0253]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0055, -0.0175, -0.0112],\n",
              "                        [ 0.0243,  0.0096, -0.0248],\n",
              "                        [ 0.0156,  0.0084,  0.0058]],\n",
              "              \n",
              "                       [[ 0.0026, -0.0169,  0.0025],\n",
              "                        [-0.0012,  0.0073, -0.0067],\n",
              "                        [-0.0168,  0.0194,  0.0147]],\n",
              "              \n",
              "                       [[-0.0023,  0.0246, -0.0020],\n",
              "                        [ 0.0095, -0.0194, -0.0177],\n",
              "                        [ 0.0034, -0.0215, -0.0115]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0027,  0.0218, -0.0051],\n",
              "                        [ 0.0133,  0.0118, -0.0155],\n",
              "                        [-0.0186,  0.0091,  0.0233]],\n",
              "              \n",
              "                       [[-0.0086, -0.0008,  0.0190],\n",
              "                        [-0.0185, -0.0182, -0.0186],\n",
              "                        [ 0.0117, -0.0221, -0.0181]],\n",
              "              \n",
              "                       [[ 0.0095,  0.0057,  0.0101],\n",
              "                        [-0.0178,  0.0171,  0.0161],\n",
              "                        [-0.0242, -0.0018, -0.0165]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0104, -0.0035,  0.0253],\n",
              "                        [ 0.0033,  0.0192,  0.0080],\n",
              "                        [-0.0078, -0.0053, -0.0043]],\n",
              "              \n",
              "                       [[-0.0011, -0.0004,  0.0234],\n",
              "                        [ 0.0172,  0.0212,  0.0131],\n",
              "                        [ 0.0250,  0.0049,  0.0230]],\n",
              "              \n",
              "                       [[-0.0041, -0.0085, -0.0115],\n",
              "                        [ 0.0054, -0.0054, -0.0154],\n",
              "                        [-0.0216, -0.0035, -0.0188]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0077, -0.0016,  0.0188],\n",
              "                        [-0.0205, -0.0200, -0.0203],\n",
              "                        [-0.0048,  0.0136, -0.0155]],\n",
              "              \n",
              "                       [[ 0.0016, -0.0142,  0.0018],\n",
              "                        [ 0.0243, -0.0058,  0.0252],\n",
              "                        [ 0.0124,  0.0165, -0.0103]],\n",
              "              \n",
              "                       [[ 0.0110, -0.0200,  0.0059],\n",
              "                        [-0.0005,  0.0227,  0.0122],\n",
              "                        [-0.0023, -0.0200,  0.0184]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0039, -0.0119, -0.0088],\n",
              "                        [-0.0201,  0.0075,  0.0106],\n",
              "                        [-0.0181, -0.0095,  0.0103]],\n",
              "              \n",
              "                       [[-0.0095,  0.0206,  0.0172],\n",
              "                        [-0.0147, -0.0038,  0.0095],\n",
              "                        [-0.0006, -0.0096,  0.0021]],\n",
              "              \n",
              "                       [[ 0.0025, -0.0083, -0.0031],\n",
              "                        [-0.0221, -0.0164,  0.0104],\n",
              "                        [-0.0155,  0.0151, -0.0170]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-0.0234,  0.0078, -0.0037],\n",
              "                        [-0.0084, -0.0193, -0.0009],\n",
              "                        [-0.0063, -0.0058, -0.0112]],\n",
              "              \n",
              "                       [[ 0.0006,  0.0179,  0.0032],\n",
              "                        [ 0.0219, -0.0004, -0.0207],\n",
              "                        [-0.0006, -0.0132, -0.0251]],\n",
              "              \n",
              "                       [[ 0.0131,  0.0212,  0.0099],\n",
              "                        [ 0.0048, -0.0108, -0.0011],\n",
              "                        [ 0.0120,  0.0175, -0.0201]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0215,  0.0135, -0.0019],\n",
              "                        [-0.0004, -0.0199,  0.0221],\n",
              "                        [ 0.0252,  0.0113, -0.0024]],\n",
              "              \n",
              "                       [[ 0.0088,  0.0003, -0.0027],\n",
              "                        [-0.0249, -0.0056,  0.0231],\n",
              "                        [ 0.0172,  0.0219,  0.0205]],\n",
              "              \n",
              "                       [[ 0.0108, -0.0126,  0.0175],\n",
              "                        [-0.0160, -0.0036, -0.0011],\n",
              "                        [ 0.0103,  0.0100,  0.0115]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0157, -0.0200, -0.0065],\n",
              "                        [-0.0032,  0.0131, -0.0030],\n",
              "                        [-0.0162,  0.0150, -0.0032]],\n",
              "              \n",
              "                       [[-0.0083, -0.0184,  0.0053],\n",
              "                        [ 0.0124, -0.0099,  0.0150],\n",
              "                        [ 0.0116,  0.0207,  0.0217]],\n",
              "              \n",
              "                       [[ 0.0255, -0.0180, -0.0231],\n",
              "                        [ 0.0208, -0.0200,  0.0026],\n",
              "                        [ 0.0232, -0.0070, -0.0117]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0237, -0.0050, -0.0068],\n",
              "                        [-0.0208, -0.0164,  0.0031],\n",
              "                        [-0.0038,  0.0042, -0.0170]],\n",
              "              \n",
              "                       [[ 0.0211,  0.0144,  0.0125],\n",
              "                        [ 0.0194,  0.0189, -0.0055],\n",
              "                        [-0.0090,  0.0048, -0.0076]],\n",
              "              \n",
              "                       [[ 0.0126, -0.0176, -0.0203],\n",
              "                        [-0.0217, -0.0099,  0.0162],\n",
              "                        [ 0.0187,  0.0136,  0.0063]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0138, -0.0073,  0.0067],\n",
              "                        [-0.0056, -0.0193, -0.0251],\n",
              "                        [ 0.0229, -0.0028, -0.0085]],\n",
              "              \n",
              "                       [[-0.0128,  0.0043,  0.0039],\n",
              "                        [-0.0236,  0.0251, -0.0168],\n",
              "                        [ 0.0228, -0.0081, -0.0023]],\n",
              "              \n",
              "                       [[-0.0147, -0.0011, -0.0222],\n",
              "                        [-0.0089,  0.0050, -0.0069],\n",
              "                        [-0.0157,  0.0220,  0.0155]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0127, -0.0003,  0.0020],\n",
              "                        [-0.0172, -0.0144, -0.0165],\n",
              "                        [ 0.0032,  0.0236,  0.0159]],\n",
              "              \n",
              "                       [[ 0.0238, -0.0055, -0.0084],\n",
              "                        [-0.0174, -0.0016,  0.0149],\n",
              "                        [ 0.0047, -0.0151,  0.0061]],\n",
              "              \n",
              "                       [[-0.0001,  0.0127,  0.0106],\n",
              "                        [-0.0134, -0.0182,  0.0094],\n",
              "                        [-0.0155,  0.0018, -0.0073]]]])),\n",
              "             ('conv4.3.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv4.4.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv4.4.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv4.4.running_mean',\n",
              "              tensor([-1.1745e-02,  3.1460e-02,  9.0923e-03,  6.3283e-03, -2.8441e-02,\n",
              "                       2.7938e-02,  2.1717e-02,  3.3021e-02,  3.6297e-03, -3.0541e-02,\n",
              "                       7.9750e-03, -5.5176e-02, -7.1784e-02, -3.8582e-03,  3.2349e-02,\n",
              "                       3.4521e-02,  2.4197e-02, -1.5102e-03, -3.0424e-02,  1.3199e-02,\n",
              "                      -6.1790e-02, -5.7338e-02,  1.9525e-02, -4.3969e-02, -1.0358e-02,\n",
              "                       1.8708e-03,  1.0973e-02,  2.3772e-02, -4.0652e-03,  1.7292e-02,\n",
              "                       1.5837e-02,  2.8500e-02, -7.9337e-04, -3.9143e-02,  1.9212e-02,\n",
              "                      -1.6927e-02,  3.7822e-02,  5.7585e-02, -2.3451e-02, -2.9857e-02,\n",
              "                       3.1555e-02, -3.8739e-03, -9.5974e-04, -5.4417e-02,  2.3090e-02,\n",
              "                      -3.8761e-03,  1.0876e-02,  3.3491e-02,  4.0032e-03, -3.1758e-02,\n",
              "                      -2.1801e-02,  5.4333e-02,  6.9004e-03,  1.4288e-02, -3.5682e-02,\n",
              "                       1.6991e-02,  1.1273e-02,  7.3267e-03,  1.7898e-02,  1.3270e-02,\n",
              "                       3.1879e-02,  2.2026e-02, -8.1816e-03, -1.7116e-02, -1.4191e-02,\n",
              "                       2.0632e-03, -1.4591e-02, -2.7898e-03,  4.2223e-02, -4.8778e-02,\n",
              "                      -7.7281e-03, -6.8553e-03, -1.5336e-02, -4.6742e-02,  1.3832e-02,\n",
              "                      -1.6602e-02, -2.8662e-03, -2.5524e-02,  2.2709e-02, -4.8794e-03,\n",
              "                      -3.2918e-02, -2.8655e-02, -5.3155e-02,  3.0485e-02, -5.7828e-02,\n",
              "                      -3.9390e-02,  2.3570e-02,  3.3058e-02, -3.7884e-02,  1.5274e-03,\n",
              "                       2.2019e-02, -1.8964e-02, -2.0476e-02, -4.1189e-02,  6.5269e-04,\n",
              "                       1.0545e-02, -3.8412e-02, -5.3991e-02,  4.5313e-02, -3.2877e-02,\n",
              "                      -3.9487e-02,  8.5831e-03, -1.0807e-02, -4.2398e-03,  4.5227e-02,\n",
              "                       1.5230e-02,  2.8660e-02,  5.7240e-02,  7.4855e-02,  2.5483e-02,\n",
              "                      -3.6809e-02, -1.3001e-02,  9.3967e-03, -3.3352e-02,  4.0533e-02,\n",
              "                       6.6835e-02, -3.4277e-02, -1.0817e-02,  1.5096e-02, -3.2268e-02,\n",
              "                      -2.1362e-02,  3.0772e-02, -1.0597e-02, -2.1141e-02,  2.9537e-02,\n",
              "                      -4.5576e-02,  7.2314e-03, -1.7210e-02, -3.7901e-02,  4.4672e-02,\n",
              "                      -1.6414e-02, -1.1490e-02, -2.5083e-02, -8.8326e-03, -2.4252e-02,\n",
              "                      -2.5184e-02,  8.3790e-03,  1.0582e-03,  6.8638e-02,  1.3574e-02,\n",
              "                      -2.2017e-02, -1.4499e-02,  9.3245e-03, -1.7604e-02,  3.9933e-02,\n",
              "                       3.9512e-02, -1.5357e-02, -3.1822e-02, -1.6621e-02,  2.7653e-02,\n",
              "                      -3.0085e-02, -6.7198e-03,  8.4551e-03, -1.1769e-02,  8.1922e-03,\n",
              "                      -1.0682e-02, -1.8748e-02,  4.4693e-02,  3.3967e-02,  1.2934e-02,\n",
              "                       3.3189e-02,  4.2391e-02, -7.1651e-02, -1.6279e-02, -1.2771e-02,\n",
              "                       3.8766e-03,  3.6052e-03, -2.1627e-02,  1.0436e-02, -2.2121e-02,\n",
              "                       3.8995e-02, -1.6482e-02,  3.9790e-02, -8.7815e-03,  3.4815e-02,\n",
              "                       2.8025e-02,  1.1383e-02, -1.8114e-02, -4.4724e-02,  3.1357e-02,\n",
              "                      -4.5815e-03,  5.8327e-03,  3.5338e-02, -5.7933e-02,  5.9909e-03,\n",
              "                       3.1222e-02,  3.4798e-02,  1.7793e-02, -1.9102e-02, -2.6160e-02,\n",
              "                      -1.1769e-02, -4.2891e-03,  1.7628e-02, -1.5777e-02, -2.1159e-02,\n",
              "                       1.1894e-02,  6.3278e-02, -5.7171e-02, -7.9118e-03, -2.1180e-02,\n",
              "                      -9.3862e-03, -1.4378e-02, -2.5301e-02,  4.9828e-02, -2.5382e-02,\n",
              "                      -2.0520e-02, -1.1865e-02,  5.9899e-02, -1.2187e-02,  1.0411e-02,\n",
              "                      -6.9483e-02,  3.1396e-02, -4.7259e-03,  1.6416e-03,  2.0822e-03,\n",
              "                      -3.6151e-04,  4.6514e-02, -1.0467e-02, -1.5616e-02, -3.4969e-02,\n",
              "                       1.4395e-02,  4.0078e-02,  5.3203e-03,  3.5528e-02,  8.0913e-04,\n",
              "                       2.4511e-03, -3.6539e-03, -1.7593e-02,  6.6852e-04, -3.4697e-02,\n",
              "                      -4.6292e-03, -2.1620e-02,  9.7103e-03, -2.9464e-02, -8.2628e-03,\n",
              "                       4.6516e-02, -8.1426e-03,  2.2107e-02,  3.2571e-02, -1.7257e-02,\n",
              "                      -7.5316e-04,  4.6915e-02,  4.0256e-02,  1.1642e-02, -2.3574e-02,\n",
              "                       2.9698e-02, -7.2817e-03, -4.3294e-02, -4.6541e-02, -1.0374e-02,\n",
              "                       2.5948e-02, -7.7853e-03,  1.2515e-02,  1.4813e-02, -5.4020e-02,\n",
              "                      -1.8644e-02,  5.5704e-02, -7.4172e-03, -1.7732e-02, -5.1019e-03,\n",
              "                      -1.2551e-02,  6.6110e-03,  6.1440e-03, -1.9971e-02,  1.2287e-02,\n",
              "                       3.2714e-03, -1.9879e-02,  9.9059e-03,  1.4638e-02, -2.2133e-02,\n",
              "                       2.1238e-03,  1.6861e-02, -2.2684e-02,  8.2566e-04,  3.2473e-02,\n",
              "                      -3.1984e-02, -1.7636e-02,  9.3923e-03, -2.9402e-02,  4.5086e-02,\n",
              "                      -1.9662e-03, -6.2291e-02, -1.0929e-02, -4.0522e-02, -1.1858e-02,\n",
              "                       8.7139e-03,  2.1566e-02, -2.2340e-02,  2.6097e-02,  4.4955e-02,\n",
              "                      -7.0378e-03,  1.3611e-02, -6.7677e-03,  2.6520e-02, -3.2351e-02,\n",
              "                      -2.5427e-02,  1.8075e-02,  4.6933e-04, -1.9258e-02,  5.9436e-02,\n",
              "                      -1.6745e-02,  2.2408e-03, -1.7766e-02,  1.1544e-02, -1.3068e-02,\n",
              "                      -3.8826e-02,  2.2702e-03, -6.1222e-03,  3.1253e-02, -4.6562e-03,\n",
              "                      -6.7027e-03,  4.8467e-02, -2.5827e-02, -8.5771e-03, -1.2464e-02,\n",
              "                      -3.9185e-03,  1.7795e-02,  2.2572e-03,  7.2328e-02, -2.8545e-02,\n",
              "                       1.6293e-02,  1.2776e-02, -3.2894e-02,  3.4353e-02,  4.6496e-02,\n",
              "                      -2.2365e-02,  8.8005e-03, -3.9196e-02,  4.6324e-02,  8.4201e-03,\n",
              "                      -1.2611e-02, -1.9651e-02,  4.0866e-02, -5.2432e-02,  5.4136e-03,\n",
              "                      -3.8712e-02, -2.9205e-02, -6.2476e-02,  7.7502e-02,  2.8101e-02,\n",
              "                       3.6574e-02, -1.5997e-02, -5.1398e-02, -3.5804e-02,  5.3353e-02,\n",
              "                       3.2400e-02, -5.5410e-02,  9.2745e-03,  5.5110e-02, -2.7411e-02,\n",
              "                       1.8970e-02, -1.5092e-02,  6.0527e-04, -1.1712e-02, -1.5225e-03,\n",
              "                      -6.1805e-03,  6.1284e-03, -2.6103e-02, -7.1145e-02, -8.6434e-03,\n",
              "                      -4.2229e-02,  4.3080e-02, -2.0886e-02,  1.5576e-02,  2.7852e-02,\n",
              "                       3.4243e-02,  8.6442e-03,  2.5785e-02,  2.4022e-03, -3.1522e-02,\n",
              "                       1.2497e-02, -3.2401e-03, -3.2353e-03, -2.5781e-02,  1.3179e-03,\n",
              "                      -1.9603e-02,  3.2936e-02,  2.3111e-02, -4.6325e-03, -4.9180e-02,\n",
              "                       1.6945e-02,  8.7638e-03,  1.2411e-02, -2.2628e-02, -2.2028e-03,\n",
              "                      -1.3314e-03, -2.4293e-04, -2.1481e-02, -7.8737e-03,  1.6670e-02,\n",
              "                       1.3769e-02, -3.9984e-02, -3.4540e-02,  1.8998e-02, -3.4200e-03,\n",
              "                       2.2439e-02, -3.1320e-03, -2.7421e-02,  2.6634e-02,  9.5995e-03,\n",
              "                      -9.6629e-03,  4.9871e-04, -1.1717e-02, -1.5778e-02, -9.0268e-03,\n",
              "                       5.6566e-03,  1.8541e-02, -2.7732e-02, -1.6255e-02,  6.7440e-02,\n",
              "                       2.0567e-02,  6.7865e-04, -1.3939e-02, -2.6550e-02,  8.0712e-02,\n",
              "                       1.4568e-02,  1.5491e-02, -1.1382e-02, -3.6814e-02,  1.3985e-03,\n",
              "                      -4.8980e-02,  3.8636e-02, -7.7434e-03, -4.1021e-03, -2.3884e-02,\n",
              "                       3.7929e-02, -3.0437e-02,  2.7902e-02,  1.5436e-02,  6.0308e-02,\n",
              "                      -4.1361e-02,  2.7266e-02, -1.1296e-02, -9.3636e-03, -2.9613e-03,\n",
              "                      -6.4214e-03, -5.4119e-02, -8.5644e-03,  3.7134e-02, -6.9089e-03,\n",
              "                       3.1826e-02, -8.9536e-05,  7.8836e-04, -7.3907e-02, -5.4594e-03,\n",
              "                       2.7871e-02, -1.4085e-02,  2.9106e-02, -1.4055e-02, -2.7686e-02,\n",
              "                       5.0650e-02,  1.3740e-02, -3.1086e-02,  3.4576e-02, -5.8427e-02,\n",
              "                      -6.9822e-03, -3.7528e-02, -4.5397e-02,  2.9822e-02, -7.4458e-03,\n",
              "                      -2.4738e-02, -8.9080e-03,  1.7750e-02,  2.7287e-03,  4.5258e-02,\n",
              "                       3.1400e-02,  3.2501e-04,  2.6515e-02,  3.8911e-02, -2.7468e-02,\n",
              "                      -6.1215e-03, -5.4463e-03,  1.8660e-02, -6.3304e-02,  2.3549e-02,\n",
              "                       6.4833e-03,  6.1939e-03,  5.1758e-02,  3.4355e-02, -7.1045e-02,\n",
              "                      -8.8896e-04, -2.2393e-02, -2.2827e-02, -3.4943e-03,  1.2934e-02,\n",
              "                      -1.1285e-02, -4.9009e-02,  2.4114e-03,  3.0644e-02, -5.0003e-03,\n",
              "                      -5.4073e-02, -9.8579e-03, -2.4026e-02, -1.3927e-02,  2.4176e-02,\n",
              "                       3.6441e-02,  1.9582e-02,  3.5189e-02,  2.2225e-02, -4.6053e-02,\n",
              "                      -2.2972e-02,  8.3531e-03,  3.0307e-02, -1.0835e-02, -6.2536e-02,\n",
              "                       1.7047e-02, -2.7372e-02, -1.5292e-02,  5.4681e-02, -3.9402e-02,\n",
              "                       1.0404e-01, -2.2431e-03])),\n",
              "             ('conv4.4.running_var',\n",
              "              tensor([0.9308, 0.9252, 0.9335, 0.9230, 0.9328, 0.9432, 0.9269, 0.9302, 0.9328,\n",
              "                      0.9248, 0.9280, 0.9392, 0.9301, 0.9213, 0.9398, 0.9307, 0.9234, 0.9273,\n",
              "                      0.9254, 0.9285, 0.9290, 0.9284, 0.9370, 0.9270, 0.9226, 0.9223, 0.9268,\n",
              "                      0.9241, 0.9261, 0.9260, 0.9222, 0.9322, 0.9266, 0.9206, 0.9337, 0.9374,\n",
              "                      0.9392, 0.9253, 0.9294, 0.9225, 0.9294, 0.9422, 0.9255, 0.9275, 0.9230,\n",
              "                      0.9296, 0.9253, 0.9300, 0.9223, 0.9289, 0.9238, 0.9307, 0.9236, 0.9273,\n",
              "                      0.9265, 0.9246, 0.9244, 0.9202, 0.9266, 0.9256, 0.9301, 0.9268, 0.9295,\n",
              "                      0.9271, 0.9267, 0.9256, 0.9306, 0.9236, 0.9230, 0.9401, 0.9275, 0.9345,\n",
              "                      0.9244, 0.9379, 0.9275, 0.9258, 0.9242, 0.9222, 0.9292, 0.9261, 0.9294,\n",
              "                      0.9267, 0.9463, 0.9326, 0.9250, 0.9259, 0.9279, 0.9284, 0.9267, 0.9397,\n",
              "                      0.9251, 0.9247, 0.9218, 0.9357, 0.9284, 0.9336, 0.9298, 0.9247, 0.9205,\n",
              "                      0.9391, 0.9187, 0.9300, 0.9234, 0.9419, 0.9251, 0.9203, 0.9205, 0.9370,\n",
              "                      0.9388, 0.9310, 0.9298, 0.9287, 0.9280, 0.9264, 0.9284, 0.9237, 0.9312,\n",
              "                      0.9294, 0.9201, 0.9218, 0.9358, 0.9325, 0.9279, 0.9343, 0.9227, 0.9291,\n",
              "                      0.9247, 0.9256, 0.9278, 0.9288, 0.9309, 0.9333, 0.9217, 0.9219, 0.9237,\n",
              "                      0.9258, 0.9251, 0.9258, 0.9231, 0.9224, 0.9365, 0.9297, 0.9267, 0.9245,\n",
              "                      0.9270, 0.9274, 0.9235, 0.9230, 0.9261, 0.9229, 0.9261, 0.9212, 0.9200,\n",
              "                      0.9236, 0.9389, 0.9274, 0.9207, 0.9216, 0.9199, 0.9285, 0.9352, 0.9332,\n",
              "                      0.9378, 0.9230, 0.9344, 0.9208, 0.9277, 0.9301, 0.9269, 0.9221, 0.9215,\n",
              "                      0.9223, 0.9228, 0.9294, 0.9249, 0.9254, 0.9281, 0.9243, 0.9296, 0.9274,\n",
              "                      0.9250, 0.9309, 0.9325, 0.9441, 0.9239, 0.9217, 0.9333, 0.9239, 0.9263,\n",
              "                      0.9196, 0.9248, 0.9275, 0.9286, 0.9183, 0.9274, 0.9393, 0.9219, 0.9220,\n",
              "                      0.9267, 0.9233, 0.9224, 0.9284, 0.9304, 0.9246, 0.9228, 0.9220, 0.9265,\n",
              "                      0.9332, 0.9292, 0.9389, 0.9376, 0.9226, 0.9245, 0.9364, 0.9272, 0.9392,\n",
              "                      0.9353, 0.9387, 0.9473, 0.9350, 0.9265, 0.9294, 0.9213, 0.9231, 0.9242,\n",
              "                      0.9329, 0.9283, 0.9312, 0.9280, 0.9309, 0.9334, 0.9312, 0.9281, 0.9367,\n",
              "                      0.9303, 0.9240, 0.9307, 0.9349, 0.9319, 0.9245, 0.9221, 0.9197, 0.9246,\n",
              "                      0.9221, 0.9225, 0.9266, 0.9386, 0.9247, 0.9349, 0.9206, 0.9307, 0.9241,\n",
              "                      0.9264, 0.9217, 0.9202, 0.9298, 0.9346, 0.9295, 0.9271, 0.9195, 0.9272,\n",
              "                      0.9251, 0.9252, 0.9309, 0.9317, 0.9298, 0.9267, 0.9329, 0.9255, 0.9229,\n",
              "                      0.9227, 0.9258, 0.9419, 0.9236, 0.9276, 0.9232, 0.9367, 0.9284, 0.9228,\n",
              "                      0.9274, 0.9281, 0.9469, 0.9217, 0.9249, 0.9308, 0.9261, 0.9196, 0.9283,\n",
              "                      0.9274, 0.9281, 0.9237, 0.9359, 0.9274, 0.9201, 0.9343, 0.9247, 0.9254,\n",
              "                      0.9218, 0.9227, 0.9311, 0.9277, 0.9287, 0.9211, 0.9199, 0.9235, 0.9287,\n",
              "                      0.9229, 0.9290, 0.9277, 0.9254, 0.9336, 0.9323, 0.9220, 0.9237, 0.9316,\n",
              "                      0.9259, 0.9226, 0.9271, 0.9283, 0.9361, 0.9257, 0.9583, 0.9294, 0.9238,\n",
              "                      0.9313, 0.9272, 0.9291, 0.9213, 0.9437, 0.9267, 0.9359, 0.9260, 0.9218,\n",
              "                      0.9306, 0.9312, 0.9314, 0.9271, 0.9330, 0.9458, 0.9283, 0.9294, 0.9244,\n",
              "                      0.9331, 0.9264, 0.9259, 0.9197, 0.9332, 0.9391, 0.9400, 0.9205, 0.9228,\n",
              "                      0.9259, 0.9243, 0.9335, 0.9320, 0.9303, 0.9256, 0.9244, 0.9249, 0.9314,\n",
              "                      0.9251, 0.9270, 0.9311, 0.9458, 0.9377, 0.9277, 0.9247, 0.9316, 0.9231,\n",
              "                      0.9216, 0.9209, 0.9269, 0.9299, 0.9179, 0.9261, 0.9258, 0.9283, 0.9227,\n",
              "                      0.9243, 0.9265, 0.9254, 0.9302, 0.9210, 0.9220, 0.9245, 0.9270, 0.9245,\n",
              "                      0.9198, 0.9271, 0.9195, 0.9256, 0.9361, 0.9374, 0.9216, 0.9268, 0.9214,\n",
              "                      0.9264, 0.9296, 0.9192, 0.9302, 0.9347, 0.9199, 0.9318, 0.9316, 0.9290,\n",
              "                      0.9298, 0.9326, 0.9311, 0.9230, 0.9282, 0.9283, 0.9289, 0.9209, 0.9254,\n",
              "                      0.9341, 0.9366, 0.9285, 0.9326, 0.9201, 0.9239, 0.9419, 0.9222, 0.9310,\n",
              "                      0.9298, 0.9275, 0.9205, 0.9333, 0.9287, 0.9323, 0.9378, 0.9314, 0.9364,\n",
              "                      0.9252, 0.9245, 0.9278, 0.9295, 0.9325, 0.9260, 0.9293, 0.9314, 0.9278,\n",
              "                      0.9253, 0.9251, 0.9416, 0.9250, 0.9218, 0.9250, 0.9214, 0.9294, 0.9303,\n",
              "                      0.9317, 0.9275, 0.9252, 0.9321, 0.9293, 0.9241, 0.9369, 0.9261, 0.9286,\n",
              "                      0.9253, 0.9368, 0.9240, 0.9307, 0.9275, 0.9236, 0.9217, 0.9218, 0.9276,\n",
              "                      0.9288, 0.9258, 0.9251, 0.9250, 0.9234, 0.9239, 0.9333, 0.9236, 0.9277,\n",
              "                      0.9279, 0.9273, 0.9294, 0.9497, 0.9337, 0.9326, 0.9232, 0.9263, 0.9305,\n",
              "                      0.9251, 0.9236, 0.9283, 0.9204, 0.9294, 0.9238, 0.9277, 0.9246, 0.9247,\n",
              "                      0.9289, 0.9290, 0.9397, 0.9239, 0.9290, 0.9368, 0.9255, 0.9325, 0.9288,\n",
              "                      0.9235, 0.9316, 0.9212, 0.9289, 0.9291, 0.9307, 0.9399, 0.9236])),\n",
              "             ('conv4.4.num_batches_tracked', tensor(1)),\n",
              "             ('conv5.0.weight',\n",
              "              tensor([[[[-2.3823e-02,  2.9081e-04,  1.4651e-02],\n",
              "                        [ 1.0635e-02, -1.7033e-02, -6.9443e-03],\n",
              "                        [ 8.2468e-03,  1.9071e-02, -3.7096e-04]],\n",
              "              \n",
              "                       [[-1.2883e-02,  8.6445e-03, -2.2584e-02],\n",
              "                        [-2.0489e-02,  1.3844e-03, -4.7238e-03],\n",
              "                        [-1.5741e-02, -3.0214e-03,  1.0984e-02]],\n",
              "              \n",
              "                       [[ 1.7402e-02,  1.6041e-02, -2.5464e-02],\n",
              "                        [-2.3967e-02, -1.1314e-02,  6.7933e-03],\n",
              "                        [ 2.4125e-02,  2.1141e-02, -2.1298e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.2588e-02,  1.6981e-02, -6.9890e-03],\n",
              "                        [ 3.5624e-03, -8.5450e-03,  4.9949e-03],\n",
              "                        [-1.2144e-02,  5.5392e-03,  1.1172e-02]],\n",
              "              \n",
              "                       [[ 2.2147e-02, -1.1579e-02,  1.5464e-02],\n",
              "                        [ 5.8222e-03,  2.2664e-02,  1.3545e-02],\n",
              "                        [ 2.9013e-03,  2.2849e-02,  1.5608e-02]],\n",
              "              \n",
              "                       [[ 5.0101e-03,  4.5020e-03, -8.7312e-03],\n",
              "                        [ 2.1141e-02, -3.1916e-03, -1.8608e-02],\n",
              "                        [ 1.2287e-02, -1.3574e-03, -1.6440e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 3.4135e-03,  2.1235e-02, -6.0500e-03],\n",
              "                        [-1.0205e-02, -1.3952e-02, -1.9733e-02],\n",
              "                        [-8.9954e-03, -1.9426e-02, -2.3481e-02]],\n",
              "              \n",
              "                       [[ 6.3284e-03, -1.1724e-02,  4.5658e-04],\n",
              "                        [-1.3195e-02, -1.7116e-02, -6.1170e-03],\n",
              "                        [ 1.0921e-02, -3.7938e-03,  2.2657e-02]],\n",
              "              \n",
              "                       [[ 7.7050e-03,  7.4988e-03, -2.4534e-03],\n",
              "                        [ 1.0528e-02, -6.5231e-03, -3.2878e-03],\n",
              "                        [-1.5940e-02, -1.9434e-03,  1.5261e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.4220e-02,  2.2223e-02, -2.5411e-02],\n",
              "                        [ 7.6259e-03, -1.1013e-02, -2.0163e-03],\n",
              "                        [-2.4488e-03, -2.3707e-02,  2.3459e-02]],\n",
              "              \n",
              "                       [[ 1.1510e-02, -6.5612e-03,  4.0375e-03],\n",
              "                        [-1.1522e-02,  2.2273e-02,  2.1366e-03],\n",
              "                        [ 1.5641e-02, -1.6462e-02, -2.7207e-03]],\n",
              "              \n",
              "                       [[-2.1943e-02, -1.1815e-02, -3.8265e-03],\n",
              "                        [ 1.0527e-02,  3.9064e-03,  8.5899e-03],\n",
              "                        [ 7.3173e-03, -3.5228e-03,  1.1484e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 1.5819e-02,  1.8136e-02, -2.2213e-02],\n",
              "                        [ 1.4765e-02, -7.1872e-03, -2.1178e-02],\n",
              "                        [ 1.3758e-02, -1.8537e-02,  1.3499e-02]],\n",
              "              \n",
              "                       [[ 9.0160e-03, -2.2396e-02,  2.1916e-02],\n",
              "                        [ 1.7726e-02, -1.7104e-02,  6.2119e-03],\n",
              "                        [ 2.0681e-02,  3.3242e-03,  1.4870e-02]],\n",
              "              \n",
              "                       [[-1.7016e-02,  4.2421e-04, -6.4995e-03],\n",
              "                        [ 2.5176e-02, -2.5001e-02, -1.4142e-02],\n",
              "                        [-2.5397e-02,  6.0482e-03,  8.0816e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.2176e-02, -2.2805e-02, -1.3383e-02],\n",
              "                        [-1.6700e-02, -1.4250e-02, -1.3200e-02],\n",
              "                        [ 5.2378e-03, -5.7838e-03, -1.5912e-02]],\n",
              "              \n",
              "                       [[ 1.8052e-02,  2.3175e-02,  9.0232e-03],\n",
              "                        [-1.6582e-02, -1.7617e-02,  1.6164e-02],\n",
              "                        [-1.8449e-04,  9.8826e-04, -3.5689e-03]],\n",
              "              \n",
              "                       [[ 1.0667e-03, -1.2554e-02, -6.1451e-05],\n",
              "                        [-1.6910e-02, -7.0745e-03, -1.9165e-02],\n",
              "                        [-1.5586e-02, -6.9578e-03,  2.3185e-02]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 2.2728e-02, -1.3097e-02,  9.0888e-03],\n",
              "                        [-1.1622e-02, -2.1233e-02, -3.1868e-03],\n",
              "                        [ 1.9614e-02,  1.3930e-02,  9.6028e-03]],\n",
              "              \n",
              "                       [[-1.5311e-02,  2.2964e-02, -2.0068e-02],\n",
              "                        [-1.7373e-03,  1.0356e-03,  9.4774e-03],\n",
              "                        [ 2.3804e-02, -1.1426e-02, -8.1255e-03]],\n",
              "              \n",
              "                       [[ 2.3886e-02, -3.3296e-03, -2.0724e-03],\n",
              "                        [ 3.6703e-03, -1.9866e-02,  9.4845e-03],\n",
              "                        [ 1.8272e-02,  7.7829e-03,  1.6439e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-9.0981e-03,  2.0717e-02, -2.1596e-02],\n",
              "                        [-1.5949e-02,  2.2865e-02,  1.7035e-02],\n",
              "                        [ 1.2985e-02,  4.6022e-03,  1.0829e-02]],\n",
              "              \n",
              "                       [[-2.2015e-02, -1.5491e-02,  4.1166e-03],\n",
              "                        [ 1.2865e-02,  4.2525e-03,  1.6978e-02],\n",
              "                        [-1.7073e-02, -1.6298e-02,  2.3387e-02]],\n",
              "              \n",
              "                       [[-1.4655e-02, -1.5355e-02, -9.2913e-03],\n",
              "                        [ 1.9365e-02,  1.3557e-02,  1.8111e-02],\n",
              "                        [ 1.4523e-02,  1.7187e-02,  2.9637e-04]]],\n",
              "              \n",
              "              \n",
              "                      [[[-6.4978e-03,  2.1174e-02, -2.5415e-02],\n",
              "                        [ 2.4216e-02,  2.5073e-02, -1.2995e-02],\n",
              "                        [-2.0267e-02,  1.3286e-02, -9.5283e-03]],\n",
              "              \n",
              "                       [[-1.3537e-02, -2.4199e-02,  2.1301e-02],\n",
              "                        [-2.0184e-02, -2.1871e-03, -1.0360e-02],\n",
              "                        [-1.0645e-02,  5.3037e-03,  1.9441e-02]],\n",
              "              \n",
              "                       [[ 1.3014e-02, -2.5016e-02,  2.1250e-02],\n",
              "                        [-1.5886e-02, -3.0707e-03, -1.2851e-02],\n",
              "                        [-2.4954e-02,  2.3941e-02,  1.2651e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-2.0055e-02, -9.5418e-03,  7.3745e-03],\n",
              "                        [-1.0547e-02,  2.1099e-02, -1.0679e-02],\n",
              "                        [ 1.8720e-02, -1.5910e-02, -2.0747e-02]],\n",
              "              \n",
              "                       [[-1.1212e-02, -1.3261e-02, -2.5505e-02],\n",
              "                        [-7.1521e-03, -3.3366e-03, -2.0013e-02],\n",
              "                        [ 1.5855e-02,  1.9350e-02, -5.2804e-03]],\n",
              "              \n",
              "                       [[ 2.7975e-03, -1.9233e-02,  1.4642e-02],\n",
              "                        [ 2.8477e-03, -1.9418e-02,  1.0611e-02],\n",
              "                        [-7.4983e-03,  2.4838e-02,  1.9034e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[-7.8824e-03, -1.5238e-02, -9.7048e-03],\n",
              "                        [ 1.4028e-02,  7.6640e-03,  1.6709e-02],\n",
              "                        [ 1.3380e-02,  1.2159e-02,  2.0957e-02]],\n",
              "              \n",
              "                       [[ 2.2903e-02, -6.9562e-03, -1.8393e-02],\n",
              "                        [ 9.8064e-03, -2.4206e-02, -1.7103e-02],\n",
              "                        [ 1.1120e-02,  1.6938e-02, -7.1267e-03]],\n",
              "              \n",
              "                       [[ 7.1640e-03,  1.8488e-02, -2.9311e-03],\n",
              "                        [ 4.5449e-03, -2.3278e-02,  1.0336e-03],\n",
              "                        [-1.1942e-02, -1.4106e-02, -5.2974e-04]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-2.2916e-03, -3.2590e-03, -1.8844e-02],\n",
              "                        [-6.3322e-04, -9.2164e-04,  2.1766e-03],\n",
              "                        [ 1.7542e-02, -2.1257e-02,  1.9800e-02]],\n",
              "              \n",
              "                       [[-1.0486e-03, -4.3198e-03, -2.1718e-02],\n",
              "                        [-9.3841e-03, -9.4763e-04,  1.8697e-02],\n",
              "                        [ 1.7691e-02, -1.4968e-02,  6.4725e-03]],\n",
              "              \n",
              "                       [[ 2.3042e-03, -1.3633e-02,  1.0147e-02],\n",
              "                        [-7.6454e-03,  2.3706e-02, -1.2609e-02],\n",
              "                        [-1.4085e-02, -2.1749e-03, -1.5455e-02]]]])),\n",
              "             ('conv5.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv5.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv5.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv5.1.running_mean',\n",
              "              tensor([-1.4883e-01, -1.8314e-02, -1.4501e-02,  5.4120e-02, -7.2272e-02,\n",
              "                      -6.4197e-02, -4.8009e-02, -5.7316e-02,  1.1056e-01, -3.2026e-03,\n",
              "                      -6.9370e-02,  4.0422e-02,  5.5559e-02, -7.1771e-02, -6.1065e-02,\n",
              "                      -2.0120e-02,  1.6544e-02, -9.9646e-03, -1.5015e-02,  3.0195e-02,\n",
              "                       1.0786e-02, -6.9059e-02, -4.8417e-02,  3.3903e-02,  5.5680e-02,\n",
              "                      -9.5250e-03,  7.0955e-02, -1.3920e-02,  4.8678e-02,  2.6474e-04,\n",
              "                      -4.3544e-02,  6.2957e-02, -5.8371e-02,  2.0814e-02,  6.9921e-02,\n",
              "                      -5.1047e-02, -8.5092e-02, -6.3620e-02,  2.7409e-02, -2.7863e-02,\n",
              "                      -5.8213e-02,  6.0191e-02,  7.7293e-02,  7.9660e-03,  1.0475e-01,\n",
              "                      -4.1092e-03,  1.9634e-02,  1.0096e-01, -6.3560e-02,  8.0173e-02,\n",
              "                      -4.5129e-02,  1.7280e-02, -2.0435e-02, -1.2109e-02, -1.0055e-01,\n",
              "                      -2.2367e-02,  1.3394e-02,  4.4609e-02,  6.3447e-02,  5.3831e-02,\n",
              "                      -3.0909e-03, -8.9674e-02, -3.8610e-02,  1.1865e-01,  1.7965e-02,\n",
              "                      -3.5547e-02,  2.8092e-02, -7.2424e-02, -4.0813e-02,  7.2718e-02,\n",
              "                       2.0430e-02,  1.0857e-01,  7.8363e-02, -7.9977e-02,  1.5449e-03,\n",
              "                      -5.0119e-02,  6.8520e-02, -4.2563e-02,  1.9479e-02, -3.2490e-02,\n",
              "                       5.5453e-03, -4.7513e-02, -1.6776e-02,  3.9521e-02,  1.2656e-02,\n",
              "                      -4.8036e-03, -1.8524e-02, -1.6549e-02, -9.7545e-04,  6.2661e-02,\n",
              "                       8.1891e-03, -5.1351e-03, -6.1351e-02,  1.1806e-02, -9.2028e-02,\n",
              "                       1.2230e-02, -4.6162e-02,  2.5130e-02, -1.0571e-01, -6.4447e-02,\n",
              "                       7.6453e-02, -7.5990e-02, -4.4238e-02,  1.3207e-02,  2.5221e-02,\n",
              "                      -1.7076e-02, -3.4420e-02, -6.8377e-02,  2.2267e-02, -7.4734e-02,\n",
              "                       3.4352e-02,  2.9363e-02,  1.2916e-02, -5.5704e-02, -9.1266e-02,\n",
              "                       1.9463e-02, -4.0905e-02,  1.8236e-02,  3.1431e-02,  6.4811e-04,\n",
              "                      -3.2999e-02,  5.1201e-02,  4.8302e-02,  1.0088e-02,  9.9253e-03,\n",
              "                       6.9537e-02,  4.4279e-02,  1.0942e-02,  3.0163e-02, -3.2074e-02,\n",
              "                      -7.0850e-02, -5.2202e-02,  6.2456e-02, -3.7351e-02,  2.6620e-02,\n",
              "                       1.6748e-02, -3.4360e-03, -3.6348e-02, -5.9036e-02, -1.2587e-02,\n",
              "                      -5.5733e-02,  5.4223e-02, -3.5123e-02,  2.5742e-02,  3.6666e-02,\n",
              "                      -3.3235e-02, -9.9384e-03, -3.5594e-02,  5.8154e-02, -2.0520e-02,\n",
              "                       6.1828e-02, -2.1662e-02,  1.0227e-02,  4.2980e-02,  3.9622e-02,\n",
              "                       2.9380e-03,  6.5469e-03,  5.3861e-02, -2.9872e-02,  5.2440e-02,\n",
              "                       2.9600e-03,  2.2222e-02, -1.4904e-02,  2.3086e-02, -2.9007e-02,\n",
              "                      -3.5997e-02, -1.8687e-03, -5.0182e-02, -5.8410e-02,  6.0902e-03,\n",
              "                      -8.7337e-02, -4.1376e-02,  4.3056e-03, -7.1164e-02,  5.5587e-02,\n",
              "                      -5.1676e-02, -3.3891e-02, -3.4790e-02, -7.9399e-02,  4.2267e-03,\n",
              "                      -8.4833e-02,  3.2676e-02, -4.3437e-02,  7.2040e-02, -4.9201e-03,\n",
              "                       2.6698e-02,  2.0969e-02, -5.6537e-03, -2.1187e-02, -3.9903e-03,\n",
              "                      -7.0393e-03,  2.3577e-02, -3.2064e-03, -3.4144e-02, -2.9926e-02,\n",
              "                       1.0668e-02, -3.3403e-02,  7.8348e-02, -2.6042e-02, -9.7150e-03,\n",
              "                      -2.8446e-04, -1.7421e-03,  3.5322e-02,  3.7869e-02, -2.7965e-03,\n",
              "                       3.5841e-02, -6.7454e-02, -7.1139e-02, -5.9418e-02, -4.0213e-02,\n",
              "                      -1.4737e-02, -7.5901e-03, -2.2123e-02, -4.4085e-02, -1.5017e-02,\n",
              "                       5.4593e-02, -7.2845e-02, -3.6288e-02, -2.7090e-02, -4.1561e-03,\n",
              "                      -6.5145e-03, -4.4169e-02,  1.8363e-02,  1.0530e-02, -2.7095e-02,\n",
              "                       2.9955e-02,  2.3632e-02,  2.9397e-02,  2.4853e-02, -3.9076e-03,\n",
              "                      -5.6380e-02, -6.0758e-02, -3.5770e-02,  2.4499e-02, -2.7035e-02,\n",
              "                      -5.7800e-02, -6.3030e-02, -5.6711e-02,  2.7958e-02,  2.7806e-02,\n",
              "                       5.5254e-03,  8.3091e-03, -3.0201e-02, -1.5072e-02, -6.4423e-03,\n",
              "                      -1.9650e-02,  2.0268e-02,  4.8540e-02, -7.3640e-02,  7.1193e-02,\n",
              "                       2.8065e-02, -6.5739e-02,  1.1705e-02,  1.3856e-02, -4.5962e-02,\n",
              "                      -6.4061e-02, -3.3168e-02,  6.1074e-02,  7.7902e-02,  5.6343e-03,\n",
              "                      -2.1617e-02,  2.5428e-02, -8.4095e-04, -6.9627e-02,  3.6503e-02,\n",
              "                      -1.6309e-02, -8.2913e-02,  1.3081e-02, -7.5610e-02,  1.6570e-02,\n",
              "                      -5.7182e-02, -7.3497e-02, -3.0881e-02,  2.2732e-02, -1.0655e-03,\n",
              "                       3.5108e-02,  4.7172e-02, -7.9935e-02, -1.2950e-02,  1.2990e-02,\n",
              "                       2.1768e-02, -6.2079e-02,  1.3796e-02, -3.9627e-02, -3.2687e-02,\n",
              "                      -3.0982e-03, -3.5616e-02, -4.2339e-02, -3.4980e-02,  5.7929e-02,\n",
              "                      -1.6503e-02,  2.5517e-02,  1.1974e-02,  5.1625e-02, -6.4860e-02,\n",
              "                       3.6398e-02, -3.4257e-02,  4.0909e-02,  5.1960e-02,  3.9407e-02,\n",
              "                      -4.8061e-02, -4.9918e-02, -1.8388e-02, -6.9193e-03, -8.0810e-03,\n",
              "                       3.6707e-02, -3.4557e-02, -3.8137e-02,  7.1709e-02, -5.7342e-02,\n",
              "                       1.4976e-02, -1.1947e-02, -3.2229e-02,  5.4022e-02, -2.9728e-02,\n",
              "                      -1.3617e-02,  5.6660e-02,  1.8717e-03, -2.5457e-02,  4.7492e-02,\n",
              "                       1.0056e-01,  4.2002e-02, -9.3520e-02, -4.9295e-02, -7.2586e-02,\n",
              "                       3.7292e-02, -7.3981e-03, -5.1138e-04,  4.3402e-02, -4.9819e-02,\n",
              "                       2.0366e-02, -7.0774e-02,  9.3069e-02, -4.9225e-02,  2.6935e-02,\n",
              "                       8.8130e-03,  7.6422e-02, -5.1411e-02, -4.7018e-02,  3.9116e-02,\n",
              "                       5.3217e-02, -1.5165e-02,  2.4736e-02, -4.2152e-02, -1.1107e-01,\n",
              "                      -4.4989e-03,  3.6281e-02, -2.6265e-02, -1.6552e-02,  3.9655e-02,\n",
              "                       2.0109e-02, -6.8831e-02, -5.9635e-03, -3.5174e-03, -6.9061e-04,\n",
              "                      -1.0432e-02,  2.5641e-03, -2.3237e-02, -2.5701e-03,  1.3121e-02,\n",
              "                      -1.1309e-02, -1.3142e-01,  2.7305e-02, -5.6330e-02,  8.7512e-03,\n",
              "                       2.1352e-03, -3.6533e-03, -2.8872e-02, -5.7264e-02, -5.8948e-02,\n",
              "                      -1.7639e-02,  2.5177e-02, -5.9133e-02, -5.9143e-02,  2.9852e-02,\n",
              "                      -1.2838e-02,  5.1063e-02,  5.6607e-03,  5.2749e-02,  1.3430e-02,\n",
              "                      -5.0691e-02,  2.6821e-02,  9.8048e-02,  1.0564e-01, -5.4667e-02,\n",
              "                      -3.0113e-02, -3.5806e-02, -1.0118e-02, -4.9175e-02, -1.6207e-02,\n",
              "                      -1.9822e-02,  1.1761e-01, -4.3675e-02,  1.3955e-02, -6.1883e-02,\n",
              "                      -2.0123e-02,  2.8642e-02, -4.8222e-02, -1.0466e-01,  2.9222e-03,\n",
              "                      -7.0020e-03,  9.7947e-03,  1.2241e-02, -1.3106e-02,  5.2488e-02,\n",
              "                      -2.3535e-05,  3.9074e-02,  1.6014e-01,  2.8929e-02, -7.1733e-02,\n",
              "                      -3.0227e-02, -4.2079e-02,  3.0321e-02, -8.0078e-02, -2.9021e-02,\n",
              "                       6.0543e-02, -3.9053e-02, -2.1317e-02,  3.8297e-02,  2.7848e-02,\n",
              "                      -2.3603e-02, -8.6047e-02, -1.2801e-02, -2.3332e-02,  1.9406e-02,\n",
              "                       1.5084e-02,  1.5415e-02,  3.9953e-02, -6.8483e-02,  5.7284e-02,\n",
              "                      -4.7847e-02,  3.3398e-03,  4.7621e-02,  2.9464e-02, -3.9011e-03,\n",
              "                      -1.3835e-02,  3.6891e-02,  2.5502e-02, -1.0972e-02,  6.4279e-02,\n",
              "                       6.2656e-02, -3.0565e-02, -4.7711e-02, -7.7714e-02,  7.4273e-02,\n",
              "                      -1.4195e-04,  1.1293e-03,  5.1501e-02,  8.7143e-02, -2.9038e-02,\n",
              "                       1.5996e-02,  1.1127e-01,  6.7849e-03,  1.2087e-01,  5.4230e-02,\n",
              "                       3.3679e-02,  5.1395e-03, -2.7515e-02, -5.4542e-03, -9.7658e-02,\n",
              "                      -4.2616e-02,  2.9810e-02,  1.2135e-02,  3.6419e-02,  4.1346e-02,\n",
              "                      -1.4590e-02, -5.7441e-02, -6.8079e-02, -2.1431e-02, -1.8134e-02,\n",
              "                      -3.8312e-02, -1.2915e-02,  2.3502e-02, -6.0497e-02, -1.6110e-02,\n",
              "                       5.1450e-02, -2.7492e-02,  3.9191e-02, -5.6334e-02, -5.1128e-02,\n",
              "                      -3.0200e-02, -1.1794e-02, -6.0288e-02,  4.0341e-02,  1.5984e-02,\n",
              "                      -1.7072e-02, -4.2438e-02, -1.2391e-02,  7.5671e-02,  3.5743e-02,\n",
              "                       3.5273e-02, -5.2200e-02, -1.3366e-01,  7.5655e-04,  1.6917e-02,\n",
              "                      -9.0540e-02, -4.0276e-03,  6.4321e-02, -2.9010e-02,  3.6874e-03,\n",
              "                       5.8663e-02, -2.2018e-02,  5.4868e-03, -4.6360e-02, -3.3600e-02,\n",
              "                      -6.8436e-02, -2.2398e-03,  7.3941e-02, -1.0209e-02, -3.1122e-02,\n",
              "                      -5.9863e-02, -1.6507e-02])),\n",
              "             ('conv5.1.running_var',\n",
              "              tensor([1.0067, 0.9453, 0.9433, 0.9439, 0.9868, 0.9441, 0.9390, 0.9234, 0.9437,\n",
              "                      0.9431, 0.9188, 0.9401, 0.9512, 0.9329, 0.9306, 0.9395, 0.9374, 0.9356,\n",
              "                      0.9551, 0.9435, 0.9557, 0.9232, 0.9349, 0.9429, 0.9180, 0.9538, 0.9682,\n",
              "                      0.9306, 0.9826, 0.9250, 0.9242, 0.9720, 0.9283, 0.9463, 0.9462, 0.9206,\n",
              "                      0.9274, 0.9280, 0.9339, 0.9736, 0.9290, 0.9279, 0.9741, 0.9408, 0.9415,\n",
              "                      0.9538, 0.9547, 0.9498, 0.9192, 0.9270, 0.9491, 0.9405, 0.9393, 0.9329,\n",
              "                      0.9237, 0.9299, 0.9541, 0.9491, 0.9821, 0.9573, 0.9255, 0.9408, 0.9326,\n",
              "                      0.9658, 0.9896, 0.9414, 0.9493, 0.9477, 0.9329, 0.9248, 0.9258, 0.9399,\n",
              "                      0.9404, 0.9342, 0.9397, 0.9380, 0.9375, 0.9345, 0.9718, 0.9416, 0.9490,\n",
              "                      0.9628, 0.9301, 0.9491, 0.9331, 0.9320, 0.9169, 0.9250, 0.9179, 0.9547,\n",
              "                      0.9518, 0.9480, 0.9852, 0.9164, 0.9667, 0.9640, 0.9550, 0.9584, 0.9271,\n",
              "                      0.9382, 0.9626, 0.9177, 0.9745, 0.9325, 0.9219, 0.9453, 0.9227, 0.9268,\n",
              "                      0.9343, 0.9292, 0.9481, 0.9224, 0.9340, 0.9295, 0.9412, 0.9371, 0.9255,\n",
              "                      0.9266, 0.9203, 0.9198, 0.9625, 0.9386, 0.9266, 0.9283, 0.9273, 0.9395,\n",
              "                      0.9271, 0.9776, 0.9491, 0.9344, 0.9368, 0.9689, 0.9350, 0.9827, 0.9452,\n",
              "                      1.0082, 0.9448, 0.9217, 0.9254, 0.9685, 0.9242, 0.9494, 1.0208, 0.9258,\n",
              "                      0.9184, 0.9188, 0.9545, 0.9416, 0.9377, 0.9438, 0.9531, 0.9299, 0.9188,\n",
              "                      0.9533, 0.9554, 0.9275, 0.9540, 0.9412, 0.9368, 0.9347, 0.9255, 0.9329,\n",
              "                      0.9214, 0.9713, 0.9326, 0.9408, 0.9809, 0.9364, 0.9219, 0.9699, 0.9505,\n",
              "                      0.9349, 0.9308, 0.9588, 0.9297, 0.9387, 0.9282, 0.9209, 0.9736, 0.9332,\n",
              "                      0.9363, 0.9626, 0.9351, 0.9472, 0.9252, 0.9570, 0.9701, 0.9314, 0.9234,\n",
              "                      0.9398, 0.9192, 0.9225, 0.9398, 0.9422, 0.9301, 0.9320, 0.9504, 0.9205,\n",
              "                      0.9426, 0.9362, 0.9453, 0.9399, 0.9244, 0.9281, 0.9491, 0.9416, 0.9292,\n",
              "                      0.9371, 0.9462, 0.9320, 0.9365, 0.9361, 0.9765, 0.9351, 0.9428, 0.9441,\n",
              "                      0.9468, 0.9280, 0.9388, 0.9299, 0.9182, 0.9397, 0.9175, 0.9326, 0.9316,\n",
              "                      0.9369, 0.9269, 0.9504, 0.9372, 0.9343, 0.9350, 0.9434, 0.9543, 0.9448,\n",
              "                      0.9650, 0.9542, 0.9739, 0.9231, 0.9455, 0.9461, 0.9370, 0.9510, 0.9492,\n",
              "                      0.9335, 0.9421, 0.9324, 0.9259, 0.9719, 0.9645, 0.9187, 0.9468, 0.9201,\n",
              "                      0.9396, 0.9650, 0.9393, 0.9383, 0.9248, 0.9340, 0.9219, 0.9388, 0.9305,\n",
              "                      0.9301, 0.9555, 0.9304, 0.9315, 0.9274, 0.9378, 0.9207, 0.9540, 0.9622,\n",
              "                      0.9338, 0.9300, 0.9373, 0.9848, 0.9279, 0.9257, 0.9229, 0.9323, 0.9393,\n",
              "                      0.9769, 0.9249, 0.9590, 0.9547, 0.9514, 0.9488, 0.9206, 0.9558, 0.9850,\n",
              "                      0.9698, 0.9430, 0.9662, 0.9607, 0.9268, 0.9431, 0.9235, 0.9270, 0.9489,\n",
              "                      0.9450, 0.9357, 0.9335, 0.9217, 0.9526, 0.9230, 0.9220, 0.9220, 0.9307,\n",
              "                      0.9270, 0.9405, 0.9679, 0.9325, 0.9353, 0.9425, 0.9220, 0.9387, 0.9545,\n",
              "                      0.9329, 1.0198, 0.9465, 0.9200, 0.9443, 0.9476, 0.9376, 0.9811, 0.9433,\n",
              "                      1.0064, 0.9312, 0.9180, 0.9333, 0.9342, 0.9473, 0.9754, 0.9204, 0.9412,\n",
              "                      0.9301, 0.9184, 0.9228, 0.9602, 0.9269, 0.9584, 0.9706, 0.9458, 0.9332,\n",
              "                      0.9918, 0.9418, 0.9184, 0.9517, 0.9259, 0.9649, 0.9241, 0.9522, 0.9235,\n",
              "                      0.9427, 0.9299, 0.9375, 0.9220, 0.9509, 0.9532, 0.9259, 0.9315, 0.9540,\n",
              "                      0.9391, 0.9469, 0.9257, 0.9248, 0.9202, 1.0072, 0.9334, 0.9226, 0.9230,\n",
              "                      0.9283, 0.9327, 0.9606, 0.9405, 0.9558, 0.9617, 0.9357, 0.9336, 0.9632,\n",
              "                      0.9645, 0.9298, 0.9518, 0.9212, 1.0031, 0.9458, 0.9281, 0.9403, 0.9439,\n",
              "                      0.9317, 0.9185, 0.9234, 0.9400, 0.9312, 0.9248, 0.9289, 0.9334, 0.9186,\n",
              "                      1.0280, 0.9781, 0.9393, 0.9189, 0.9249, 0.9425, 0.9326, 1.0267, 0.9274,\n",
              "                      0.9337, 0.9408, 0.9455, 0.9376, 0.9273, 0.9233, 0.9478, 0.9267, 0.9727,\n",
              "                      0.9205, 0.9246, 0.9253, 0.9281, 0.9241, 0.9308, 0.9765, 0.9400, 0.9339,\n",
              "                      0.9452, 0.9658, 0.9190, 0.9370, 0.9383, 0.9345, 0.9351, 0.9656, 0.9239,\n",
              "                      0.9373, 0.9619, 0.9267, 0.9343, 0.9211, 0.9373, 0.9458, 0.9356, 0.9404,\n",
              "                      0.9440, 0.9266, 0.9248, 0.9338, 0.9421, 0.9308, 0.9219, 0.9490, 0.9247,\n",
              "                      0.9335, 0.9258, 0.9377, 0.9583, 0.9301, 0.9202, 0.9839, 0.9288, 0.9975,\n",
              "                      0.9315, 0.9321, 0.9439, 0.9224, 0.9199, 0.9263, 0.9534, 0.9505, 0.9323,\n",
              "                      0.9359, 0.9214, 0.9173, 0.9195, 0.9200, 0.9204, 0.9396, 0.9266, 0.9187,\n",
              "                      0.9267, 0.9400, 0.9300, 0.9311, 0.9423, 0.9660, 0.9335, 0.9290, 0.9234,\n",
              "                      0.9220, 0.9358, 0.9267, 0.9372, 0.9680, 0.9484, 0.9474, 0.9473, 0.9347,\n",
              "                      0.9395, 0.9245, 0.9464, 0.9334, 0.9356, 0.9232, 0.9371, 0.9285, 0.9214,\n",
              "                      0.9234, 0.9310, 0.9225, 0.9309, 0.9277, 0.9344, 0.9587, 0.9150])),\n",
              "             ('conv5.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv5.3.weight',\n",
              "              tensor([[[[ 0.0092, -0.0187,  0.0177],\n",
              "                        [-0.0115, -0.0048,  0.0052],\n",
              "                        [-0.0237, -0.0185, -0.0156]],\n",
              "              \n",
              "                       [[ 0.0221,  0.0185,  0.0091],\n",
              "                        [ 0.0053, -0.0067, -0.0005],\n",
              "                        [ 0.0024,  0.0176,  0.0185]],\n",
              "              \n",
              "                       [[ 0.0209, -0.0100,  0.0105],\n",
              "                        [ 0.0199,  0.0205,  0.0196],\n",
              "                        [ 0.0220,  0.0178, -0.0208]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0201,  0.0106, -0.0211],\n",
              "                        [ 0.0002, -0.0082, -0.0239],\n",
              "                        [-0.0104,  0.0196,  0.0129]],\n",
              "              \n",
              "                       [[-0.0232,  0.0064, -0.0151],\n",
              "                        [-0.0244,  0.0168,  0.0080],\n",
              "                        [-0.0178,  0.0103, -0.0067]],\n",
              "              \n",
              "                       [[-0.0071,  0.0074, -0.0028],\n",
              "                        [ 0.0223, -0.0144,  0.0219],\n",
              "                        [-0.0190, -0.0182, -0.0079]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0245,  0.0084,  0.0219],\n",
              "                        [-0.0111, -0.0136, -0.0168],\n",
              "                        [-0.0064, -0.0153, -0.0192]],\n",
              "              \n",
              "                       [[-0.0028, -0.0060,  0.0141],\n",
              "                        [-0.0037, -0.0090,  0.0130],\n",
              "                        [ 0.0111,  0.0128,  0.0132]],\n",
              "              \n",
              "                       [[ 0.0092,  0.0069, -0.0207],\n",
              "                        [ 0.0251, -0.0108, -0.0041],\n",
              "                        [-0.0162,  0.0250, -0.0041]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0176,  0.0069,  0.0022],\n",
              "                        [ 0.0020,  0.0065, -0.0025],\n",
              "                        [-0.0197,  0.0156,  0.0132]],\n",
              "              \n",
              "                       [[-0.0166, -0.0255, -0.0181],\n",
              "                        [-0.0071,  0.0255, -0.0240],\n",
              "                        [-0.0154, -0.0037, -0.0201]],\n",
              "              \n",
              "                       [[-0.0233, -0.0116,  0.0169],\n",
              "                        [ 0.0238,  0.0049,  0.0030],\n",
              "                        [ 0.0080,  0.0215,  0.0043]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0163, -0.0206, -0.0193],\n",
              "                        [ 0.0001, -0.0125, -0.0158],\n",
              "                        [-0.0246,  0.0104, -0.0133]],\n",
              "              \n",
              "                       [[ 0.0165, -0.0195,  0.0036],\n",
              "                        [ 0.0083, -0.0039,  0.0013],\n",
              "                        [ 0.0014, -0.0177, -0.0021]],\n",
              "              \n",
              "                       [[ 0.0162,  0.0029,  0.0063],\n",
              "                        [ 0.0086,  0.0015,  0.0148],\n",
              "                        [ 0.0220,  0.0124, -0.0189]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0180,  0.0015,  0.0210],\n",
              "                        [ 0.0121, -0.0049,  0.0178],\n",
              "                        [ 0.0219,  0.0190, -0.0120]],\n",
              "              \n",
              "                       [[ 0.0025, -0.0249,  0.0078],\n",
              "                        [ 0.0252,  0.0110,  0.0005],\n",
              "                        [-0.0178, -0.0193, -0.0203]],\n",
              "              \n",
              "                       [[ 0.0198,  0.0087,  0.0021],\n",
              "                        [-0.0065, -0.0161, -0.0158],\n",
              "                        [ 0.0158, -0.0216,  0.0059]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0154, -0.0225,  0.0104],\n",
              "                        [-0.0237, -0.0253, -0.0251],\n",
              "                        [-0.0211, -0.0146,  0.0148]],\n",
              "              \n",
              "                       [[-0.0062,  0.0081, -0.0101],\n",
              "                        [ 0.0127,  0.0095,  0.0011],\n",
              "                        [ 0.0161,  0.0061, -0.0029]],\n",
              "              \n",
              "                       [[-0.0019,  0.0083, -0.0090],\n",
              "                        [ 0.0112,  0.0085,  0.0240],\n",
              "                        [-0.0015,  0.0116,  0.0201]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0252,  0.0045,  0.0195],\n",
              "                        [ 0.0142, -0.0024, -0.0211],\n",
              "                        [ 0.0006, -0.0234, -0.0141]],\n",
              "              \n",
              "                       [[-0.0017,  0.0015, -0.0075],\n",
              "                        [ 0.0255, -0.0237, -0.0230],\n",
              "                        [ 0.0027, -0.0108,  0.0019]],\n",
              "              \n",
              "                       [[ 0.0234,  0.0171,  0.0254],\n",
              "                        [ 0.0136, -0.0243,  0.0079],\n",
              "                        [-0.0007,  0.0089, -0.0157]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0205,  0.0009,  0.0129],\n",
              "                        [ 0.0246, -0.0068, -0.0007],\n",
              "                        [-0.0038, -0.0181, -0.0037]],\n",
              "              \n",
              "                       [[-0.0029, -0.0156,  0.0062],\n",
              "                        [ 0.0230,  0.0033,  0.0099],\n",
              "                        [-0.0115,  0.0186,  0.0133]],\n",
              "              \n",
              "                       [[-0.0105, -0.0027,  0.0187],\n",
              "                        [-0.0093, -0.0246,  0.0188],\n",
              "                        [-0.0089,  0.0096, -0.0078]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0242,  0.0071,  0.0061],\n",
              "                        [-0.0130,  0.0167,  0.0245],\n",
              "                        [-0.0070, -0.0005, -0.0161]],\n",
              "              \n",
              "                       [[-0.0018,  0.0220,  0.0159],\n",
              "                        [-0.0105, -0.0135,  0.0078],\n",
              "                        [ 0.0114, -0.0077,  0.0037]],\n",
              "              \n",
              "                       [[-0.0102, -0.0148, -0.0112],\n",
              "                        [-0.0177, -0.0246,  0.0212],\n",
              "                        [ 0.0114,  0.0184,  0.0102]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0044,  0.0163, -0.0212],\n",
              "                        [-0.0191,  0.0168,  0.0169],\n",
              "                        [-0.0204,  0.0187,  0.0119]],\n",
              "              \n",
              "                       [[ 0.0254,  0.0247, -0.0154],\n",
              "                        [-0.0207, -0.0081,  0.0222],\n",
              "                        [-0.0092, -0.0003, -0.0124]],\n",
              "              \n",
              "                       [[-0.0216, -0.0220,  0.0036],\n",
              "                        [ 0.0159, -0.0148,  0.0042],\n",
              "                        [-0.0217, -0.0095, -0.0002]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0168, -0.0207,  0.0222],\n",
              "                        [-0.0052, -0.0089,  0.0153],\n",
              "                        [ 0.0144,  0.0017, -0.0164]],\n",
              "              \n",
              "                       [[ 0.0013,  0.0190, -0.0194],\n",
              "                        [ 0.0222,  0.0172,  0.0093],\n",
              "                        [-0.0021, -0.0032, -0.0146]],\n",
              "              \n",
              "                       [[-0.0077, -0.0212, -0.0173],\n",
              "                        [ 0.0137,  0.0235,  0.0151],\n",
              "                        [ 0.0146, -0.0025, -0.0219]]]])),\n",
              "             ('conv5.3.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv5.4.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv5.4.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv5.4.running_mean',\n",
              "              tensor([ 3.5560e-03,  2.9532e-03, -2.6478e-02,  9.3992e-03, -9.9298e-03,\n",
              "                       1.0914e-02, -3.1868e-02, -8.0644e-03, -6.0148e-03, -1.9972e-02,\n",
              "                      -2.0765e-02,  3.3385e-02,  1.6998e-02,  1.7310e-03,  1.9080e-02,\n",
              "                      -4.8288e-02,  3.6194e-04, -9.5116e-02, -8.2117e-03,  1.1871e-02,\n",
              "                       1.5379e-02,  4.8559e-02,  2.2930e-02, -9.9032e-04,  7.1963e-04,\n",
              "                      -2.5588e-02, -3.4831e-02, -3.5337e-02, -2.3587e-02, -6.2107e-03,\n",
              "                      -9.6470e-03,  1.7732e-02,  9.7872e-03, -2.1740e-02, -8.0262e-03,\n",
              "                       1.4798e-02,  7.5452e-03,  2.4702e-02,  4.8691e-02, -7.4828e-03,\n",
              "                       1.2025e-02, -6.0095e-02,  2.1680e-02,  2.4976e-02, -7.0587e-03,\n",
              "                       2.9192e-02, -4.4675e-03,  2.6347e-02,  3.4716e-02,  3.0979e-04,\n",
              "                       3.4819e-02, -3.0114e-02,  4.2054e-02,  1.7050e-02, -1.1330e-02,\n",
              "                       4.4612e-03, -3.6015e-02,  8.7471e-02, -1.1052e-02,  1.2635e-02,\n",
              "                       2.5741e-02, -4.9198e-03, -2.4626e-02,  3.8753e-03,  1.8427e-02,\n",
              "                       2.0346e-02,  2.0005e-02, -4.3929e-02, -2.7767e-02,  3.0570e-02,\n",
              "                      -1.1424e-02, -1.6867e-02,  1.3824e-03, -2.6192e-02,  1.5879e-02,\n",
              "                       2.4331e-02,  2.8129e-02, -1.9449e-02, -2.3266e-02, -5.7710e-02,\n",
              "                       1.6236e-02, -1.8103e-02, -1.4304e-03,  2.0821e-03, -1.5811e-02,\n",
              "                      -5.0779e-03, -3.6491e-02, -1.9443e-03, -2.1920e-03,  2.5055e-02,\n",
              "                       8.8824e-03,  8.2213e-03,  1.0346e-02,  5.0666e-03,  2.6711e-02,\n",
              "                       4.2481e-03,  3.2081e-02, -2.4549e-02, -2.7254e-02, -3.4330e-02,\n",
              "                       6.2864e-03,  5.6208e-03,  7.3138e-04,  3.4480e-02, -3.3085e-03,\n",
              "                      -1.6719e-02,  2.2434e-02, -3.7596e-03, -2.3637e-02,  2.4056e-02,\n",
              "                      -1.2789e-02, -1.9785e-02,  3.0175e-02, -9.1451e-03, -2.8717e-03,\n",
              "                      -2.5492e-02,  3.0960e-02,  1.4332e-02,  1.9377e-02, -4.2856e-03,\n",
              "                       1.4471e-02,  5.7315e-04,  2.1384e-02, -1.3090e-02, -2.2859e-03,\n",
              "                      -4.0407e-02, -1.8026e-02,  9.2980e-03,  3.8203e-02,  3.2043e-02,\n",
              "                      -2.7841e-02,  1.5139e-02,  1.2635e-02,  7.6656e-03,  1.0828e-02,\n",
              "                      -1.2944e-02,  1.1472e-03, -2.7161e-02, -5.9106e-03, -1.6658e-02,\n",
              "                      -1.1829e-03,  1.2703e-02,  6.5775e-03,  2.3711e-02, -9.1791e-03,\n",
              "                       5.7696e-02, -5.5807e-02,  2.1537e-02, -1.9661e-02,  1.2536e-02,\n",
              "                      -5.1846e-03, -2.8268e-02, -7.4206e-03,  4.9202e-03,  2.6959e-02,\n",
              "                      -3.8930e-02, -1.8574e-02,  1.5595e-02,  5.4035e-02, -1.1188e-02,\n",
              "                       1.1769e-02, -8.5058e-03, -1.4495e-03, -1.0186e-02,  8.3587e-03,\n",
              "                       7.4673e-03,  1.9314e-02, -1.6370e-02, -3.9686e-02,  2.5857e-03,\n",
              "                      -3.1570e-02,  3.4804e-02, -1.6244e-04, -1.4546e-02,  1.3905e-02,\n",
              "                      -1.9703e-02,  7.2636e-03, -1.4430e-02,  1.9540e-02,  4.4326e-02,\n",
              "                       3.9898e-03, -2.5133e-02, -1.3242e-02, -1.8749e-03,  9.3831e-03,\n",
              "                       3.1449e-03, -1.8820e-02, -2.4447e-02,  1.3988e-02,  1.0284e-02,\n",
              "                       5.7781e-03,  4.5552e-03, -8.7719e-03,  4.3499e-03,  4.8213e-03,\n",
              "                       3.9578e-02, -3.5765e-02, -5.6608e-03, -8.4100e-03, -3.6817e-02,\n",
              "                       3.4300e-02,  1.0219e-02,  1.0584e-02,  2.7293e-02,  4.7973e-03,\n",
              "                      -6.4490e-03, -2.9228e-02, -1.0780e-02, -3.5220e-02, -1.1447e-03,\n",
              "                      -1.9114e-02, -5.2127e-02, -4.8718e-03, -1.6436e-02, -2.5975e-03,\n",
              "                       8.2468e-03, -1.8002e-04, -2.4035e-03,  1.4908e-02,  1.6503e-04,\n",
              "                      -2.7644e-03, -2.7715e-02,  9.2875e-03, -1.5435e-02, -6.5317e-04,\n",
              "                       2.9015e-02,  9.0789e-03, -1.0596e-02,  9.9497e-03, -4.5617e-02,\n",
              "                       3.6211e-02, -8.2790e-03,  6.7389e-03, -3.3664e-02,  5.9245e-03,\n",
              "                      -1.0439e-02,  1.7234e-02,  2.8533e-02, -3.8441e-02, -1.8447e-02,\n",
              "                      -2.0520e-02,  3.2649e-02,  2.4596e-03,  2.3264e-02, -4.2798e-03,\n",
              "                       2.2988e-02,  1.1648e-03, -6.5390e-04, -4.3482e-02,  2.8507e-02,\n",
              "                      -7.5815e-03, -1.4010e-02,  1.6472e-02,  2.5153e-02,  2.5684e-03,\n",
              "                      -2.7428e-02,  3.2899e-02, -3.9908e-03, -2.3535e-04, -2.4389e-02,\n",
              "                      -3.5262e-03,  1.9998e-02, -1.8764e-03,  1.6304e-02, -3.7921e-02,\n",
              "                      -3.4556e-03, -8.3903e-03,  9.1657e-03,  2.5421e-02, -2.1789e-03,\n",
              "                       1.2197e-02, -9.2780e-03, -8.8843e-03,  2.1391e-02, -1.4134e-02,\n",
              "                      -1.3496e-02, -7.7074e-03,  1.6316e-02, -5.5969e-03,  1.7849e-03,\n",
              "                      -1.7333e-03, -2.5834e-02, -2.3823e-03,  1.2918e-03,  4.2097e-02,\n",
              "                      -1.7888e-02, -2.1534e-02,  5.2577e-03,  1.7583e-02, -5.0156e-03,\n",
              "                      -5.4701e-03, -1.7426e-02,  2.0434e-02,  3.1692e-03,  2.4641e-03,\n",
              "                      -2.7609e-02, -1.7080e-05,  6.9475e-03, -2.1892e-02, -6.0555e-03,\n",
              "                      -4.9384e-04, -1.6153e-02, -5.8456e-03, -1.9522e-02, -3.1316e-02,\n",
              "                      -4.4466e-02,  3.0385e-02,  3.9515e-02,  3.5009e-03,  8.8127e-03,\n",
              "                      -2.1304e-02,  2.4574e-03, -7.9216e-03, -1.3567e-03, -5.3518e-03,\n",
              "                       1.4232e-02,  1.0472e-04, -1.2032e-02,  2.2230e-02,  1.1070e-02,\n",
              "                      -1.3278e-02, -1.2781e-02, -7.0432e-03, -1.0630e-02,  2.9404e-02,\n",
              "                       2.7555e-02,  7.3465e-03,  8.5413e-04, -1.6293e-02, -2.6771e-02,\n",
              "                       1.8914e-02,  1.6451e-02,  2.3321e-02, -3.2963e-02, -4.0298e-03,\n",
              "                      -3.2860e-02, -7.1805e-03, -1.8446e-03,  2.6534e-02,  1.9441e-02,\n",
              "                       4.0268e-04, -3.6970e-02,  4.1942e-03,  1.5384e-03, -3.3208e-02,\n",
              "                      -2.2610e-02,  4.3943e-02,  3.5897e-03,  5.5495e-03,  2.2570e-02,\n",
              "                      -4.2598e-02,  2.8855e-02, -9.9548e-03, -4.2002e-02, -1.2467e-03,\n",
              "                       4.1676e-02,  3.8312e-03, -1.4930e-02,  3.0773e-03, -5.0826e-03,\n",
              "                       1.0079e-02,  2.9801e-02, -3.8781e-02, -3.6669e-03,  1.9806e-02,\n",
              "                       2.9700e-02, -3.1689e-02, -1.8812e-02, -4.7925e-03, -2.3776e-02,\n",
              "                      -6.4373e-03,  2.0995e-02,  8.9053e-03,  2.8010e-02, -4.1653e-02,\n",
              "                       2.6934e-02,  2.0691e-02, -1.3667e-02,  2.1352e-02, -1.4837e-02,\n",
              "                       4.7883e-03,  6.1346e-03, -1.3403e-02, -8.8436e-03, -3.7620e-03,\n",
              "                       5.7417e-02,  3.8085e-02,  2.4937e-02, -2.2594e-02,  2.0826e-02,\n",
              "                       1.9436e-02,  2.0319e-02,  2.0467e-03,  2.0212e-02,  6.6676e-03,\n",
              "                       3.1965e-02,  3.0092e-02, -3.5047e-02,  1.0690e-02, -2.2982e-02,\n",
              "                      -6.8384e-03, -2.1471e-03, -5.7956e-02,  1.9555e-03, -9.6664e-04,\n",
              "                       3.9272e-03,  1.4619e-03, -1.2185e-03,  1.8552e-02,  2.1747e-02,\n",
              "                       6.2205e-04,  3.5691e-02, -3.6587e-02, -1.9326e-02, -7.7704e-03,\n",
              "                       9.9622e-03,  1.8297e-02, -1.0117e-02, -1.2490e-02, -1.0700e-02,\n",
              "                      -1.3242e-02,  2.2388e-02, -2.0364e-02,  3.0384e-02, -1.6297e-02,\n",
              "                       4.9763e-03,  1.8925e-02,  1.0520e-02, -1.9110e-02, -6.8098e-03,\n",
              "                      -7.5262e-03, -6.0989e-03,  2.7928e-02, -1.1874e-02,  9.1133e-03,\n",
              "                       1.1409e-02,  2.3599e-02,  6.2011e-03, -2.1949e-02, -3.4915e-02,\n",
              "                      -4.3405e-02, -1.1923e-02,  1.7220e-02,  1.9138e-02, -9.7659e-03,\n",
              "                       2.9381e-02, -3.1066e-02,  7.3303e-02, -1.2185e-02,  3.1172e-03,\n",
              "                      -2.6734e-02,  4.2913e-03,  1.0568e-02,  4.2802e-02, -1.5861e-02,\n",
              "                      -5.8022e-03, -3.3728e-02, -1.4874e-02,  1.7930e-02, -7.1627e-02,\n",
              "                       2.0897e-02,  2.1675e-02, -1.3822e-02,  1.3560e-02,  2.8354e-02,\n",
              "                      -2.4525e-02,  5.3247e-02, -3.5694e-02, -1.0091e-02,  4.4868e-02,\n",
              "                       2.2503e-02, -1.4569e-02, -2.1162e-02, -1.9046e-02,  1.7973e-03,\n",
              "                      -6.2056e-03, -1.7837e-02, -7.7023e-03,  2.5729e-02,  1.9829e-02,\n",
              "                      -9.8338e-03, -7.8283e-03, -6.8180e-03,  2.3149e-02,  2.0862e-03,\n",
              "                       3.8561e-03,  2.4023e-02, -2.2040e-03,  1.6306e-02, -3.0427e-02,\n",
              "                      -1.1209e-02, -5.5083e-02,  1.0788e-02, -3.6705e-03, -9.5660e-03,\n",
              "                       8.2397e-03,  1.3299e-02, -5.5674e-02, -1.5822e-03, -2.9630e-03,\n",
              "                      -2.0692e-02,  3.8218e-03, -2.6345e-02,  3.4891e-03, -1.8790e-02,\n",
              "                       1.9901e-02, -5.3550e-02,  1.9128e-02, -1.8781e-02,  4.4037e-02,\n",
              "                       1.9622e-02,  4.5815e-02])),\n",
              "             ('conv5.4.running_var',\n",
              "              tensor([0.9095, 0.9285, 0.9090, 0.9081, 0.9251, 0.9132, 0.9276, 0.9195, 0.9150,\n",
              "                      0.9153, 0.9081, 0.9093, 0.9288, 0.9102, 0.9178, 0.9124, 0.9114, 0.9152,\n",
              "                      0.9145, 0.9216, 0.9136, 0.9133, 0.9220, 0.9144, 0.9211, 0.9291, 0.9267,\n",
              "                      0.9195, 0.9245, 0.9607, 0.9200, 0.9235, 0.9156, 0.9153, 0.9136, 0.9132,\n",
              "                      0.9101, 0.9119, 0.9122, 0.9145, 0.9124, 0.9184, 0.9179, 0.9146, 0.9415,\n",
              "                      0.9129, 0.9258, 0.9269, 0.9266, 0.9109, 0.9125, 0.9125, 0.9321, 0.9178,\n",
              "                      0.9127, 0.9125, 0.9116, 0.9153, 0.9133, 0.9247, 0.9186, 0.9180, 0.9205,\n",
              "                      0.9125, 0.9167, 0.9297, 0.9306, 0.9225, 0.9214, 0.9133, 0.9196, 0.9190,\n",
              "                      0.9133, 0.9170, 0.9162, 0.9134, 0.9224, 0.9188, 0.9170, 0.9227, 0.9196,\n",
              "                      0.9142, 0.9089, 0.9181, 0.9244, 0.9203, 0.9102, 0.9125, 0.9253, 0.9154,\n",
              "                      0.9167, 0.9184, 0.9163, 0.9166, 0.9293, 0.9137, 0.9274, 0.9212, 0.9386,\n",
              "                      0.9147, 0.9127, 0.9251, 0.9368, 0.9179, 0.9105, 0.9167, 0.9097, 0.9115,\n",
              "                      0.9126, 0.9177, 0.9165, 0.9245, 0.9135, 0.9113, 0.9118, 0.9242, 0.9167,\n",
              "                      0.9209, 0.9342, 0.9170, 0.9115, 0.9158, 0.9165, 0.9100, 0.9296, 0.9212,\n",
              "                      0.9262, 0.9124, 0.9107, 0.9181, 0.9102, 0.9146, 0.9287, 0.9182, 0.9140,\n",
              "                      0.9121, 0.9102, 0.9215, 0.9382, 0.9156, 0.9123, 0.9143, 0.9187, 0.9122,\n",
              "                      0.9095, 0.9236, 0.9167, 0.9245, 0.9109, 0.9207, 0.9136, 0.9135, 0.9170,\n",
              "                      0.9103, 0.9162, 0.9260, 0.9145, 0.9243, 0.9234, 0.9101, 0.9180, 0.9116,\n",
              "                      0.9142, 0.9233, 0.9119, 0.9138, 0.9215, 0.9123, 0.9188, 0.9110, 0.9143,\n",
              "                      0.9168, 0.9089, 0.9149, 0.9139, 0.9109, 0.9138, 0.9162, 0.9160, 0.9154,\n",
              "                      0.9266, 0.9143, 0.9255, 0.9146, 0.9158, 0.9143, 0.9165, 0.9137, 0.9138,\n",
              "                      0.9087, 0.9301, 0.9186, 0.9142, 0.9254, 0.9254, 0.9240, 0.9099, 0.9256,\n",
              "                      0.9128, 0.9328, 0.9128, 0.9181, 0.9122, 0.9166, 0.9142, 0.9198, 0.9176,\n",
              "                      0.9197, 0.9115, 0.9148, 0.9157, 0.9162, 0.9213, 0.9103, 0.9110, 0.9422,\n",
              "                      0.9158, 0.9102, 0.9121, 0.9249, 0.9216, 0.9327, 0.9151, 0.9403, 0.9111,\n",
              "                      0.9240, 0.9160, 0.9142, 0.9189, 0.9164, 0.9121, 0.9174, 0.9295, 0.9106,\n",
              "                      0.9121, 0.9258, 0.9272, 0.9169, 0.9219, 0.9138, 0.9079, 0.9197, 0.9083,\n",
              "                      0.9156, 0.9118, 0.9170, 0.9149, 0.9122, 0.9239, 0.9113, 0.9102, 0.9137,\n",
              "                      0.9135, 0.9189, 0.9170, 0.9127, 0.9145, 0.9121, 0.9145, 0.9123, 0.9106,\n",
              "                      0.9094, 0.9112, 0.9206, 0.9165, 0.9172, 0.9222, 0.9181, 0.9186, 0.9193,\n",
              "                      0.9197, 0.9250, 0.9203, 0.9075, 0.9100, 0.9081, 0.9148, 0.9116, 0.9185,\n",
              "                      0.9225, 0.9121, 0.9218, 0.9265, 0.9119, 0.9157, 0.9115, 0.9137, 0.9219,\n",
              "                      0.9147, 0.9131, 0.9143, 0.9093, 0.9193, 0.9161, 0.9101, 0.9239, 0.9163,\n",
              "                      0.9115, 0.9161, 0.9128, 0.9144, 0.9211, 0.9104, 0.9134, 0.9103, 0.9231,\n",
              "                      0.9210, 0.9113, 0.9092, 0.9166, 0.9118, 0.9123, 0.9123, 0.9178, 0.9184,\n",
              "                      0.9183, 0.9130, 0.9230, 0.9093, 0.9196, 0.9149, 0.9125, 0.9287, 0.9120,\n",
              "                      0.9102, 0.9097, 0.9169, 0.9142, 0.9164, 0.9122, 0.9157, 0.9145, 0.9149,\n",
              "                      0.9218, 0.9259, 0.9216, 0.9127, 0.9183, 0.9255, 0.9224, 0.9126, 0.9148,\n",
              "                      0.9223, 0.9120, 0.9168, 0.9159, 0.9223, 0.9171, 0.9162, 0.9138, 0.9142,\n",
              "                      0.9205, 0.9198, 0.9186, 0.9106, 0.9123, 0.9185, 0.9203, 0.9186, 0.9128,\n",
              "                      0.9269, 0.9127, 0.9254, 0.9275, 0.9229, 0.9170, 0.9160, 0.9140, 0.9245,\n",
              "                      0.9195, 0.9106, 0.9122, 0.9151, 0.9113, 0.9180, 0.9140, 0.9136, 0.9329,\n",
              "                      0.9136, 0.9133, 0.9144, 0.9131, 0.9179, 0.9098, 0.9125, 0.9148, 0.9160,\n",
              "                      0.9123, 0.9133, 0.9680, 0.9151, 0.9103, 0.9105, 0.9148, 0.9120, 0.9376,\n",
              "                      0.9239, 0.9162, 0.9099, 0.9180, 0.9214, 0.9097, 0.9117, 0.9116, 0.9102,\n",
              "                      0.9175, 0.9198, 0.9098, 0.9204, 0.9108, 0.9195, 0.9299, 0.9131, 0.9147,\n",
              "                      0.9145, 0.9246, 0.9194, 0.9257, 0.9119, 0.9144, 0.9167, 0.9233, 0.9231,\n",
              "                      0.9284, 0.9137, 0.9142, 0.9174, 0.9214, 0.9123, 0.9103, 0.9151, 0.9126,\n",
              "                      0.9144, 0.9098, 0.9113, 0.9272, 0.9123, 0.9123, 0.9091, 0.9148, 0.9101,\n",
              "                      0.9105, 0.9102, 0.9203, 0.9183, 0.9171, 0.9152, 0.9180, 0.9139, 0.9091,\n",
              "                      0.9213, 0.9117, 0.9300, 0.9277, 0.9100, 0.9231, 0.9169, 0.9123, 0.9132,\n",
              "                      0.9108, 0.9192, 0.9155, 0.9163, 0.9122, 0.9297, 0.9126, 0.9222, 0.9131,\n",
              "                      0.9217, 0.9122, 0.9090, 0.9131, 0.9226, 0.9096, 0.9099, 0.9274, 0.9184,\n",
              "                      0.9196, 0.9125, 0.9204, 0.9199, 0.9223, 0.9167, 0.9250, 0.9116, 0.9167,\n",
              "                      0.9239, 0.9267, 0.9117, 0.9214, 0.9134, 0.9116, 0.9105, 0.9155, 0.9316,\n",
              "                      0.9154, 0.9132, 0.9105, 0.9101, 0.9195, 0.9185, 0.9192, 0.9224, 0.9149,\n",
              "                      0.9129, 0.9099, 0.9171, 0.9139, 0.9156, 0.9165, 0.9145, 0.9218])),\n",
              "             ('conv5.4.num_batches_tracked', tensor(1)),\n",
              "             ('classifier.0.weight',\n",
              "              tensor([[ 0.0352, -0.0267, -0.0012,  ...,  0.0065,  0.0148,  0.0053],\n",
              "                      [-0.0019, -0.0314,  0.0199,  ...,  0.0352,  0.0300,  0.0240],\n",
              "                      [ 0.0158,  0.0001,  0.0270,  ...,  0.0218, -0.0226,  0.0169],\n",
              "                      ...,\n",
              "                      [ 0.0332, -0.0280, -0.0070,  ...,  0.0329, -0.0175,  0.0011],\n",
              "                      [-0.0263,  0.0224,  0.0279,  ...,  0.0241, -0.0320,  0.0050],\n",
              "                      [-0.0195, -0.0295, -0.0123,  ..., -0.0184, -0.0071,  0.0182]])),\n",
              "             ('classifier.0.bias', tensor([0., 0., 0.,  ..., 0., 0., 0.])),\n",
              "             ('classifier.3.weight',\n",
              "              tensor([[-0.0061,  0.0251, -0.0140,  ...,  0.0035, -0.0239,  0.0069],\n",
              "                      [ 0.0034,  0.0225,  0.0083,  ...,  0.0163, -0.0152,  0.0075],\n",
              "                      [-0.0131,  0.0130, -0.0189,  ..., -0.0237, -0.0262, -0.0225],\n",
              "                      ...,\n",
              "                      [-0.0082, -0.0258,  0.0012,  ..., -0.0042, -0.0231,  0.0251],\n",
              "                      [ 0.0101,  0.0237, -0.0188,  ...,  0.0200, -0.0144, -0.0152],\n",
              "                      [ 0.0256, -0.0251, -0.0160,  ...,  0.0145, -0.0131,  0.0080]])),\n",
              "             ('classifier.3.bias', tensor([0., 0., 0.,  ..., 0., 0., 0.])),\n",
              "             ('classifier.6.weight',\n",
              "              tensor([[ 0.0195,  0.0280, -0.0240,  ...,  0.0289, -0.0224, -0.0180],\n",
              "                      [-0.0012,  0.0058,  0.0122,  ...,  0.0285,  0.0123,  0.0272],\n",
              "                      [-0.0256, -0.0283,  0.0301,  ...,  0.0330, -0.0061, -0.0268],\n",
              "                      ...,\n",
              "                      [-0.0330,  0.0320,  0.0044,  ...,  0.0099, -0.0238, -0.0059],\n",
              "                      [ 0.0309, -0.0283, -0.0133,  ...,  0.0173,  0.0309, -0.0345],\n",
              "                      [-0.0335, -0.0182,  0.0299,  ...,  0.0068, -0.0222,  0.0007]])),\n",
              "             ('classifier.6.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]))])"
            ]
          },
          "metadata": {},
          "execution_count": 15
        }
      ],
      "source": [
        "# Inspect the model's parameters and buffers.\n",
        "# NOTE(review): this renders the ENTIRE state dict as cell output (thousands of lines);\n",
        "# consider `{k: v.shape for k, v in model.state_dict().items()}` to keep the notebook readable.\n",
        "model.state_dict()"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "p_o6dQ3VaGiW"
      },
      "source": [
        "# 设置交叉熵损失函数，SGD优化器"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 16,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:40.023837Z",
          "start_time": "2025-06-26T01:43:40.019952Z"
        },
        "id": "ycFLHrlbaGiW",
        "outputId": "dcd6aea9-2884-4934-ab13-e7bbd5ffe533",
        "colab": {
          "base_uri": "https://localhost:8080/"
        }
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "损失函数: CrossEntropyLoss()\n"
          ]
        }
      ],
      "source": [
        "\n",
        "# Define the loss function for training\n",
        "loss_fn = nn.CrossEntropyLoss()  # cross-entropy loss for multi-class classification; applies log-softmax internally and expects integer class labels (0-9) directly -- no manual one-hot encoding is needed\n",
        "\n",
        "print(\"损失函数:\", loss_fn)\n",
        "\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 17,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:40.035848Z",
          "start_time": "2025-06-26T01:43:40.032419Z"
        },
        "id": "CVErMt43aGiW"
      },
      "outputs": [],
      "source": [
        "# Re-instantiate a fresh VGG11 model (class defined earlier in this notebook)\n",
        "model = VGG11_CIFAR10()\n",
        "\n",
        "optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.9)  # SGD optimizer: learning rate 0.001, momentum 0.9 (previous comment said lr 0.01, which did not match the code)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 18,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.732814Z",
          "start_time": "2025-06-26T01:43:40.035848Z"
        },
        "id": "Topc5Kb8aGiW",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 423,
          "referenced_widgets": [
            "31495146dd2d4fc6b5bea2977be25bd1",
            "4f31ae9bd93d4155a315518ace1d2530",
            "77b2c48a2d61416e9cd464ab8e915b75",
            "1df35024086f47bc8aaeb61009815e12",
            "4d36bce3d34f4b689e4d81de6b5c8011",
            "03956e0d53e74f8b90f369f327e51743",
            "71e44243e64a4a56821262260dc81b1d",
            "8f7e0f16738d49ca915b30faa7e4893f",
            "a5da3c3f5e064606b335c5528969da6f",
            "07a5e9ccc65a424eb00c30dd8ac87e2d",
            "7f3f78eb4b8c4529915784703edc1ea8"
          ]
        },
        "outputId": "e773a00b-8ec8-47fa-8c1a-0a7e8c2e7e92"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "使用设备: cpu\n",
            "训练开始，共训练35200步\n"
          ]
        },
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "  0%|          | 0/35200 [00:00<?, ?it/s]"
            ],
            "application/vnd.jupyter.widget-view+json": {
              "version_major": 2,
              "version_minor": 0,
              "model_id": "31495146dd2d4fc6b5bea2977be25bd1"
            }
          },
          "metadata": {}
        },
        {
          "output_type": "error",
          "ename": "KeyboardInterrupt",
          "evalue": "",
          "traceback": [
            "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
            "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
            "\u001b[0;32m/tmp/ipython-input-18-3717610145.py\u001b[0m in \u001b[0;36m<cell line: 0>\u001b[0;34m()\u001b[0m\n\u001b[1;32m      6\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      7\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 8\u001b[0;31m model, history = train_classification_model(model, train_loader, val_loader, loss_fn, optimizer, device, num_epochs=50,\n\u001b[0m\u001b[1;32m      9\u001b[0m early_stopping=early_stopping, model_saver=model_saver, tensorboard_logger=None)\n\u001b[1;32m     10\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/content/deeplearning_train.py\u001b[0m in \u001b[0;36mtrain_classification_model\u001b[0;34m(model, train_loader, val_loader, criterion, optimizer, device, num_epochs, tensorboard_logger, model_saver, early_stopping, eval_step)\u001b[0m\n\u001b[1;32m    208\u001b[0m                 \u001b[0;31m# 评估\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    209\u001b[0m                 \u001b[0;32mif\u001b[0m \u001b[0mglobal_step\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0meval_step\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 210\u001b[0;31m                     \u001b[0mval_acc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mval_loss\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mevaluate_classification_model\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mval_loader\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdevice\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mcriterion\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    211\u001b[0m                     record_dict[\"val\"].append({\n\u001b[1;32m    212\u001b[0m                         \u001b[0;34m\"loss\"\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mval_loss\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"acc\"\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mval_acc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"step\"\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mglobal_step\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/content/deeplearning_train.py\u001b[0m in \u001b[0;36mevaluate_classification_model\u001b[0;34m(model, data_loader, device, criterion)\u001b[0m\n\u001b[1;32m    112\u001b[0m         \u001b[0;32mfor\u001b[0m \u001b[0mimages\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabels\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mdata_loader\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    113\u001b[0m             \u001b[0mimages\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mimages\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdevice\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabels\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdevice\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 114\u001b[0;31m             \u001b[0moutputs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mimages\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    115\u001b[0m             \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpredicted\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmax\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0moutputs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m#torch.max(outputs.data, 1)返回两个值，第一个是最大值，第二个是最大值的索引\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    116\u001b[0m             \u001b[0mtotal\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mlabels\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m 
\u001b[0;31m#labels.size(0)返回标签的维度，这里返回的是batch_size，因为每个批次有batch_size个标签\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m   1737\u001b[0m             \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_compiled_call_impl\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m  \u001b[0;31m# type: ignore[misc]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1738\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1739\u001b[0;31m             \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call_impl\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1740\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1741\u001b[0m     \u001b[0;31m# torchrec tests the code consistency with the following code\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m   1748\u001b[0m                 \u001b[0;32mor\u001b[0m \u001b[0m_global_backward_pre_hooks\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0m_global_backward_hooks\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1749\u001b[0m                 or _global_forward_hooks or _global_forward_pre_hooks):\n\u001b[0;32m-> 1750\u001b[0;31m             \u001b[0;32mreturn\u001b[0m \u001b[0mforward_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1751\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1752\u001b[0m         \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/tmp/ipython-input-11-2918853683.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m     89\u001b[0m         \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconv2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m  \u001b[0;31m# 第二组卷积\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     90\u001b[0m         \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconv3\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m  \u001b[0;31m# 第三组卷积\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 91\u001b[0;31m         \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconv4\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m  \u001b[0;31m# 第四组卷积\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     92\u001b[0m         \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconv5\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m  \u001b[0;31m# 第五组卷积\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     93\u001b[0m         \u001b[0mx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mview\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m  \u001b[0;31m# 展平成一维\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m   1737\u001b[0m             \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_compiled_call_impl\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m  \u001b[0;31m# type: ignore[misc]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1738\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1739\u001b[0;31m             \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call_impl\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1740\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1741\u001b[0m     \u001b[0;31m# torchrec tests the code consistency with the following code\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m   1748\u001b[0m                 \u001b[0;32mor\u001b[0m \u001b[0m_global_backward_pre_hooks\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0m_global_backward_hooks\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1749\u001b[0m                 or _global_forward_hooks or _global_forward_pre_hooks):\n\u001b[0;32m-> 1750\u001b[0;31m             \u001b[0;32mreturn\u001b[0m \u001b[0mforward_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1751\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1752\u001b[0m         \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/nn/modules/container.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m    248\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    249\u001b[0m         \u001b[0;32mfor\u001b[0m \u001b[0mmodule\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 250\u001b[0;31m             \u001b[0minput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodule\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    251\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    252\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m   1737\u001b[0m             \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_compiled_call_impl\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m  \u001b[0;31m# type: ignore[misc]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1738\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1739\u001b[0;31m             \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call_impl\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1740\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1741\u001b[0m     \u001b[0;31m# torchrec tests the code consistency with the following code\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m   1748\u001b[0m                 \u001b[0;32mor\u001b[0m \u001b[0m_global_backward_pre_hooks\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0m_global_backward_hooks\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1749\u001b[0m                 or _global_forward_hooks or _global_forward_pre_hooks):\n\u001b[0;32m-> 1750\u001b[0;31m             \u001b[0;32mreturn\u001b[0m \u001b[0mforward_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1751\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1752\u001b[0m         \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/nn/modules/conv.py\u001b[0m in \u001b[0;36mforward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m    552\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    553\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mTensor\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0mTensor\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 554\u001b[0;31m         \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_conv_forward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mweight\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbias\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    555\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    556\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
            "\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/nn/modules/conv.py\u001b[0m in \u001b[0;36m_conv_forward\u001b[0;34m(self, input, weight, bias)\u001b[0m\n\u001b[1;32m    547\u001b[0m                 \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgroups\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    548\u001b[0m             )\n\u001b[0;32m--> 549\u001b[0;31m         return F.conv2d(\n\u001b[0m\u001b[1;32m    550\u001b[0m             \u001b[0minput\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mweight\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbias\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstride\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpadding\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdilation\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgroups\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    551\u001b[0m         )\n",
            "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
          ]
        }
      ],
      "source": [
        "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
        "print(f\"使用设备: {device}\")\n",
        "model = model.to(device) # move the model to the GPU when one is available\n",
        "# Early stopping: patience=5, delta=0.001 — exact semantics are defined by\n",
        "# deeplearning_train.EarlyStopping (presumably: stop after 5 checks without\n",
        "# an improvement larger than 0.001 — confirm in that module)\n",
        "early_stopping=EarlyStopping(patience=5, delta=0.001)\n",
        "# Checkpoints go under model_weights/; save_best_only=True presumably keeps\n",
        "# only the best-scoring checkpoint — see deeplearning_train.ModelSaver\n",
        "model_saver=ModelSaver(save_dir='model_weights', save_best_only=True)\n",
        "\n",
        "\n",
        "# Train for up to 50 epochs; early stopping may end the run sooner\n",
        "model, history = train_classification_model(model, train_loader, val_loader, loss_fn, optimizer, device, num_epochs=50,\n",
        "early_stopping=early_stopping, model_saver=model_saver, tensorboard_logger=None)\n",
        "\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.737721Z",
          "start_time": "2025-06-26T01:45:37.732814Z"
        },
        "id": "zJvmR6bRaGiW",
        "collapsed": true
      },
      "outputs": [],
      "source": [
        "# Show the most recent (up to) 100 training records.\n",
        "# Fix: the original slice [-100:-1] silently dropped the latest entry.\n",
        "history['train'][-100:]"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.741226Z",
          "start_time": "2025-06-26T01:45:37.737721Z"
        },
        "id": "6OmEkKUTaGiW",
        "collapsed": true
      },
      "outputs": [],
      "source": [
        "# Show the most recent (up to) 1000 validation records.\n",
        "# Fix: the original slice [-1000:-1] silently dropped the latest entry.\n",
        "history['val'][-1000:]"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.816716Z",
          "start_time": "2025-06-26T01:45:37.744941Z"
        },
        "id": "eFbn-w1LaGiX"
      },
      "outputs": [],
      "source": [
        "plot_learning_curves(history, sample_step=500)  # x-axis is steps"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.818553Z",
          "start_time": "2025-06-26T01:45:37.816716Z"
        },
        "id": "hEf0XLgRaGiX"
      },
      "outputs": [],
      "source": [
        "# Open the './test.7z' archive for reading; the context manager guarantees\n",
        "# the handle is closed even if extraction raises (the original called\n",
        "# a.close() unconditionally after extractall, leaking the handle on error).\n",
        "with py7zr.SevenZipFile(r'./test.7z', 'r') as archive:\n",
        "    # Extract every member into ./competitions/cifar-10/\n",
        "    archive.extractall(path=r'./competitions/cifar-10/')"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "yek4MjF_aGiX"
      },
      "outputs": [],
      "source": [
        "# Imports needed for test-set inference\n",
        "import os\n",
        "import pandas as pd\n",
        "from PIL import Image\n",
        "import torch\n",
        "from torch.utils.data import Dataset, DataLoader\n",
        "from torchvision import transforms\n",
        "import tqdm\n",
        "\n",
        "# Test dataset: unlabeled CIFAR-10 images stored as <numeric id>.png\n",
        "class CIFAR10TestDataset(Dataset):\n",
        "    def __init__(self, img_dir, transform=None):\n",
        "        \"\"\"\n",
        "        Initialize the test dataset.\n",
        "\n",
        "        Args:\n",
        "            img_dir: directory containing the test images (.png files)\n",
        "            transform: optional preprocessing transform applied to each image\n",
        "        \"\"\"\n",
        "        self.img_dir = img_dir\n",
        "        self.transform = transform\n",
        "        self.img_files = [f for f in os.listdir(img_dir) if f.endswith('.png')]\n",
        "\n",
        "    def __len__(self):\n",
        "        # Number of .png files discovered in img_dir\n",
        "        return len(self.img_files)\n",
        "\n",
        "    def __getitem__(self, idx):\n",
        "        # Load the idx-th image and force a 3-channel RGB representation\n",
        "        img_path = os.path.join(self.img_dir, self.img_files[idx])\n",
        "        image = Image.open(img_path).convert('RGB')\n",
        "\n",
        "        if self.transform:\n",
        "            image = self.transform(image)\n",
        "\n",
        "        # Image id = file name without the extension (e.g. '123.png' -> 123)\n",
        "        img_id = int(os.path.splitext(self.img_files[idx])[0])\n",
        "\n",
        "        return image, img_id\n",
        "\n",
        "# Define the prediction routine\n",
        "def predict_test_set(model, img_dir, labels_file, device, batch_size=64):\n",
        "    \"\"\"\n",
        "    Run inference over the test set and write a submission CSV.\n",
        "\n",
        "    Args:\n",
        "        model: trained classification model\n",
        "        img_dir: directory containing the test images\n",
        "        labels_file: path of the submission template (kept for interface\n",
        "            compatibility; the submission frame is built from scratch, so the\n",
        "            template is no longer read — the original read it and immediately\n",
        "            discarded the result)\n",
        "        device: torch device used for inference\n",
        "        batch_size: DataLoader batch size\n",
        "    \"\"\"\n",
        "    # Same normalization statistics as used at training time\n",
        "    transform = transforms.Compose([\n",
        "        transforms.ToTensor(),\n",
        "        transforms.Normalize((0.4917, 0.4823, 0.4467), (0.2024, 0.1995, 0.2010))\n",
        "    ])\n",
        "\n",
        "    # Build the dataset/loader; shuffle=False keeps ids and batches aligned\n",
        "    test_dataset = CIFAR10TestDataset(img_dir, transform=transform)\n",
        "    test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False, num_workers=4)\n",
        "\n",
        "    # Evaluation mode: disables dropout / uses running batch-norm statistics\n",
        "    model.eval()\n",
        "\n",
        "    predictions = {}\n",
        "\n",
        "    print(\"正在预测测试集...\")\n",
        "    with torch.no_grad():  # no gradients needed for inference\n",
        "        for images, img_ids in tqdm.tqdm(test_loader, desc=\"预测进度\"):\n",
        "            images = images.to(device)\n",
        "            outputs = model(images)\n",
        "            _, predicted = torch.max(outputs, 1)  # index of max logit = predicted class\n",
        "\n",
        "            # Record the prediction for every image in the batch\n",
        "            for i, img_id in enumerate(img_ids):\n",
        "                predictions[img_id.item()] = predicted[i].item()\n",
        "\n",
        "    # CIFAR-10 class names, indexed by numeric label\n",
        "    class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']\n",
        "\n",
        "    # Map numeric predictions to class-name strings\n",
        "    labeled_predictions = {img_id: class_names[pred] for img_id, pred in predictions.items()}\n",
        "\n",
        "    # Build the submission frame directly from the predictions\n",
        "    submission_df = pd.DataFrame({\n",
        "        'id': list(labeled_predictions.keys()),\n",
        "        'label': list(labeled_predictions.values())\n",
        "    })\n",
        "    # Sort rows by image id for a stable, readable submission\n",
        "    submission_df = submission_df.sort_values(by='id')\n",
        "\n",
        "    # Sanity check: every image id must appear exactly once\n",
        "    has_duplicates = submission_df['id'].duplicated().any()\n",
        "    print(f\"id列是否有重复值: {has_duplicates}\")\n",
        "    # Save the predictions\n",
        "    output_file = 'cifar10_submission.csv'\n",
        "    submission_df.to_csv(output_file, index=False)\n",
        "    print(f\"预测完成，结果已保存至 {output_file}\")\n",
        "\n",
        "# Run test-set prediction\n",
        "img_dir = r\"competitions/cifar-10/test\"  # directory of extracted test images\n",
        "labels_file = r\"./sampleSubmission.csv\"  # submission template path\n",
        "predict_test_set(model, img_dir, labels_file, device, batch_size=128)\n"
      ]
    }
  ],
  "metadata": {
    "kernelspec": {
      "display_name": "Python 3",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.12.3"
    },
    "colab": {
      "provenance": [],
      "gpuType": "T4"
    },
    "accelerator": "GPU",
    "widgets": {
      "application/vnd.jupyter.widget-state+json": {
        "31495146dd2d4fc6b5bea2977be25bd1": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "HBoxModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HBoxModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HBoxView",
            "box_style": "",
            "children": [
              "IPY_MODEL_4f31ae9bd93d4155a315518ace1d2530",
              "IPY_MODEL_77b2c48a2d61416e9cd464ab8e915b75",
              "IPY_MODEL_1df35024086f47bc8aaeb61009815e12"
            ],
            "layout": "IPY_MODEL_4d36bce3d34f4b689e4d81de6b5c8011"
          }
        },
        "4f31ae9bd93d4155a315518ace1d2530": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "HTMLModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HTMLModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HTMLView",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_03956e0d53e74f8b90f369f327e51743",
            "placeholder": "​",
            "style": "IPY_MODEL_71e44243e64a4a56821262260dc81b1d",
            "value": "  9%"
          }
        },
        "77b2c48a2d61416e9cd464ab8e915b75": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "FloatProgressModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "FloatProgressModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "ProgressView",
            "bar_style": "danger",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_8f7e0f16738d49ca915b30faa7e4893f",
            "max": 35200,
            "min": 0,
            "orientation": "horizontal",
            "style": "IPY_MODEL_a5da3c3f5e064606b335c5528969da6f",
            "value": 3000
          }
        },
        "1df35024086f47bc8aaeb61009815e12": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "HTMLModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HTMLModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HTMLView",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_07a5e9ccc65a424eb00c30dd8ac87e2d",
            "placeholder": "​",
            "style": "IPY_MODEL_7f3f78eb4b8c4529915784703edc1ea8",
            "value": " 3000/35200 [1:52:53&lt;18:35:50,  2.08s/it, epoch=4, loss=0.5238, acc=79.69%]"
          }
        },
        "4d36bce3d34f4b689e4d81de6b5c8011": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "03956e0d53e74f8b90f369f327e51743": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "71e44243e64a4a56821262260dc81b1d": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "DescriptionStyleModel",
          "model_module_version": "1.5.0",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "DescriptionStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "description_width": ""
          }
        },
        "8f7e0f16738d49ca915b30faa7e4893f": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "a5da3c3f5e064606b335c5528969da6f": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "ProgressStyleModel",
          "model_module_version": "1.5.0",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "ProgressStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "bar_color": null,
            "description_width": ""
          }
        },
        "07a5e9ccc65a424eb00c30dd8ac87e2d": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "7f3f78eb4b8c4529915784703edc1ea8": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "DescriptionStyleModel",
          "model_module_version": "1.5.0",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "DescriptionStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "description_width": ""
          }
        }
      }
    }
  },
  "nbformat": 4,
  "nbformat_minor": 0
}