{
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "uGV2VjXF4pNs"
      },
      "source": [
        "# 查看FashionMNIST原始数据格式"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 2,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:32.363026Z",
          "start_time": "2025-06-26T01:43:29.447990Z"
        },
        "id": "3djTfPq64pNt"
      },
      "outputs": [],
      "source": [
        "import torch\n",
        "import torchvision\n",
        "import numpy as np\n",
        "import matplotlib.pyplot as plt\n",
        "from torchvision import datasets, transforms\n",
        "\n",
        "# Local training utilities (PEP 8: one name per line, spaces after commas)\n",
        "from wangdao_deeplearning_train import (\n",
        "    EarlyStopping,\n",
        "    ModelSaver,\n",
        "    train_classification_model,\n",
        "    plot_learning_curves,\n",
        ")\n",
        "from wangdao_deeplearning_train import evaluate_classification_model as evaluate_model\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 5,
      "metadata": {},
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "InceptionNet模型结构:\n",
            "Inception3(\n",
            "  (Conv2d_1a_3x3): BasicConv2d(\n",
            "    (conv): Conv2d(3, 32, kernel_size=(3, 3), stride=(2, 2), bias=False)\n",
            "    (bn): BatchNorm2d(32, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "  )\n",
            "  (Conv2d_2a_3x3): BasicConv2d(\n",
            "    (conv): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), bias=False)\n",
            "    (bn): BatchNorm2d(32, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "  )\n",
            "  (Conv2d_2b_3x3): BasicConv2d(\n",
            "    (conv): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
            "    (bn): BatchNorm2d(64, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "  )\n",
            "  (maxpool1): MaxPool2d(kernel_size=3, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
            "  (Conv2d_3b_1x1): BasicConv2d(\n",
            "    (conv): Conv2d(64, 80, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "    (bn): BatchNorm2d(80, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "  )\n",
            "  (Conv2d_4a_3x3): BasicConv2d(\n",
            "    (conv): Conv2d(80, 192, kernel_size=(3, 3), stride=(1, 1), bias=False)\n",
            "    (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "  )\n",
            "  (maxpool2): MaxPool2d(kernel_size=3, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
            "  (Mixed_5b): InceptionA(\n",
            "    (branch1x1): BasicConv2d(\n",
            "      (conv): Conv2d(192, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(64, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch5x5_1): BasicConv2d(\n",
            "      (conv): Conv2d(192, 48, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(48, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch5x5_2): BasicConv2d(\n",
            "      (conv): Conv2d(48, 64, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2), bias=False)\n",
            "      (bn): BatchNorm2d(64, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_1): BasicConv2d(\n",
            "      (conv): Conv2d(192, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(64, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_2): BasicConv2d(\n",
            "      (conv): Conv2d(64, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(96, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_3): BasicConv2d(\n",
            "      (conv): Conv2d(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(96, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch_pool): BasicConv2d(\n",
            "      (conv): Conv2d(192, 32, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(32, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "  )\n",
            "  (Mixed_5c): InceptionA(\n",
            "    (branch1x1): BasicConv2d(\n",
            "      (conv): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(64, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch5x5_1): BasicConv2d(\n",
            "      (conv): Conv2d(256, 48, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(48, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch5x5_2): BasicConv2d(\n",
            "      (conv): Conv2d(48, 64, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2), bias=False)\n",
            "      (bn): BatchNorm2d(64, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_1): BasicConv2d(\n",
            "      (conv): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(64, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_2): BasicConv2d(\n",
            "      (conv): Conv2d(64, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(96, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_3): BasicConv2d(\n",
            "      (conv): Conv2d(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(96, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch_pool): BasicConv2d(\n",
            "      (conv): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(64, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "  )\n",
            "  (Mixed_5d): InceptionA(\n",
            "    (branch1x1): BasicConv2d(\n",
            "      (conv): Conv2d(288, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(64, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch5x5_1): BasicConv2d(\n",
            "      (conv): Conv2d(288, 48, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(48, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch5x5_2): BasicConv2d(\n",
            "      (conv): Conv2d(48, 64, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2), bias=False)\n",
            "      (bn): BatchNorm2d(64, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_1): BasicConv2d(\n",
            "      (conv): Conv2d(288, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(64, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_2): BasicConv2d(\n",
            "      (conv): Conv2d(64, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(96, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_3): BasicConv2d(\n",
            "      (conv): Conv2d(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(96, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch_pool): BasicConv2d(\n",
            "      (conv): Conv2d(288, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(64, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "  )\n",
            "  (Mixed_6a): InceptionB(\n",
            "    (branch3x3): BasicConv2d(\n",
            "      (conv): Conv2d(288, 384, kernel_size=(3, 3), stride=(2, 2), bias=False)\n",
            "      (bn): BatchNorm2d(384, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_1): BasicConv2d(\n",
            "      (conv): Conv2d(288, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(64, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_2): BasicConv2d(\n",
            "      (conv): Conv2d(64, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(96, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_3): BasicConv2d(\n",
            "      (conv): Conv2d(96, 96, kernel_size=(3, 3), stride=(2, 2), bias=False)\n",
            "      (bn): BatchNorm2d(96, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "  )\n",
            "  (Mixed_6b): InceptionC(\n",
            "    (branch1x1): BasicConv2d(\n",
            "      (conv): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7_1): BasicConv2d(\n",
            "      (conv): Conv2d(768, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(128, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7_2): BasicConv2d(\n",
            "      (conv): Conv2d(128, 128, kernel_size=(1, 7), stride=(1, 1), padding=(0, 3), bias=False)\n",
            "      (bn): BatchNorm2d(128, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7_3): BasicConv2d(\n",
            "      (conv): Conv2d(128, 192, kernel_size=(7, 1), stride=(1, 1), padding=(3, 0), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_1): BasicConv2d(\n",
            "      (conv): Conv2d(768, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(128, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_2): BasicConv2d(\n",
            "      (conv): Conv2d(128, 128, kernel_size=(7, 1), stride=(1, 1), padding=(3, 0), bias=False)\n",
            "      (bn): BatchNorm2d(128, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_3): BasicConv2d(\n",
            "      (conv): Conv2d(128, 128, kernel_size=(1, 7), stride=(1, 1), padding=(0, 3), bias=False)\n",
            "      (bn): BatchNorm2d(128, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_4): BasicConv2d(\n",
            "      (conv): Conv2d(128, 128, kernel_size=(7, 1), stride=(1, 1), padding=(3, 0), bias=False)\n",
            "      (bn): BatchNorm2d(128, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_5): BasicConv2d(\n",
            "      (conv): Conv2d(128, 192, kernel_size=(1, 7), stride=(1, 1), padding=(0, 3), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch_pool): BasicConv2d(\n",
            "      (conv): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "  )\n",
            "  (Mixed_6c): InceptionC(\n",
            "    (branch1x1): BasicConv2d(\n",
            "      (conv): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7_1): BasicConv2d(\n",
            "      (conv): Conv2d(768, 160, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(160, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7_2): BasicConv2d(\n",
            "      (conv): Conv2d(160, 160, kernel_size=(1, 7), stride=(1, 1), padding=(0, 3), bias=False)\n",
            "      (bn): BatchNorm2d(160, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7_3): BasicConv2d(\n",
            "      (conv): Conv2d(160, 192, kernel_size=(7, 1), stride=(1, 1), padding=(3, 0), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_1): BasicConv2d(\n",
            "      (conv): Conv2d(768, 160, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(160, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_2): BasicConv2d(\n",
            "      (conv): Conv2d(160, 160, kernel_size=(7, 1), stride=(1, 1), padding=(3, 0), bias=False)\n",
            "      (bn): BatchNorm2d(160, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_3): BasicConv2d(\n",
            "      (conv): Conv2d(160, 160, kernel_size=(1, 7), stride=(1, 1), padding=(0, 3), bias=False)\n",
            "      (bn): BatchNorm2d(160, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_4): BasicConv2d(\n",
            "      (conv): Conv2d(160, 160, kernel_size=(7, 1), stride=(1, 1), padding=(3, 0), bias=False)\n",
            "      (bn): BatchNorm2d(160, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_5): BasicConv2d(\n",
            "      (conv): Conv2d(160, 192, kernel_size=(1, 7), stride=(1, 1), padding=(0, 3), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch_pool): BasicConv2d(\n",
            "      (conv): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "  )\n",
            "  (Mixed_6d): InceptionC(\n",
            "    (branch1x1): BasicConv2d(\n",
            "      (conv): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7_1): BasicConv2d(\n",
            "      (conv): Conv2d(768, 160, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(160, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7_2): BasicConv2d(\n",
            "      (conv): Conv2d(160, 160, kernel_size=(1, 7), stride=(1, 1), padding=(0, 3), bias=False)\n",
            "      (bn): BatchNorm2d(160, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7_3): BasicConv2d(\n",
            "      (conv): Conv2d(160, 192, kernel_size=(7, 1), stride=(1, 1), padding=(3, 0), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_1): BasicConv2d(\n",
            "      (conv): Conv2d(768, 160, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(160, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_2): BasicConv2d(\n",
            "      (conv): Conv2d(160, 160, kernel_size=(7, 1), stride=(1, 1), padding=(3, 0), bias=False)\n",
            "      (bn): BatchNorm2d(160, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_3): BasicConv2d(\n",
            "      (conv): Conv2d(160, 160, kernel_size=(1, 7), stride=(1, 1), padding=(0, 3), bias=False)\n",
            "      (bn): BatchNorm2d(160, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_4): BasicConv2d(\n",
            "      (conv): Conv2d(160, 160, kernel_size=(7, 1), stride=(1, 1), padding=(3, 0), bias=False)\n",
            "      (bn): BatchNorm2d(160, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_5): BasicConv2d(\n",
            "      (conv): Conv2d(160, 192, kernel_size=(1, 7), stride=(1, 1), padding=(0, 3), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch_pool): BasicConv2d(\n",
            "      (conv): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "  )\n",
            "  (Mixed_6e): InceptionC(\n",
            "    (branch1x1): BasicConv2d(\n",
            "      (conv): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7_1): BasicConv2d(\n",
            "      (conv): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7_2): BasicConv2d(\n",
            "      (conv): Conv2d(192, 192, kernel_size=(1, 7), stride=(1, 1), padding=(0, 3), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7_3): BasicConv2d(\n",
            "      (conv): Conv2d(192, 192, kernel_size=(7, 1), stride=(1, 1), padding=(3, 0), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_1): BasicConv2d(\n",
            "      (conv): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_2): BasicConv2d(\n",
            "      (conv): Conv2d(192, 192, kernel_size=(7, 1), stride=(1, 1), padding=(3, 0), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_3): BasicConv2d(\n",
            "      (conv): Conv2d(192, 192, kernel_size=(1, 7), stride=(1, 1), padding=(0, 3), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_4): BasicConv2d(\n",
            "      (conv): Conv2d(192, 192, kernel_size=(7, 1), stride=(1, 1), padding=(3, 0), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7dbl_5): BasicConv2d(\n",
            "      (conv): Conv2d(192, 192, kernel_size=(1, 7), stride=(1, 1), padding=(0, 3), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch_pool): BasicConv2d(\n",
            "      (conv): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "  )\n",
            "  (AuxLogits): InceptionAux(\n",
            "    (conv0): BasicConv2d(\n",
            "      (conv): Conv2d(768, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(128, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (conv1): BasicConv2d(\n",
            "      (conv): Conv2d(128, 768, kernel_size=(5, 5), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(768, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (fc): Linear(in_features=768, out_features=1000, bias=True)\n",
            "  )\n",
            "  (Mixed_7a): InceptionD(\n",
            "    (branch3x3_1): BasicConv2d(\n",
            "      (conv): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3_2): BasicConv2d(\n",
            "      (conv): Conv2d(192, 320, kernel_size=(3, 3), stride=(2, 2), bias=False)\n",
            "      (bn): BatchNorm2d(320, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7x3_1): BasicConv2d(\n",
            "      (conv): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7x3_2): BasicConv2d(\n",
            "      (conv): Conv2d(192, 192, kernel_size=(1, 7), stride=(1, 1), padding=(0, 3), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7x3_3): BasicConv2d(\n",
            "      (conv): Conv2d(192, 192, kernel_size=(7, 1), stride=(1, 1), padding=(3, 0), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch7x7x3_4): BasicConv2d(\n",
            "      (conv): Conv2d(192, 192, kernel_size=(3, 3), stride=(2, 2), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "  )\n",
            "  (Mixed_7b): InceptionE(\n",
            "    (branch1x1): BasicConv2d(\n",
            "      (conv): Conv2d(1280, 320, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(320, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3_1): BasicConv2d(\n",
            "      (conv): Conv2d(1280, 384, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(384, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3_2a): BasicConv2d(\n",
            "      (conv): Conv2d(384, 384, kernel_size=(1, 3), stride=(1, 1), padding=(0, 1), bias=False)\n",
            "      (bn): BatchNorm2d(384, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3_2b): BasicConv2d(\n",
            "      (conv): Conv2d(384, 384, kernel_size=(3, 1), stride=(1, 1), padding=(1, 0), bias=False)\n",
            "      (bn): BatchNorm2d(384, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_1): BasicConv2d(\n",
            "      (conv): Conv2d(1280, 448, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(448, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_2): BasicConv2d(\n",
            "      (conv): Conv2d(448, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(384, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_3a): BasicConv2d(\n",
            "      (conv): Conv2d(384, 384, kernel_size=(1, 3), stride=(1, 1), padding=(0, 1), bias=False)\n",
            "      (bn): BatchNorm2d(384, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_3b): BasicConv2d(\n",
            "      (conv): Conv2d(384, 384, kernel_size=(3, 1), stride=(1, 1), padding=(1, 0), bias=False)\n",
            "      (bn): BatchNorm2d(384, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch_pool): BasicConv2d(\n",
            "      (conv): Conv2d(1280, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "  )\n",
            "  (Mixed_7c): InceptionE(\n",
            "    (branch1x1): BasicConv2d(\n",
            "      (conv): Conv2d(2048, 320, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(320, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3_1): BasicConv2d(\n",
            "      (conv): Conv2d(2048, 384, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(384, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3_2a): BasicConv2d(\n",
            "      (conv): Conv2d(384, 384, kernel_size=(1, 3), stride=(1, 1), padding=(0, 1), bias=False)\n",
            "      (bn): BatchNorm2d(384, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3_2b): BasicConv2d(\n",
            "      (conv): Conv2d(384, 384, kernel_size=(3, 1), stride=(1, 1), padding=(1, 0), bias=False)\n",
            "      (bn): BatchNorm2d(384, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_1): BasicConv2d(\n",
            "      (conv): Conv2d(2048, 448, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(448, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_2): BasicConv2d(\n",
            "      (conv): Conv2d(448, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(384, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_3a): BasicConv2d(\n",
            "      (conv): Conv2d(384, 384, kernel_size=(1, 3), stride=(1, 1), padding=(0, 1), bias=False)\n",
            "      (bn): BatchNorm2d(384, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch3x3dbl_3b): BasicConv2d(\n",
            "      (conv): Conv2d(384, 384, kernel_size=(3, 1), stride=(1, 1), padding=(1, 0), bias=False)\n",
            "      (bn): BatchNorm2d(384, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "    (branch_pool): BasicConv2d(\n",
            "      (conv): Conv2d(2048, 192, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
            "      (bn): BatchNorm2d(192, eps=0.001, momentum=0.1, affine=True, track_running_stats=True)\n",
            "    )\n",
            "  )\n",
            "  (avgpool): AdaptiveAvgPool2d(output_size=(1, 1))\n",
            "  (dropout): Dropout(p=0.5, inplace=False)\n",
            "  (fc): Linear(in_features=2048, out_features=1000, bias=True)\n",
            ")\n",
            "\n",
            "InceptionNet模型参数统计:\n",
            "----------------------------------------------------------------\n",
            "        Layer (type)               Output Shape         Param #\n",
            "================================================================\n",
            "            Conv2d-1         [-1, 32, 149, 149]             864\n",
            "       BatchNorm2d-2         [-1, 32, 149, 149]              64\n",
            "       BasicConv2d-3         [-1, 32, 149, 149]               0\n",
            "            Conv2d-4         [-1, 32, 147, 147]           9,216\n",
            "       BatchNorm2d-5         [-1, 32, 147, 147]              64\n",
            "       BasicConv2d-6         [-1, 32, 147, 147]               0\n",
            "            Conv2d-7         [-1, 64, 147, 147]          18,432\n",
            "       BatchNorm2d-8         [-1, 64, 147, 147]             128\n",
            "       BasicConv2d-9         [-1, 64, 147, 147]               0\n",
            "        MaxPool2d-10           [-1, 64, 73, 73]               0\n",
            "           Conv2d-11           [-1, 80, 73, 73]           5,120\n",
            "      BatchNorm2d-12           [-1, 80, 73, 73]             160\n",
            "      BasicConv2d-13           [-1, 80, 73, 73]               0\n",
            "           Conv2d-14          [-1, 192, 71, 71]         138,240\n",
            "      BatchNorm2d-15          [-1, 192, 71, 71]             384\n",
            "      BasicConv2d-16          [-1, 192, 71, 71]               0\n",
            "        MaxPool2d-17          [-1, 192, 35, 35]               0\n",
            "           Conv2d-18           [-1, 64, 35, 35]          12,288\n",
            "      BatchNorm2d-19           [-1, 64, 35, 35]             128\n",
            "      BasicConv2d-20           [-1, 64, 35, 35]               0\n",
            "           Conv2d-21           [-1, 48, 35, 35]           9,216\n",
            "      BatchNorm2d-22           [-1, 48, 35, 35]              96\n",
            "      BasicConv2d-23           [-1, 48, 35, 35]               0\n",
            "           Conv2d-24           [-1, 64, 35, 35]          76,800\n",
            "      BatchNorm2d-25           [-1, 64, 35, 35]             128\n",
            "      BasicConv2d-26           [-1, 64, 35, 35]               0\n",
            "           Conv2d-27           [-1, 64, 35, 35]          12,288\n",
            "      BatchNorm2d-28           [-1, 64, 35, 35]             128\n",
            "      BasicConv2d-29           [-1, 64, 35, 35]               0\n",
            "           Conv2d-30           [-1, 96, 35, 35]          55,296\n",
            "      BatchNorm2d-31           [-1, 96, 35, 35]             192\n",
            "      BasicConv2d-32           [-1, 96, 35, 35]               0\n",
            "           Conv2d-33           [-1, 96, 35, 35]          82,944\n",
            "      BatchNorm2d-34           [-1, 96, 35, 35]             192\n",
            "      BasicConv2d-35           [-1, 96, 35, 35]               0\n",
            "           Conv2d-36           [-1, 32, 35, 35]           6,144\n",
            "      BatchNorm2d-37           [-1, 32, 35, 35]              64\n",
            "      BasicConv2d-38           [-1, 32, 35, 35]               0\n",
            "       InceptionA-39          [-1, 256, 35, 35]               0\n",
            "           Conv2d-40           [-1, 64, 35, 35]          16,384\n",
            "      BatchNorm2d-41           [-1, 64, 35, 35]             128\n",
            "      BasicConv2d-42           [-1, 64, 35, 35]               0\n",
            "           Conv2d-43           [-1, 48, 35, 35]          12,288\n",
            "      BatchNorm2d-44           [-1, 48, 35, 35]              96\n",
            "      BasicConv2d-45           [-1, 48, 35, 35]               0\n",
            "           Conv2d-46           [-1, 64, 35, 35]          76,800\n",
            "      BatchNorm2d-47           [-1, 64, 35, 35]             128\n",
            "      BasicConv2d-48           [-1, 64, 35, 35]               0\n",
            "           Conv2d-49           [-1, 64, 35, 35]          16,384\n",
            "      BatchNorm2d-50           [-1, 64, 35, 35]             128\n",
            "      BasicConv2d-51           [-1, 64, 35, 35]               0\n",
            "           Conv2d-52           [-1, 96, 35, 35]          55,296\n",
            "      BatchNorm2d-53           [-1, 96, 35, 35]             192\n",
            "      BasicConv2d-54           [-1, 96, 35, 35]               0\n",
            "           Conv2d-55           [-1, 96, 35, 35]          82,944\n",
            "      BatchNorm2d-56           [-1, 96, 35, 35]             192\n",
            "      BasicConv2d-57           [-1, 96, 35, 35]               0\n",
            "           Conv2d-58           [-1, 64, 35, 35]          16,384\n",
            "      BatchNorm2d-59           [-1, 64, 35, 35]             128\n",
            "      BasicConv2d-60           [-1, 64, 35, 35]               0\n",
            "       InceptionA-61          [-1, 288, 35, 35]               0\n",
            "           Conv2d-62           [-1, 64, 35, 35]          18,432\n",
            "      BatchNorm2d-63           [-1, 64, 35, 35]             128\n",
            "      BasicConv2d-64           [-1, 64, 35, 35]               0\n",
            "           Conv2d-65           [-1, 48, 35, 35]          13,824\n",
            "      BatchNorm2d-66           [-1, 48, 35, 35]              96\n",
            "      BasicConv2d-67           [-1, 48, 35, 35]               0\n",
            "           Conv2d-68           [-1, 64, 35, 35]          76,800\n",
            "      BatchNorm2d-69           [-1, 64, 35, 35]             128\n",
            "      BasicConv2d-70           [-1, 64, 35, 35]               0\n",
            "           Conv2d-71           [-1, 64, 35, 35]          18,432\n",
            "      BatchNorm2d-72           [-1, 64, 35, 35]             128\n",
            "      BasicConv2d-73           [-1, 64, 35, 35]               0\n",
            "           Conv2d-74           [-1, 96, 35, 35]          55,296\n",
            "      BatchNorm2d-75           [-1, 96, 35, 35]             192\n",
            "      BasicConv2d-76           [-1, 96, 35, 35]               0\n",
            "           Conv2d-77           [-1, 96, 35, 35]          82,944\n",
            "      BatchNorm2d-78           [-1, 96, 35, 35]             192\n",
            "      BasicConv2d-79           [-1, 96, 35, 35]               0\n",
            "           Conv2d-80           [-1, 64, 35, 35]          18,432\n",
            "      BatchNorm2d-81           [-1, 64, 35, 35]             128\n",
            "      BasicConv2d-82           [-1, 64, 35, 35]               0\n",
            "       InceptionA-83          [-1, 288, 35, 35]               0\n",
            "           Conv2d-84          [-1, 384, 17, 17]         995,328\n",
            "      BatchNorm2d-85          [-1, 384, 17, 17]             768\n",
            "      BasicConv2d-86          [-1, 384, 17, 17]               0\n",
            "           Conv2d-87           [-1, 64, 35, 35]          18,432\n",
            "      BatchNorm2d-88           [-1, 64, 35, 35]             128\n",
            "      BasicConv2d-89           [-1, 64, 35, 35]               0\n",
            "           Conv2d-90           [-1, 96, 35, 35]          55,296\n",
            "      BatchNorm2d-91           [-1, 96, 35, 35]             192\n",
            "      BasicConv2d-92           [-1, 96, 35, 35]               0\n",
            "           Conv2d-93           [-1, 96, 17, 17]          82,944\n",
            "      BatchNorm2d-94           [-1, 96, 17, 17]             192\n",
            "      BasicConv2d-95           [-1, 96, 17, 17]               0\n",
            "       InceptionB-96          [-1, 768, 17, 17]               0\n",
            "           Conv2d-97          [-1, 192, 17, 17]         147,456\n",
            "      BatchNorm2d-98          [-1, 192, 17, 17]             384\n",
            "      BasicConv2d-99          [-1, 192, 17, 17]               0\n",
            "          Conv2d-100          [-1, 128, 17, 17]          98,304\n",
            "     BatchNorm2d-101          [-1, 128, 17, 17]             256\n",
            "     BasicConv2d-102          [-1, 128, 17, 17]               0\n",
            "          Conv2d-103          [-1, 128, 17, 17]         114,688\n",
            "     BatchNorm2d-104          [-1, 128, 17, 17]             256\n",
            "     BasicConv2d-105          [-1, 128, 17, 17]               0\n",
            "          Conv2d-106          [-1, 192, 17, 17]         172,032\n",
            "     BatchNorm2d-107          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-108          [-1, 192, 17, 17]               0\n",
            "          Conv2d-109          [-1, 128, 17, 17]          98,304\n",
            "     BatchNorm2d-110          [-1, 128, 17, 17]             256\n",
            "     BasicConv2d-111          [-1, 128, 17, 17]               0\n",
            "          Conv2d-112          [-1, 128, 17, 17]         114,688\n",
            "     BatchNorm2d-113          [-1, 128, 17, 17]             256\n",
            "     BasicConv2d-114          [-1, 128, 17, 17]               0\n",
            "          Conv2d-115          [-1, 128, 17, 17]         114,688\n",
            "     BatchNorm2d-116          [-1, 128, 17, 17]             256\n",
            "     BasicConv2d-117          [-1, 128, 17, 17]               0\n",
            "          Conv2d-118          [-1, 128, 17, 17]         114,688\n",
            "     BatchNorm2d-119          [-1, 128, 17, 17]             256\n",
            "     BasicConv2d-120          [-1, 128, 17, 17]               0\n",
            "          Conv2d-121          [-1, 192, 17, 17]         172,032\n",
            "     BatchNorm2d-122          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-123          [-1, 192, 17, 17]               0\n",
            "          Conv2d-124          [-1, 192, 17, 17]         147,456\n",
            "     BatchNorm2d-125          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-126          [-1, 192, 17, 17]               0\n",
            "      InceptionC-127          [-1, 768, 17, 17]               0\n",
            "          Conv2d-128          [-1, 192, 17, 17]         147,456\n",
            "     BatchNorm2d-129          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-130          [-1, 192, 17, 17]               0\n",
            "          Conv2d-131          [-1, 160, 17, 17]         122,880\n",
            "     BatchNorm2d-132          [-1, 160, 17, 17]             320\n",
            "     BasicConv2d-133          [-1, 160, 17, 17]               0\n",
            "          Conv2d-134          [-1, 160, 17, 17]         179,200\n",
            "     BatchNorm2d-135          [-1, 160, 17, 17]             320\n",
            "     BasicConv2d-136          [-1, 160, 17, 17]               0\n",
            "          Conv2d-137          [-1, 192, 17, 17]         215,040\n",
            "     BatchNorm2d-138          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-139          [-1, 192, 17, 17]               0\n",
            "          Conv2d-140          [-1, 160, 17, 17]         122,880\n",
            "     BatchNorm2d-141          [-1, 160, 17, 17]             320\n",
            "     BasicConv2d-142          [-1, 160, 17, 17]               0\n",
            "          Conv2d-143          [-1, 160, 17, 17]         179,200\n",
            "     BatchNorm2d-144          [-1, 160, 17, 17]             320\n",
            "     BasicConv2d-145          [-1, 160, 17, 17]               0\n",
            "          Conv2d-146          [-1, 160, 17, 17]         179,200\n",
            "     BatchNorm2d-147          [-1, 160, 17, 17]             320\n",
            "     BasicConv2d-148          [-1, 160, 17, 17]               0\n",
            "          Conv2d-149          [-1, 160, 17, 17]         179,200\n",
            "     BatchNorm2d-150          [-1, 160, 17, 17]             320\n",
            "     BasicConv2d-151          [-1, 160, 17, 17]               0\n",
            "          Conv2d-152          [-1, 192, 17, 17]         215,040\n",
            "     BatchNorm2d-153          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-154          [-1, 192, 17, 17]               0\n",
            "          Conv2d-155          [-1, 192, 17, 17]         147,456\n",
            "     BatchNorm2d-156          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-157          [-1, 192, 17, 17]               0\n",
            "      InceptionC-158          [-1, 768, 17, 17]               0\n",
            "          Conv2d-159          [-1, 192, 17, 17]         147,456\n",
            "     BatchNorm2d-160          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-161          [-1, 192, 17, 17]               0\n",
            "          Conv2d-162          [-1, 160, 17, 17]         122,880\n",
            "     BatchNorm2d-163          [-1, 160, 17, 17]             320\n",
            "     BasicConv2d-164          [-1, 160, 17, 17]               0\n",
            "          Conv2d-165          [-1, 160, 17, 17]         179,200\n",
            "     BatchNorm2d-166          [-1, 160, 17, 17]             320\n",
            "     BasicConv2d-167          [-1, 160, 17, 17]               0\n",
            "          Conv2d-168          [-1, 192, 17, 17]         215,040\n",
            "     BatchNorm2d-169          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-170          [-1, 192, 17, 17]               0\n",
            "          Conv2d-171          [-1, 160, 17, 17]         122,880\n",
            "     BatchNorm2d-172          [-1, 160, 17, 17]             320\n",
            "     BasicConv2d-173          [-1, 160, 17, 17]               0\n",
            "          Conv2d-174          [-1, 160, 17, 17]         179,200\n",
            "     BatchNorm2d-175          [-1, 160, 17, 17]             320\n",
            "     BasicConv2d-176          [-1, 160, 17, 17]               0\n",
            "          Conv2d-177          [-1, 160, 17, 17]         179,200\n",
            "     BatchNorm2d-178          [-1, 160, 17, 17]             320\n",
            "     BasicConv2d-179          [-1, 160, 17, 17]               0\n",
            "          Conv2d-180          [-1, 160, 17, 17]         179,200\n",
            "     BatchNorm2d-181          [-1, 160, 17, 17]             320\n",
            "     BasicConv2d-182          [-1, 160, 17, 17]               0\n",
            "          Conv2d-183          [-1, 192, 17, 17]         215,040\n",
            "     BatchNorm2d-184          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-185          [-1, 192, 17, 17]               0\n",
            "          Conv2d-186          [-1, 192, 17, 17]         147,456\n",
            "     BatchNorm2d-187          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-188          [-1, 192, 17, 17]               0\n",
            "      InceptionC-189          [-1, 768, 17, 17]               0\n",
            "          Conv2d-190          [-1, 192, 17, 17]         147,456\n",
            "     BatchNorm2d-191          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-192          [-1, 192, 17, 17]               0\n",
            "          Conv2d-193          [-1, 192, 17, 17]         147,456\n",
            "     BatchNorm2d-194          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-195          [-1, 192, 17, 17]               0\n",
            "          Conv2d-196          [-1, 192, 17, 17]         258,048\n",
            "     BatchNorm2d-197          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-198          [-1, 192, 17, 17]               0\n",
            "          Conv2d-199          [-1, 192, 17, 17]         258,048\n",
            "     BatchNorm2d-200          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-201          [-1, 192, 17, 17]               0\n",
            "          Conv2d-202          [-1, 192, 17, 17]         147,456\n",
            "     BatchNorm2d-203          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-204          [-1, 192, 17, 17]               0\n",
            "          Conv2d-205          [-1, 192, 17, 17]         258,048\n",
            "     BatchNorm2d-206          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-207          [-1, 192, 17, 17]               0\n",
            "          Conv2d-208          [-1, 192, 17, 17]         258,048\n",
            "     BatchNorm2d-209          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-210          [-1, 192, 17, 17]               0\n",
            "          Conv2d-211          [-1, 192, 17, 17]         258,048\n",
            "     BatchNorm2d-212          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-213          [-1, 192, 17, 17]               0\n",
            "          Conv2d-214          [-1, 192, 17, 17]         258,048\n",
            "     BatchNorm2d-215          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-216          [-1, 192, 17, 17]               0\n",
            "          Conv2d-217          [-1, 192, 17, 17]         147,456\n",
            "     BatchNorm2d-218          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-219          [-1, 192, 17, 17]               0\n",
            "      InceptionC-220          [-1, 768, 17, 17]               0\n",
            "          Conv2d-221            [-1, 128, 5, 5]          98,304\n",
            "     BatchNorm2d-222            [-1, 128, 5, 5]             256\n",
            "     BasicConv2d-223            [-1, 128, 5, 5]               0\n",
            "          Conv2d-224            [-1, 768, 1, 1]       2,457,600\n",
            "     BatchNorm2d-225            [-1, 768, 1, 1]           1,536\n",
            "     BasicConv2d-226            [-1, 768, 1, 1]               0\n",
            "          Linear-227                 [-1, 1000]         769,000\n",
            "    InceptionAux-228                 [-1, 1000]               0\n",
            "          Conv2d-229          [-1, 192, 17, 17]         147,456\n",
            "     BatchNorm2d-230          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-231          [-1, 192, 17, 17]               0\n",
            "          Conv2d-232            [-1, 320, 8, 8]         552,960\n",
            "     BatchNorm2d-233            [-1, 320, 8, 8]             640\n",
            "     BasicConv2d-234            [-1, 320, 8, 8]               0\n",
            "          Conv2d-235          [-1, 192, 17, 17]         147,456\n",
            "     BatchNorm2d-236          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-237          [-1, 192, 17, 17]               0\n",
            "          Conv2d-238          [-1, 192, 17, 17]         258,048\n",
            "     BatchNorm2d-239          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-240          [-1, 192, 17, 17]               0\n",
            "          Conv2d-241          [-1, 192, 17, 17]         258,048\n",
            "     BatchNorm2d-242          [-1, 192, 17, 17]             384\n",
            "     BasicConv2d-243          [-1, 192, 17, 17]               0\n",
            "          Conv2d-244            [-1, 192, 8, 8]         331,776\n",
            "     BatchNorm2d-245            [-1, 192, 8, 8]             384\n",
            "     BasicConv2d-246            [-1, 192, 8, 8]               0\n",
            "      InceptionD-247           [-1, 1280, 8, 8]               0\n",
            "          Conv2d-248            [-1, 320, 8, 8]         409,600\n",
            "     BatchNorm2d-249            [-1, 320, 8, 8]             640\n",
            "     BasicConv2d-250            [-1, 320, 8, 8]               0\n",
            "          Conv2d-251            [-1, 384, 8, 8]         491,520\n",
            "     BatchNorm2d-252            [-1, 384, 8, 8]             768\n",
            "     BasicConv2d-253            [-1, 384, 8, 8]               0\n",
            "          Conv2d-254            [-1, 384, 8, 8]         442,368\n",
            "     BatchNorm2d-255            [-1, 384, 8, 8]             768\n",
            "     BasicConv2d-256            [-1, 384, 8, 8]               0\n",
            "          Conv2d-257            [-1, 384, 8, 8]         442,368\n",
            "     BatchNorm2d-258            [-1, 384, 8, 8]             768\n",
            "     BasicConv2d-259            [-1, 384, 8, 8]               0\n",
            "          Conv2d-260            [-1, 448, 8, 8]         573,440\n",
            "     BatchNorm2d-261            [-1, 448, 8, 8]             896\n",
            "     BasicConv2d-262            [-1, 448, 8, 8]               0\n",
            "          Conv2d-263            [-1, 384, 8, 8]       1,548,288\n",
            "     BatchNorm2d-264            [-1, 384, 8, 8]             768\n",
            "     BasicConv2d-265            [-1, 384, 8, 8]               0\n",
            "          Conv2d-266            [-1, 384, 8, 8]         442,368\n",
            "     BatchNorm2d-267            [-1, 384, 8, 8]             768\n",
            "     BasicConv2d-268            [-1, 384, 8, 8]               0\n",
            "          Conv2d-269            [-1, 384, 8, 8]         442,368\n",
            "     BatchNorm2d-270            [-1, 384, 8, 8]             768\n",
            "     BasicConv2d-271            [-1, 384, 8, 8]               0\n",
            "          Conv2d-272            [-1, 192, 8, 8]         245,760\n",
            "     BatchNorm2d-273            [-1, 192, 8, 8]             384\n",
            "     BasicConv2d-274            [-1, 192, 8, 8]               0\n",
            "      InceptionE-275           [-1, 2048, 8, 8]               0\n",
            "          Conv2d-276            [-1, 320, 8, 8]         655,360\n",
            "     BatchNorm2d-277            [-1, 320, 8, 8]             640\n",
            "     BasicConv2d-278            [-1, 320, 8, 8]               0\n",
            "          Conv2d-279            [-1, 384, 8, 8]         786,432\n",
            "     BatchNorm2d-280            [-1, 384, 8, 8]             768\n",
            "     BasicConv2d-281            [-1, 384, 8, 8]               0\n",
            "          Conv2d-282            [-1, 384, 8, 8]         442,368\n",
            "     BatchNorm2d-283            [-1, 384, 8, 8]             768\n",
            "     BasicConv2d-284            [-1, 384, 8, 8]               0\n",
            "          Conv2d-285            [-1, 384, 8, 8]         442,368\n",
            "     BatchNorm2d-286            [-1, 384, 8, 8]             768\n",
            "     BasicConv2d-287            [-1, 384, 8, 8]               0\n",
            "          Conv2d-288            [-1, 448, 8, 8]         917,504\n",
            "     BatchNorm2d-289            [-1, 448, 8, 8]             896\n",
            "     BasicConv2d-290            [-1, 448, 8, 8]               0\n",
            "          Conv2d-291            [-1, 384, 8, 8]       1,548,288\n",
            "     BatchNorm2d-292            [-1, 384, 8, 8]             768\n",
            "     BasicConv2d-293            [-1, 384, 8, 8]               0\n",
            "          Conv2d-294            [-1, 384, 8, 8]         442,368\n",
            "     BatchNorm2d-295            [-1, 384, 8, 8]             768\n",
            "     BasicConv2d-296            [-1, 384, 8, 8]               0\n",
            "          Conv2d-297            [-1, 384, 8, 8]         442,368\n",
            "     BatchNorm2d-298            [-1, 384, 8, 8]             768\n",
            "     BasicConv2d-299            [-1, 384, 8, 8]               0\n",
            "          Conv2d-300            [-1, 192, 8, 8]         393,216\n",
            "     BatchNorm2d-301            [-1, 192, 8, 8]             384\n",
            "     BasicConv2d-302            [-1, 192, 8, 8]               0\n",
            "      InceptionE-303           [-1, 2048, 8, 8]               0\n",
            "AdaptiveAvgPool2d-304           [-1, 2048, 1, 1]               0\n",
            "         Dropout-305           [-1, 2048, 1, 1]               0\n",
            "          Linear-306                 [-1, 1000]       2,049,000\n",
            "================================================================\n",
            "Total params: 27,161,264\n",
            "Trainable params: 27,161,264\n",
            "Non-trainable params: 0\n",
            "----------------------------------------------------------------\n",
            "Input size (MB): 1.02\n",
            "Forward/backward pass size (MB): 228.66\n",
            "Params size (MB): 103.61\n",
            "Estimated Total Size (MB): 333.29\n",
            "----------------------------------------------------------------\n"
          ]
        },
        {
          "data": {
            "text/plain": [
              "'inception_model.png'"
            ]
          },
          "execution_count": 5,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "import torchvision.models as models\n",
        "# 导入必要的库\n",
        "import torch.nn as nn\n",
        "import torchvision.models as models\n",
        "from torchsummary import torchsummary\n",
        "from torchviz import make_dot\n",
        "\n",
        "# 加载预训练的InceptionNet模型（Inception v3）\n",
        "inception_model = models.inception_v3(pretrained=True)\n",
        "\n",
        "# 打印模型结构\n",
        "print(\"InceptionNet模型结构:\")\n",
        "print(inception_model)\n",
        "\n",
        "# 将模型移至GPU（如果可用）\n",
        "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
        "inception_model = inception_model.to(device)\n",
        "\n",
        "# 使用torchsummary打印模型参数统计\n",
        "print(\"\\nInceptionNet模型参数统计:\")\n",
        "torchsummary.summary(inception_model, (3, 299, 299))  # Inception v3需要299x299的输入\n",
        "\n",
        "# 创建一个随机输入张量来可视化模型\n",
        "# 设置模型为评估模式以避免批归一化层的错误\n",
        "inception_model.eval()\n",
        "with torch.no_grad():\n",
        "    dummy_input = torch.randn(1, 3, 299, 299).to(device)\n",
        "    output = inception_model(dummy_input)\n",
        "\n",
        "# 使用torchviz可视化模型结构\n",
        "# 修复AttributeError: 'Tensor' object has no attribute 'logits'\n",
        "# Inception v3在eval模式下直接返回tensor而不是包含logits属性的对象\n",
        "model_graph = make_dot(output, params=dict(inception_model.named_parameters()))\n",
        "model_graph.render(\"inception_model\", format=\"png\")\n",
        "\n",
        "\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 3,
      "metadata": {
        "id": "Fi46_oyAY6qD"
      },
      "outputs": [],
      "source": [
        "import json\n",
        "token = {\"username\":\"cskaoyan\",\"key\":\"ff99d9d7ff71704e3e761217ceec03c5\"}\n",
        "with open('/content/kaggle.json', 'w') as file:\n",
        "  json.dump(token, file)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 4,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "FBunE0OvY6ZY",
        "outputId": "88e36bc7-1d9b-4341-d913-0b27c01e9033"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "{\"username\": \"cskaoyan\", \"key\": \"ff99d9d7ff71704e3e761217ceec03c5\"}"
          ]
        }
      ],
      "source": [
        "!cat /content/kaggle.json"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 5,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "qXgB8rdbZIDU",
        "outputId": "25b83728-5319-4488-e53a-d9f0d1189c01"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "- path is now set to: /content\n"
          ]
        }
      ],
      "source": [
        "!mkdir -p ~/.kaggle\n",
        "!cp /content/kaggle.json ~/.kaggle/\n",
        "!chmod 600 ~/.kaggle/kaggle.json\n",
        "!kaggle config set -n path -v /content"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 6,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "4feg3Y3_2IJC",
        "outputId": "ac1ec94f-a6c1-4424-8066-8f58593bffa5"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Downloading cifar-10.zip to /content/competitions/cifar-10\n",
            " 96% 689M/715M [00:07<00:00, 186MB/s]\n",
            "100% 715M/715M [00:07<00:00, 103MB/s]\n"
          ]
        }
      ],
      "source": [
        "!kaggle competitions download -c cifar-10"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 7,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "QDeB7tM12b9K",
        "outputId": "ecc5c7d2-0c2b-4e44-b73f-946ad9d8d024"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Archive:  /content/competitions/cifar-10/cifar-10.zip\n",
            "  inflating: sampleSubmission.csv    \n",
            "  inflating: test.7z                 \n",
            "  inflating: train.7z                \n",
            "  inflating: trainLabels.csv         \n"
          ]
        }
      ],
      "source": [
        "!unzip /content/competitions/cifar-10/cifar-10.zip"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 8,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "NK7iEl7I2bRK",
        "outputId": "7c09a743-54e4-457c-b5b0-d9b05cb3f2c8"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Collecting py7zr\n",
            "  Downloading py7zr-1.0.0-py3-none-any.whl.metadata (17 kB)\n",
            "Collecting texttable (from py7zr)\n",
            "  Downloading texttable-1.7.0-py2.py3-none-any.whl.metadata (9.8 kB)\n",
            "Requirement already satisfied: pycryptodomex>=3.20.0 in /usr/local/lib/python3.11/dist-packages (from py7zr) (3.23.0)\n",
            "Collecting brotli>=1.1.0 (from py7zr)\n",
            "  Downloading Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.5 kB)\n",
            "Requirement already satisfied: psutil in /usr/local/lib/python3.11/dist-packages (from py7zr) (5.9.5)\n",
            "Collecting pyzstd>=0.16.1 (from py7zr)\n",
            "  Downloading pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.5 kB)\n",
            "Collecting pyppmd<1.3.0,>=1.1.0 (from py7zr)\n",
            "  Downloading pyppmd-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.4 kB)\n",
            "Collecting pybcj<1.1.0,>=1.0.0 (from py7zr)\n",
            "  Downloading pybcj-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (3.7 kB)\n",
            "Collecting multivolumefile>=0.2.3 (from py7zr)\n",
            "  Downloading multivolumefile-0.2.3-py3-none-any.whl.metadata (6.3 kB)\n",
            "Collecting inflate64<1.1.0,>=1.0.0 (from py7zr)\n",
            "  Downloading inflate64-1.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (4.4 kB)\n",
            "Requirement already satisfied: typing-extensions>=4.13.2 in /usr/local/lib/python3.11/dist-packages (from pyzstd>=0.16.1->py7zr) (4.14.0)\n",
            "Downloading py7zr-1.0.0-py3-none-any.whl (69 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m69.7/69.7 kB\u001b[0m \u001b[31m4.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.9 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.9/2.9 MB\u001b[0m \u001b[31m56.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading inflate64-1.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (96 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m96.4/96.4 kB\u001b[0m \u001b[31m11.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading multivolumefile-0.2.3-py3-none-any.whl (17 kB)\n",
            "Downloading pybcj-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (50 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m50.7/50.7 kB\u001b[0m \u001b[31m5.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading pyppmd-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (141 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m141.3/141.3 kB\u001b[0m \u001b[31m15.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (412 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m412.9/412.9 kB\u001b[0m \u001b[31m36.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading texttable-1.7.0-py2.py3-none-any.whl (10 kB)\n",
            "Installing collected packages: texttable, brotli, pyzstd, pyppmd, pybcj, multivolumefile, inflate64, py7zr\n",
            "Successfully installed brotli-1.1.0 inflate64-1.0.3 multivolumefile-0.2.3 py7zr-1.0.0 pybcj-1.0.6 pyppmd-1.2.0 pyzstd-0.17.0 texttable-1.7.0\n"
          ]
        }
      ],
      "source": [
        "%pip install py7zr\n",
        "import py7zr\n",
        "# Extract the CIFAR-10 training archive; the context manager guarantees\n",
        "# the archive handle is closed even if extraction raises.\n",
        "with py7zr.SevenZipFile(r'./train.7z', 'r') as archive:\n",
        "    archive.extractall(path=r'./competitions/cifar-10/')"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 13,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "rI5JDfji59q-",
        "outputId": "56707275-bdf6-4fc8-d8ce-d5e650bf6137"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "50000\n"
          ]
        }
      ],
      "source": [
        "!ls competitions/cifar-10/train|wc -l"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "vk4EQTiM4pNt"
      },
      "source": [
        "# 加载数据并处理为tensor"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 8,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:32.407799Z",
          "start_time": "2025-06-26T01:43:32.363026Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "nvguuJLl4pNt",
        "outputId": "34a599a2-04ed-4719-d8a4-291f38475487"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "完整数据集大小: 50000\n",
            "训练集大小: 45000\n",
            "验证集大小: 5000\n"
          ]
        }
      ],
      "source": [
        "# Load the CIFAR-10 dataset\n",
        "import os\n",
        "import pandas as pd\n",
        "from PIL import Image\n",
        "from torch.utils.data import Dataset\n",
        "\n",
        "# Dataset for the Kaggle CIFAR-10 layout: a directory of <id>.png files\n",
        "# plus a CSV file mapping image id -> text label.\n",
        "class CIFAR10Dataset(Dataset):\n",
        "    def __init__(self, img_dir, labels_file, transform=None):\n",
        "        self.img_dir = img_dir\n",
        "        self.transform = transform\n",
        "\n",
        "        # read_csv uses the first row as the header by default\n",
        "        self.labels_df = pd.read_csv(labels_file)\n",
        "        self.img_names = self.labels_df.iloc[:, 0].values.astype(str)  # first column: image id, coerced to str\n",
        "\n",
        "        # dict lookup keeps the text-label -> id mapping O(1)\n",
        "        self.class_names_dict = {'airplane': 0, 'automobile': 1, 'bird': 2, 'cat': 3,\n",
        "                                 'deer': 4, 'dog': 5, 'frog': 6, 'horse': 7, 'ship': 8, 'truck': 9}\n",
        "        # convert the text labels to integer class ids\n",
        "        self.labels = [self.class_names_dict[label] for label in self.labels_df.iloc[:, 1].values]\n",
        "\n",
        "    def __len__(self):\n",
        "        return len(self.labels)\n",
        "\n",
        "    def __getitem__(self, idx):\n",
        "        img_path = os.path.join(self.img_dir, self.img_names[idx] + '.png')  # image path\n",
        "        image = Image.open(img_path)\n",
        "        label = self.labels[idx]\n",
        "\n",
        "        # BUG FIX: the original returned `image_tensor`, which is unbound when\n",
        "        # transform is None (UnboundLocalError); fall back to the raw PIL image.\n",
        "        if self.transform is not None:\n",
        "            image = self.transform(image)\n",
        "\n",
        "        return image, label\n",
        "\n",
        "# Preprocessing: tensor conversion + per-channel normalization\n",
        "transform = transforms.Compose([\n",
        "    transforms.ToTensor(),\n",
        "    transforms.Normalize((0.4917, 0.4823, 0.4467), (0.2024, 0.1995, 0.2010))\n",
        "])\n",
        "\n",
        "# Load the CIFAR-10 dataset\n",
        "# img_dir = r\"competitions/cifar-10/train\"\n",
        "# labels_file = r\"./trainLabels.csv\"\n",
        "# NOTE(review): hardcoded absolute Windows paths -- adjust for your environment\n",
        "img_dir = r\"D:\\cifar-10\\train\\train\"\n",
        "labels_file = r\"D:\\cifar-10\\trainLabels.csv\"\n",
        "full_dataset = CIFAR10Dataset(img_dir=img_dir, labels_file=labels_file, transform=transform)\n",
        "\n",
        "# Class names, indexed by label id\n",
        "class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']\n",
        "\n",
        "# Split into train/validation with a fixed seed for reproducibility\n",
        "train_size = 45000\n",
        "val_size = 5000\n",
        "generator = torch.Generator().manual_seed(42)\n",
        "train_dataset, val_dataset = torch.utils.data.random_split(\n",
        "    full_dataset,\n",
        "    [train_size, val_size],\n",
        "    generator=generator\n",
        ")\n",
        "\n",
        "# Basic dataset sizes\n",
        "print(f\"完整数据集大小: {len(full_dataset)}\")\n",
        "print(f\"训练集大小: {len(train_dataset)}\")\n",
        "print(f\"验证集大小: {len(val_dataset)}\")\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 9,
      "metadata": {
        "id": "1akKUts84pNu"
      },
      "outputs": [],
      "source": [
        "def cal_mean_std(ds):\n",
        "    \"\"\"Approximate per-channel mean/std by averaging per-image statistics.\"\"\"\n",
        "    mean_sum = 0.\n",
        "    std_sum = 0.\n",
        "    for img, _ in ds:\n",
        "        # dim=(1, 2) reduces over the spatial dims (H, W), one value per channel\n",
        "        mean_sum += img.mean(dim=(1, 2))\n",
        "        std_sum += img.std(dim=(1, 2))\n",
        "    return mean_sum / len(ds), std_sum / len(ds)\n",
        "# cal_mean_std(train_dataset)"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "HrTSD6iw4pNu"
      },
      "source": [
        "# 把数据集划分为训练集45000和验证集5000，并给DataLoader"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 10,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.144223Z",
          "start_time": "2025-06-26T01:43:33.135368Z"
        },
        "id": "qK_zQ__r4pNu"
      },
      "outputs": [],
      "source": [
        "\n",
        "# Build the data loaders\n",
        "batch_size = 64\n",
        "train_loader = torch.utils.data.DataLoader(\n",
        "    train_dataset,\n",
        "    batch_size=batch_size,\n",
        "    shuffle=True  # reshuffle the training data at the start of every epoch\n",
        ")\n",
        "\n",
        "val_loader = torch.utils.data.DataLoader(\n",
        "    val_dataset,\n",
        "    batch_size=batch_size,\n",
        "    shuffle=False  # keep validation order deterministic\n",
        ")\n",
        "\n",
        "\n"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "KUyAkERd4pNu"
      },
      "source": [
        "# 搭建模型"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 11,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "j17TXWWx4pNu",
        "outputId": "69251d29-61e5-4670-add4-f558616209e7"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "torch.Size([20, 100])\n"
          ]
        }
      ],
      "source": [
        "# Minimal example of the BatchNorm1d interface: the (N, C) input shape is preserved\n",
        "import torch.nn as nn\n",
        "bn = nn.BatchNorm1d(100)\n",
        "sample = torch.randn(20, 100)\n",
        "print(bn(sample).shape)"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "cFvbdkKd4pNu"
      },
      "source": [
        "# 搭建InceptionNet模型"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 12,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.152657Z",
          "start_time": "2025-06-26T01:43:33.148120Z"
        },
        "id": "UOfee2qW4pNu"
      },
      "outputs": [],
      "source": [
        "import torch\n",
        "import torch.nn as nn\n",
        "import torch.nn.functional as F\n",
        "\n",
        "# Inception module: four parallel branches concatenated along the channel dim\n",
        "class InceptionBlock(nn.Module):\n",
        "    def __init__(self, in_channels, out1x1, red3x3, out3x3, red5x5, out5x5, pool_proj):\n",
        "        super(InceptionBlock, self).__init__()\n",
        "\n",
        "        # 1x1 convolution branch\n",
        "        # NOTE(review): unlike the other branches this one has no BatchNorm/ReLU;\n",
        "        # confirm the asymmetry is intentional.\n",
        "        self.branch1 = nn.Conv2d(in_channels, out1x1, kernel_size=1)\n",
        "\n",
        "        # 1x1 reduction followed by a 3x3 convolution\n",
        "        self.branch2 = nn.Sequential(\n",
        "            nn.Conv2d(in_channels, red3x3, kernel_size=1),\n",
        "            nn.BatchNorm2d(red3x3),\n",
        "            nn.ReLU(inplace=True),\n",
        "            nn.Conv2d(red3x3, out3x3, kernel_size=3, padding=1),\n",
        "            nn.BatchNorm2d(out3x3),\n",
        "            nn.ReLU(inplace=True)\n",
        "        )\n",
        "\n",
        "        # 1x1 reduction followed by a 5x5 convolution\n",
        "        self.branch3 = nn.Sequential(\n",
        "            nn.Conv2d(in_channels, red5x5, kernel_size=1),\n",
        "            nn.BatchNorm2d(red5x5),\n",
        "            nn.ReLU(inplace=True),\n",
        "            nn.Conv2d(red5x5, out5x5, kernel_size=5, padding=2),\n",
        "            nn.BatchNorm2d(out5x5),\n",
        "            nn.ReLU(inplace=True)\n",
        "        )\n",
        "\n",
        "        # 3x3 max-pool followed by a 1x1 projection\n",
        "        self.branch4 = nn.Sequential(\n",
        "            nn.MaxPool2d(kernel_size=3, stride=1, padding=1),\n",
        "            nn.Conv2d(in_channels, pool_proj, kernel_size=1),\n",
        "            nn.BatchNorm2d(pool_proj),\n",
        "            nn.ReLU(inplace=True)\n",
        "        )\n",
        "\n",
        "    def forward(self, x):\n",
        "        branch1 = self.branch1(x)\n",
        "        branch2 = self.branch2(x)\n",
        "        branch3 = self.branch3(x)\n",
        "        branch4 = self.branch4(x)\n",
        "\n",
        "        # concatenate the four branch outputs along the channel dimension\n",
        "        return torch.cat([branch1, branch2, branch3, branch4], 1)\n",
        "\n",
        "# Small InceptionNet-style classifier sized for 32x32 CIFAR-10 inputs\n",
        "class InceptionNetV1(nn.Module):\n",
        "    def __init__(self, num_classes=10):\n",
        "        super().__init__()\n",
        "\n",
        "        # stem: 3 -> 64 channels, halves spatial size 32x32 -> 16x16\n",
        "        self.conv_block1 = nn.Sequential(\n",
        "            nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1),\n",
        "            nn.BatchNorm2d(64),\n",
        "            nn.ReLU(inplace=True),\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)\n",
        "        )\n",
        "\n",
        "        # output channels: 64+128+32+32 = 256\n",
        "        self.inception1 = InceptionBlock(64, 64, 96, 128, 16, 32, 32)\n",
        "        self.maxpool1 = nn.MaxPool2d(kernel_size=2, stride=2)\n",
        "\n",
        "        # output channels: 128+192+96+64 = 480\n",
        "        self.inception2 = InceptionBlock(256, 128, 128, 192, 32, 96, 64)\n",
        "        self.maxpool2 = nn.MaxPool2d(kernel_size=2, stride=2)\n",
        "\n",
        "        # classifier head on the flattened 480x4x4 feature map\n",
        "        self.classifier = nn.Sequential(\n",
        "            nn.Dropout(0.5),\n",
        "            nn.Linear(480 * 4 * 4, 512),\n",
        "            nn.ReLU(inplace=True),\n",
        "            nn.Dropout(0.5),\n",
        "            nn.Linear(512, num_classes)\n",
        "        )\n",
        "\n",
        "    def forward(self, x):\n",
        "        # [B, 3, 32, 32] -> [B, 64, 16, 16]\n",
        "        x = self.conv_block1(x)\n",
        "\n",
        "        # BUG FIX: removed leftover debug print() calls that flooded stdout\n",
        "        # on every forward pass during training.\n",
        "        # [B, 64, 16, 16] -> [B, 256, 16, 16] -> [B, 256, 8, 8]\n",
        "        x = self.inception1(x)\n",
        "        x = self.maxpool1(x)\n",
        "\n",
        "        # [B, 256, 8, 8] -> [B, 480, 8, 8] -> [B, 480, 4, 4]\n",
        "        x = self.inception2(x)\n",
        "        x = self.maxpool2(x)\n",
        "\n",
        "        # flatten to [B, 480*4*4] and classify\n",
        "        x = torch.flatten(x, 1)\n",
        "        x = self.classifier(x)\n",
        "\n",
        "        return x\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 13,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.185031Z",
          "start_time": "2025-06-26T01:43:33.152657Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "5Ll8FXqD4pNv",
        "outputId": "b9abde01-e362-4cb4-b7bd-9802dece9b6f"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "批次图像形状: torch.Size([64, 3, 32, 32])\n",
            "批次标签形状: torch.Size([64])\n",
            "----------------------------------------------------------------------------------------------------\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "torch.Size([64, 10])\n"
          ]
        }
      ],
      "source": [
        "# 实例化模型\n",
        "model = InceptionNetV1()\n",
        "\n",
        "# 从train_loader获取第一个批次的数据\n",
        "dataiter = iter(train_loader)\n",
        "images, labels = next(dataiter)\n",
        "\n",
        "# 查看批次数据的形状\n",
        "print(\"批次图像形状:\", images.shape)\n",
        "print(\"批次标签形状:\", labels.shape)\n",
        "\n",
        "\n",
        "print('-'*100)\n",
        "# 进行前向传播\n",
        "with torch.no_grad():  # 不需要计算梯度\n",
        "    outputs = model(images)\n",
        "\n",
        "\n",
        "print(outputs.shape)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 14,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.203053Z",
          "start_time": "2025-06-26T01:43:33.199532Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "V8zEsAla4pNv",
        "outputId": "37d5ed89-07ad-4fdc-e70d-1aed25939a5c"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "需要求梯度的参数总量: 4467162\n",
            "模型总参数量: 4467162\n",
            "\n",
            "各层参数量明细:\n",
            "conv_block1.0.weight: 1728 参数\n",
            "conv_block1.0.bias: 64 参数\n",
            "conv_block1.1.weight: 64 参数\n",
            "conv_block1.1.bias: 64 参数\n",
            "inception1.branch1.weight: 4096 参数\n",
            "inception1.branch1.bias: 64 参数\n",
            "inception1.branch2.0.weight: 6144 参数\n",
            "inception1.branch2.0.bias: 96 参数\n",
            "inception1.branch2.1.weight: 96 参数\n",
            "inception1.branch2.1.bias: 96 参数\n",
            "inception1.branch2.3.weight: 110592 参数\n",
            "inception1.branch2.3.bias: 128 参数\n",
            "inception1.branch2.4.weight: 128 参数\n",
            "inception1.branch2.4.bias: 128 参数\n",
            "inception1.branch3.0.weight: 1024 参数\n",
            "inception1.branch3.0.bias: 16 参数\n",
            "inception1.branch3.1.weight: 16 参数\n",
            "inception1.branch3.1.bias: 16 参数\n",
            "inception1.branch3.3.weight: 12800 参数\n",
            "inception1.branch3.3.bias: 32 参数\n",
            "inception1.branch3.4.weight: 32 参数\n",
            "inception1.branch3.4.bias: 32 参数\n",
            "inception1.branch4.1.weight: 2048 参数\n",
            "inception1.branch4.1.bias: 32 参数\n",
            "inception1.branch4.2.weight: 32 参数\n",
            "inception1.branch4.2.bias: 32 参数\n",
            "inception2.branch1.weight: 32768 参数\n",
            "inception2.branch1.bias: 128 参数\n",
            "inception2.branch2.0.weight: 32768 参数\n",
            "inception2.branch2.0.bias: 128 参数\n",
            "inception2.branch2.1.weight: 128 参数\n",
            "inception2.branch2.1.bias: 128 参数\n",
            "inception2.branch2.3.weight: 221184 参数\n",
            "inception2.branch2.3.bias: 192 参数\n",
            "inception2.branch2.4.weight: 192 参数\n",
            "inception2.branch2.4.bias: 192 参数\n",
            "inception2.branch3.0.weight: 8192 参数\n",
            "inception2.branch3.0.bias: 32 参数\n",
            "inception2.branch3.1.weight: 32 参数\n",
            "inception2.branch3.1.bias: 32 参数\n",
            "inception2.branch3.3.weight: 76800 参数\n",
            "inception2.branch3.3.bias: 96 参数\n",
            "inception2.branch3.4.weight: 96 参数\n",
            "inception2.branch3.4.bias: 96 参数\n",
            "inception2.branch4.1.weight: 16384 参数\n",
            "inception2.branch4.1.bias: 64 参数\n",
            "inception2.branch4.2.weight: 64 参数\n",
            "inception2.branch4.2.bias: 64 参数\n",
            "classifier.1.weight: 3932160 参数\n",
            "classifier.1.bias: 512 参数\n",
            "classifier.4.weight: 5120 参数\n",
            "classifier.4.bias: 10 参数\n"
          ]
        }
      ],
      "source": [
        "# Model parameter statistics\n",
        "# Count only parameters that require gradients\n",
        "trainable_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
        "print(f\"需要求梯度的参数总量: {trainable_params}\")\n",
        "\n",
        "# Count every parameter, trainable or not\n",
        "all_params = sum(p.numel() for p in model.parameters())\n",
        "print(f\"模型总参数量: {all_params}\")\n",
        "\n",
        "# Per-parameter breakdown\n",
        "print(\"\\n各层参数量明细:\")\n",
        "for name, param in model.named_parameters():\n",
        "    print(f\"{name}: {param.numel()} 参数\")\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 15,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "0XQuUiCe4pNv",
        "outputId": "8b972040-9501-414d-dd43-91c6d9ac75cd"
      },
      "outputs": [
        {
          "data": {
            "text/plain": [
              "294912"
            ]
          },
          "execution_count": 15,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "# sanity check: fc1.weight parameter count = 128 channels * 3*3 spatial * 256 units\n",
        "128*3*3*256"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "1B2dFDE14pNv"
      },
      "source": [
        "# 各层参数量明细:\n",
        "conv1.weight: 288 参数 3*3*1*32\n",
        "conv1.bias: 32 参数\n",
        "conv2.weight: 9216 参数 3*3*32*32\n",
        "conv2.bias: 32 参数  \n",
        "conv3.weight: 18432 参数 3*3*32*64\n",
        "conv3.bias: 64 参数\n",
        "conv4.weight: 36864 参数  3*3*64*64\n",
        "conv4.bias: 64 参数\n",
        "conv5.weight: 73728 参数\n",
        "conv5.bias: 128 参数\n",
        "conv6.weight: 147456 参数\n",
        "conv6.bias: 128 参数\n",
        "fc1.weight: 294912 参数 128*3*3*256\n",
        "fc1.bias: 256 参数\n",
        "fc2.weight: 2560 参数\n",
        "fc2.bias: 10 参数"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 16,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.217395Z",
          "start_time": "2025-06-26T01:43:33.203561Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "al9xZTJQ4pNv",
        "outputId": "92e47aaf-0503-47e1-a220-3cd67d705dd6"
      },
      "outputs": [
        {
          "data": {
            "text/plain": [
              "OrderedDict([('conv_block1.0.weight',\n",
              "              tensor([[[[-0.1554,  0.1794,  0.1625],\n",
              "                        [ 0.0350,  0.1002,  0.0931],\n",
              "                        [-0.1544, -0.0381,  0.0700]],\n",
              "              \n",
              "                       [[ 0.0683, -0.1059, -0.1578],\n",
              "                        [-0.1490,  0.0816, -0.0413],\n",
              "                        [-0.1514,  0.1177,  0.0438]],\n",
              "              \n",
              "                       [[ 0.1034,  0.1652, -0.0112],\n",
              "                        [ 0.0055,  0.0973, -0.0917],\n",
              "                        [ 0.0653, -0.0385,  0.0453]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0136,  0.0360,  0.1435],\n",
              "                        [ 0.1055, -0.0748,  0.1533],\n",
              "                        [ 0.1093, -0.0693,  0.1833]],\n",
              "              \n",
              "                       [[ 0.0848, -0.0206, -0.0099],\n",
              "                        [-0.1617,  0.1485, -0.0098],\n",
              "                        [-0.1388, -0.1669, -0.1777]],\n",
              "              \n",
              "                       [[-0.0944, -0.0451, -0.1179],\n",
              "                        [-0.1395,  0.0137,  0.0297],\n",
              "                        [-0.1663,  0.0760,  0.0750]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.1335,  0.0447,  0.1426],\n",
              "                        [-0.1152,  0.1658, -0.1227],\n",
              "                        [-0.0542,  0.0829, -0.1207]],\n",
              "              \n",
              "                       [[ 0.0265,  0.0962, -0.0258],\n",
              "                        [-0.0004,  0.0024,  0.0887],\n",
              "                        [ 0.1284,  0.0594, -0.1300]],\n",
              "              \n",
              "                       [[-0.0620, -0.0110, -0.1405],\n",
              "                        [ 0.0578, -0.1161,  0.1229],\n",
              "                        [ 0.0502, -0.0802, -0.0922]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0296, -0.0511, -0.1893],\n",
              "                        [-0.1898,  0.1606, -0.0854],\n",
              "                        [-0.1722,  0.1891,  0.0185]],\n",
              "              \n",
              "                       [[-0.0333, -0.0112,  0.0563],\n",
              "                        [ 0.1562,  0.1744, -0.1575],\n",
              "                        [ 0.1485, -0.0337,  0.0902]],\n",
              "              \n",
              "                       [[ 0.1237, -0.1425,  0.1832],\n",
              "                        [ 0.0283, -0.1777,  0.1015],\n",
              "                        [ 0.0812, -0.1168,  0.0189]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.1015, -0.0071, -0.0082],\n",
              "                        [-0.0834, -0.1898,  0.0786],\n",
              "                        [-0.1541, -0.1423, -0.0257]],\n",
              "              \n",
              "                       [[-0.0630,  0.0977, -0.0994],\n",
              "                        [ 0.0896,  0.0581, -0.0851],\n",
              "                        [-0.0437,  0.0672,  0.1251]],\n",
              "              \n",
              "                       [[-0.0407,  0.0963, -0.0496],\n",
              "                        [-0.1749, -0.0300,  0.1771],\n",
              "                        [-0.0344, -0.0766,  0.0046]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.1691, -0.0583,  0.0504],\n",
              "                        [-0.1818, -0.0818, -0.0053],\n",
              "                        [ 0.0793,  0.0372,  0.0690]],\n",
              "              \n",
              "                       [[ 0.0217, -0.0053, -0.0265],\n",
              "                        [-0.0392, -0.0904, -0.0498],\n",
              "                        [-0.0140, -0.0289, -0.1892]],\n",
              "              \n",
              "                       [[-0.0768, -0.0369, -0.1136],\n",
              "                        [-0.0938,  0.1520,  0.1581],\n",
              "                        [-0.0900, -0.1215,  0.0717]]]])),\n",
              "             ('conv_block1.0.bias',\n",
              "              tensor([ 0.0711, -0.0634, -0.0380, -0.1313,  0.1659, -0.1903,  0.0209, -0.0868,\n",
              "                       0.0708,  0.1405, -0.0969,  0.0832,  0.1342, -0.0377,  0.1631,  0.0952,\n",
              "                      -0.0675,  0.1022,  0.1529, -0.1615, -0.1128,  0.1904,  0.1172, -0.1429,\n",
              "                      -0.0629, -0.0780, -0.1447,  0.1225,  0.0688, -0.1084, -0.1318,  0.1546,\n",
              "                      -0.0175,  0.1231, -0.0815, -0.0354,  0.0307, -0.0475,  0.0448, -0.1249,\n",
              "                       0.1643,  0.1286, -0.0319,  0.0739,  0.0828, -0.1796, -0.0118,  0.0547,\n",
              "                       0.1487, -0.1810, -0.1810, -0.0641, -0.1744,  0.1558, -0.0795,  0.1323,\n",
              "                       0.0717, -0.1660, -0.1887, -0.0772,  0.1046, -0.0368,  0.0351,  0.1071])),\n",
              "             ('conv_block1.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv_block1.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block1.1.running_mean',\n",
              "              tensor([ 7.0992e-03, -7.2096e-03, -4.3471e-03, -1.7066e-02,  1.6935e-02,\n",
              "                      -1.4776e-02,  4.1580e-03, -7.0426e-03,  6.2807e-03,  1.4324e-02,\n",
              "                      -8.6726e-03,  5.8696e-03,  1.3964e-02, -3.2734e-03,  1.4193e-02,\n",
              "                       1.1629e-02, -4.2552e-03,  8.9686e-03,  1.7486e-02, -1.3511e-02,\n",
              "                      -1.1725e-02,  1.9334e-02,  8.2566e-03, -1.2007e-02, -4.3676e-03,\n",
              "                      -7.0308e-03, -1.4432e-02,  1.3552e-02,  4.4050e-03, -1.1762e-02,\n",
              "                      -1.1358e-02,  1.4743e-02,  1.6261e-03,  1.2294e-02, -5.6793e-03,\n",
              "                      -4.1793e-03,  5.2155e-03, -4.5203e-03,  4.5605e-03, -1.4682e-02,\n",
              "                       1.8550e-02,  1.2135e-02,  2.2245e-05,  7.1107e-03,  6.8827e-03,\n",
              "                      -1.8853e-02, -4.6032e-05,  3.8039e-03,  1.3762e-02, -1.6879e-02,\n",
              "                      -1.8747e-02, -5.4853e-03, -1.7538e-02,  1.2382e-02, -9.9374e-03,\n",
              "                       1.5577e-02,  8.4790e-03, -1.5143e-02, -1.7771e-02, -8.1827e-03,\n",
              "                       1.1323e-02, -4.0827e-03,  5.0277e-03,  1.2181e-02])),\n",
              "             ('conv_block1.1.running_var',\n",
              "              tensor([0.9229, 0.9257, 0.9050, 1.0340, 0.9194, 0.9299, 0.9307, 0.9300, 0.9363,\n",
              "                      0.9334, 0.9148, 1.0010, 0.9190, 0.9450, 0.9453, 0.9161, 0.9208, 0.9262,\n",
              "                      0.9779, 0.9286, 0.9213, 0.9761, 0.9611, 0.9986, 0.9311, 0.9060, 0.9124,\n",
              "                      0.9247, 0.9202, 0.9149, 0.9321, 0.9140, 0.9442, 0.9039, 0.9955, 0.9115,\n",
              "                      0.9164, 0.9364, 0.9053, 0.9898, 0.9220, 0.9308, 0.9775, 0.9200, 1.0423,\n",
              "                      0.9396, 0.9841, 0.9127, 0.9455, 0.9192, 0.9147, 0.9141, 0.9196, 1.0012,\n",
              "                      0.9155, 0.9194, 0.9145, 0.9156, 0.9356, 0.9364, 0.9123, 0.9095, 0.9262,\n",
              "                      0.9342])),\n",
              "             ('conv_block1.1.num_batches_tracked', tensor(1)),\n",
              "             ('inception1.branch1.weight',\n",
              "              tensor([[[[ 0.0958]],\n",
              "              \n",
              "                       [[-0.0219]],\n",
              "              \n",
              "                       [[ 0.0546]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0900]],\n",
              "              \n",
              "                       [[-0.0824]],\n",
              "              \n",
              "                       [[ 0.0567]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.1003]],\n",
              "              \n",
              "                       [[ 0.0846]],\n",
              "              \n",
              "                       [[ 0.0014]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0543]],\n",
              "              \n",
              "                       [[-0.0980]],\n",
              "              \n",
              "                       [[-0.0584]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0168]],\n",
              "              \n",
              "                       [[ 0.0278]],\n",
              "              \n",
              "                       [[ 0.0144]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0911]],\n",
              "              \n",
              "                       [[ 0.0909]],\n",
              "              \n",
              "                       [[ 0.0036]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-0.0197]],\n",
              "              \n",
              "                       [[-0.1007]],\n",
              "              \n",
              "                       [[-0.0071]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0788]],\n",
              "              \n",
              "                       [[-0.0111]],\n",
              "              \n",
              "                       [[ 0.0718]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0372]],\n",
              "              \n",
              "                       [[ 0.1014]],\n",
              "              \n",
              "                       [[ 0.0817]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0123]],\n",
              "              \n",
              "                       [[-0.0972]],\n",
              "              \n",
              "                       [[-0.0769]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0719]],\n",
              "              \n",
              "                       [[ 0.1180]],\n",
              "              \n",
              "                       [[ 0.0464]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0425]],\n",
              "              \n",
              "                       [[-0.1148]],\n",
              "              \n",
              "                       [[ 0.0038]]]])),\n",
              "             ('inception1.branch1.bias',\n",
              "              tensor([ 0.0769, -0.0992,  0.0717, -0.0335, -0.0401, -0.0959, -0.0751,  0.0196,\n",
              "                      -0.0308, -0.0316, -0.1140,  0.0843,  0.0340,  0.0754, -0.0571,  0.0591,\n",
              "                       0.0497,  0.0046, -0.0421,  0.0679, -0.0382, -0.0186, -0.0184,  0.0423,\n",
              "                      -0.0261, -0.0614, -0.0147, -0.0337,  0.1116,  0.0719,  0.0315, -0.0750,\n",
              "                      -0.1026,  0.1172,  0.1041, -0.0594,  0.0750,  0.0201,  0.0677,  0.0730,\n",
              "                       0.1134,  0.0668,  0.1088,  0.0528,  0.0090, -0.0048,  0.0206,  0.0726,\n",
              "                      -0.0091,  0.0223, -0.1110, -0.0164, -0.0409,  0.0536, -0.0738,  0.1167,\n",
              "                      -0.0202, -0.0165,  0.0797,  0.0898, -0.0513,  0.1155, -0.1185, -0.0854])),\n",
              "             ('inception1.branch2.0.weight',\n",
              "              tensor([[[[-0.0371]],\n",
              "              \n",
              "                       [[ 0.0963]],\n",
              "              \n",
              "                       [[ 0.0317]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0608]],\n",
              "              \n",
              "                       [[ 0.0919]],\n",
              "              \n",
              "                       [[-0.0934]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0036]],\n",
              "              \n",
              "                       [[ 0.0261]],\n",
              "              \n",
              "                       [[-0.0931]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0635]],\n",
              "              \n",
              "                       [[-0.0574]],\n",
              "              \n",
              "                       [[-0.0204]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0466]],\n",
              "              \n",
              "                       [[ 0.1048]],\n",
              "              \n",
              "                       [[ 0.0881]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0204]],\n",
              "              \n",
              "                       [[-0.1166]],\n",
              "              \n",
              "                       [[-0.0064]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-0.0590]],\n",
              "              \n",
              "                       [[ 0.0840]],\n",
              "              \n",
              "                       [[-0.0896]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0779]],\n",
              "              \n",
              "                       [[-0.0365]],\n",
              "              \n",
              "                       [[ 0.0872]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0045]],\n",
              "              \n",
              "                       [[-0.0831]],\n",
              "              \n",
              "                       [[ 0.0747]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0342]],\n",
              "              \n",
              "                       [[-0.0050]],\n",
              "              \n",
              "                       [[ 0.1091]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0120]],\n",
              "              \n",
              "                       [[-0.1248]],\n",
              "              \n",
              "                       [[-0.0665]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0414]],\n",
              "              \n",
              "                       [[-0.0627]],\n",
              "              \n",
              "                       [[-0.1078]]]])),\n",
              "             ('inception1.branch2.0.bias',\n",
              "              tensor([-0.0740,  0.0455,  0.0480,  0.0016,  0.0825, -0.0208, -0.0546, -0.0088,\n",
              "                       0.0649, -0.0020,  0.0491, -0.0475,  0.1108,  0.0898, -0.1117,  0.0764,\n",
              "                       0.1041, -0.1130,  0.0026, -0.1172,  0.1068, -0.0013,  0.0268,  0.0053,\n",
              "                      -0.1119, -0.0938, -0.0965,  0.0320,  0.0087,  0.0246,  0.1019,  0.0129,\n",
              "                      -0.0724,  0.1135,  0.0462, -0.0779, -0.1031,  0.0789,  0.0082, -0.0553,\n",
              "                      -0.0686, -0.0153, -0.0538, -0.0903, -0.0970,  0.0737, -0.0513, -0.0128,\n",
              "                       0.0368, -0.1186,  0.0996, -0.0669, -0.0696, -0.1177, -0.0324,  0.1090,\n",
              "                       0.0191,  0.0895,  0.0619, -0.0110, -0.0689, -0.1242,  0.0972, -0.1129,\n",
              "                       0.1195, -0.1049, -0.0448, -0.1234, -0.0943,  0.0320,  0.0887,  0.0763,\n",
              "                       0.0250, -0.0030, -0.0376,  0.0051, -0.1051, -0.1162,  0.0080, -0.0944,\n",
              "                      -0.0421,  0.0372,  0.0628,  0.0980,  0.0072,  0.0318, -0.0071, -0.0642,\n",
              "                      -0.0852, -0.0053,  0.1249,  0.0839,  0.0785,  0.0134, -0.0677, -0.1200])),\n",
              "             ('inception1.branch2.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1.])),\n",
              "             ('inception1.branch2.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('inception1.branch2.1.running_mean',\n",
              "              tensor([ 0.0304, -0.0313, -0.0159,  0.0241,  0.0251, -0.0564,  0.0344, -0.0658,\n",
              "                      -0.0088,  0.0027, -0.0121,  0.0018,  0.0395,  0.0217, -0.0313, -0.0390,\n",
              "                       0.0835,  0.0074, -0.0509, -0.0918,  0.0197, -0.0188,  0.0054,  0.0569,\n",
              "                      -0.0211,  0.0251, -0.0919,  0.0413, -0.0087, -0.0216, -0.0431,  0.0077,\n",
              "                       0.0008, -0.0131, -0.0752,  0.0024,  0.0479, -0.0113, -0.0027, -0.0061,\n",
              "                      -0.0022,  0.0255,  0.0355,  0.0230,  0.0054,  0.0027, -0.0153, -0.0205,\n",
              "                       0.0404, -0.0568,  0.0206, -0.0613, -0.0449,  0.0292, -0.0154,  0.0642,\n",
              "                       0.0188, -0.0072,  0.0282,  0.0192, -0.0002,  0.0001, -0.0214, -0.0173,\n",
              "                       0.0183,  0.0289, -0.0125, -0.0735, -0.0292, -0.0022, -0.0434, -0.0264,\n",
              "                      -0.0012,  0.0565,  0.0107, -0.0088, -0.0061,  0.0031,  0.0445, -0.0012,\n",
              "                      -0.0376,  0.0380, -0.0181,  0.0135, -0.0185, -0.0022, -0.0662, -0.0590,\n",
              "                       0.0103,  0.0192,  0.0370, -0.0583, -0.0409,  0.0737, -0.0333, -0.0748])),\n",
              "             ('inception1.branch2.1.running_var',\n",
              "              tensor([0.9310, 0.9067, 0.9314, 0.9116, 0.9135, 0.9143, 0.9109, 0.9351, 0.9171,\n",
              "                      0.9064, 0.9150, 0.9197, 0.9077, 0.9248, 0.9038, 0.9106, 0.9259, 0.9067,\n",
              "                      0.9108, 0.9126, 0.9069, 0.9042, 0.9136, 0.9226, 0.9067, 0.9174, 0.9357,\n",
              "                      0.9431, 0.9063, 0.9137, 0.9187, 0.9176, 0.9054, 0.9094, 0.9234, 0.9075,\n",
              "                      0.9137, 0.9052, 0.9156, 0.9222, 0.9072, 0.9124, 0.9119, 0.9147, 0.9146,\n",
              "                      0.9045, 0.9216, 0.9125, 0.9101, 0.9087, 0.9158, 0.9097, 0.9176, 0.9086,\n",
              "                      0.9468, 0.9087, 0.9111, 0.9337, 0.9088, 0.9040, 0.9204, 0.9389, 0.9060,\n",
              "                      0.9077, 0.9188, 0.9460, 0.9129, 0.9126, 0.9043, 0.9097, 0.9162, 0.9066,\n",
              "                      0.9043, 0.9091, 0.9079, 0.9128, 0.9164, 0.9216, 0.9119, 0.9149, 0.9140,\n",
              "                      0.9141, 0.9054, 0.9094, 0.9059, 0.9072, 0.9098, 0.9157, 0.9378, 0.9067,\n",
              "                      0.9131, 0.9174, 0.9151, 0.9191, 0.9110, 0.9083])),\n",
              "             ('inception1.branch2.1.num_batches_tracked', tensor(1)),\n",
              "             ('inception1.branch2.3.weight',\n",
              "              tensor([[[[-8.6685e-03, -1.8273e-03,  1.4961e-02],\n",
              "                        [ 3.1815e-02,  6.4519e-03,  2.0091e-02],\n",
              "                        [ 1.8359e-02,  3.0242e-03,  2.2393e-02]],\n",
              "              \n",
              "                       [[-1.0190e-02, -2.8252e-03,  2.0831e-02],\n",
              "                        [ 2.1037e-02,  1.1279e-02,  1.5399e-02],\n",
              "                        [-9.2478e-03,  1.0498e-02, -1.0808e-02]],\n",
              "              \n",
              "                       [[ 2.4066e-02, -3.2336e-02, -3.2904e-02],\n",
              "                        [-1.1590e-02,  3.2234e-02,  2.8051e-03],\n",
              "                        [ 2.6531e-02,  3.1926e-02,  2.0646e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.2667e-02, -2.0943e-02, -1.2556e-02],\n",
              "                        [ 9.6249e-03, -2.5953e-03,  2.3217e-02],\n",
              "                        [-1.4014e-02,  2.2027e-03,  1.7341e-03]],\n",
              "              \n",
              "                       [[ 1.1537e-02, -1.7126e-03, -4.4785e-03],\n",
              "                        [-3.2385e-02,  1.8674e-02,  3.6759e-03],\n",
              "                        [ 6.0848e-03, -1.4490e-02,  1.1270e-02]],\n",
              "              \n",
              "                       [[ 2.1002e-02,  1.2030e-03, -2.3713e-02],\n",
              "                        [-1.7807e-02, -9.0334e-03, -3.2187e-03],\n",
              "                        [-1.7897e-02, -2.3901e-02, -2.0047e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 2.2148e-02, -2.8522e-02, -1.0714e-02],\n",
              "                        [ 2.4711e-02, -1.4893e-02, -3.3430e-02],\n",
              "                        [-2.5654e-02, -1.3385e-02,  4.9928e-03]],\n",
              "              \n",
              "                       [[ 2.7453e-02, -1.3701e-02,  3.1392e-02],\n",
              "                        [ 3.2883e-02, -1.9024e-02,  1.5234e-02],\n",
              "                        [ 2.4637e-03, -7.5464e-03, -2.6906e-02]],\n",
              "              \n",
              "                       [[-4.6671e-03,  2.9640e-02, -1.4215e-02],\n",
              "                        [-3.0722e-02, -4.8169e-03, -2.7050e-02],\n",
              "                        [-1.8095e-02,  2.7588e-02,  1.2829e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-2.1386e-02, -2.9592e-02, -2.2195e-02],\n",
              "                        [-1.2901e-02, -1.2037e-02, -2.9870e-02],\n",
              "                        [-8.4832e-03, -3.2741e-02,  1.8953e-02]],\n",
              "              \n",
              "                       [[ 5.1098e-03,  7.1313e-03,  6.0873e-04],\n",
              "                        [ 1.0884e-02, -5.1107e-03,  8.1910e-04],\n",
              "                        [ 2.8761e-02,  7.5198e-03,  3.6915e-03]],\n",
              "              \n",
              "                       [[ 2.8082e-02, -2.5386e-04, -2.9710e-02],\n",
              "                        [ 1.1588e-02,  2.7045e-02, -1.4624e-02],\n",
              "                        [-3.8521e-03,  2.5546e-02, -1.8276e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[-2.5291e-02,  3.1732e-02,  2.9971e-02],\n",
              "                        [ 2.7413e-02,  1.0881e-02,  1.9542e-02],\n",
              "                        [ 1.5302e-02, -1.2822e-02, -1.3946e-02]],\n",
              "              \n",
              "                       [[-3.1311e-02, -2.6756e-02,  2.3030e-02],\n",
              "                        [-3.1114e-02, -8.4441e-03,  2.6030e-02],\n",
              "                        [ 2.7359e-02,  3.3101e-02, -1.2336e-02]],\n",
              "              \n",
              "                       [[-8.0928e-04, -1.3921e-02,  1.9060e-02],\n",
              "                        [-9.6995e-04, -5.8179e-03, -2.8882e-02],\n",
              "                        [ 3.3076e-02, -2.7372e-02,  8.4487e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 2.3618e-02,  3.1052e-02,  2.7074e-02],\n",
              "                        [ 3.4919e-03, -2.7682e-02, -9.3837e-03],\n",
              "                        [-2.0763e-02, -2.0539e-02, -1.2950e-02]],\n",
              "              \n",
              "                       [[ 2.9522e-02,  1.2780e-02,  3.1664e-02],\n",
              "                        [-2.9087e-02,  1.2914e-02, -1.2037e-02],\n",
              "                        [-2.8957e-02, -2.0270e-02, -6.6677e-03]],\n",
              "              \n",
              "                       [[-3.0833e-02,  8.6204e-03, -1.6017e-02],\n",
              "                        [-1.7043e-02, -3.3304e-03,  1.8687e-02],\n",
              "                        [ 3.1334e-02,  1.2986e-02, -2.1402e-02]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 3.1241e-02, -2.0376e-02, -2.5320e-02],\n",
              "                        [-1.3206e-03,  2.8397e-02,  8.8186e-03],\n",
              "                        [ 2.8465e-02,  3.3686e-02,  2.6864e-02]],\n",
              "              \n",
              "                       [[-2.4782e-02, -3.2141e-02, -5.2714e-04],\n",
              "                        [-4.7648e-03, -2.3268e-02,  9.9106e-03],\n",
              "                        [-2.7145e-03, -2.6923e-02, -1.2990e-02]],\n",
              "              \n",
              "                       [[-1.6192e-02, -2.2515e-02,  2.0534e-02],\n",
              "                        [ 6.7520e-03, -1.5759e-02, -3.2961e-02],\n",
              "                        [-5.9088e-03, -1.7726e-02,  1.0097e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.7300e-02,  3.4791e-03, -2.2474e-02],\n",
              "                        [-2.8907e-02, -2.9809e-02,  2.3993e-02],\n",
              "                        [-2.8255e-02,  2.9447e-02, -7.7286e-04]],\n",
              "              \n",
              "                       [[ 4.4482e-03,  2.6728e-02, -2.1245e-03],\n",
              "                        [-4.5354e-03,  2.5646e-02, -6.7868e-04],\n",
              "                        [ 8.2128e-03, -1.5296e-04,  2.8699e-03]],\n",
              "              \n",
              "                       [[ 2.1570e-03, -2.6512e-02, -8.9853e-03],\n",
              "                        [-2.0681e-02, -3.2730e-02,  9.5643e-03],\n",
              "                        [ 1.5472e-02, -7.9185e-03,  6.0404e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 2.6412e-02, -4.6983e-03,  2.8032e-02],\n",
              "                        [-7.1549e-03,  2.2963e-02,  1.9623e-03],\n",
              "                        [ 1.2402e-02,  5.0353e-03, -1.8290e-03]],\n",
              "              \n",
              "                       [[ 2.8888e-02,  3.1518e-02, -2.2046e-02],\n",
              "                        [ 3.1616e-02, -2.0166e-02,  1.8276e-02],\n",
              "                        [-2.3972e-02,  9.1548e-03, -1.5722e-02]],\n",
              "              \n",
              "                       [[ 5.1563e-03, -2.7183e-02, -2.6016e-02],\n",
              "                        [ 2.2589e-02, -3.2233e-02, -7.9484e-03],\n",
              "                        [-5.7511e-03,  5.0001e-05, -7.6056e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 6.7069e-03, -2.3178e-02, -1.8451e-02],\n",
              "                        [-1.1248e-02,  2.3188e-02, -5.3556e-03],\n",
              "                        [-4.2746e-03,  2.4058e-03,  4.9462e-03]],\n",
              "              \n",
              "                       [[-7.4472e-03,  3.3321e-02, -1.1395e-02],\n",
              "                        [-2.8805e-02, -2.0542e-03, -5.3778e-03],\n",
              "                        [-9.7558e-03, -1.0684e-02,  9.3391e-03]],\n",
              "              \n",
              "                       [[-4.5545e-03,  2.5650e-02, -2.4926e-02],\n",
              "                        [ 4.6010e-03, -2.8498e-02,  6.6773e-03],\n",
              "                        [-2.8508e-02,  1.6843e-03, -7.8469e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[-3.1646e-02, -2.2464e-02,  1.8613e-02],\n",
              "                        [-2.4622e-02,  1.4247e-02, -2.1068e-02],\n",
              "                        [-1.7465e-02,  1.1727e-02,  1.2939e-02]],\n",
              "              \n",
              "                       [[-3.2679e-02, -3.5801e-03,  3.0872e-04],\n",
              "                        [-1.6293e-02, -3.7009e-03,  5.7671e-03],\n",
              "                        [-3.2939e-02,  1.1077e-02, -2.4005e-02]],\n",
              "              \n",
              "                       [[ 2.7723e-02, -3.2518e-02, -1.6305e-02],\n",
              "                        [-8.3653e-03,  2.0542e-02,  1.8606e-02],\n",
              "                        [-8.3989e-03, -3.4455e-03, -1.1842e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.7182e-02,  5.4077e-03, -1.2441e-02],\n",
              "                        [-2.7739e-02, -1.0586e-02, -2.1844e-02],\n",
              "                        [-3.1984e-02,  3.3258e-02, -7.5562e-03]],\n",
              "              \n",
              "                       [[-7.0493e-03, -2.6907e-02, -1.0336e-02],\n",
              "                        [ 1.0026e-02, -3.3244e-02, -5.2780e-03],\n",
              "                        [-1.1118e-02, -1.3464e-02,  1.5342e-02]],\n",
              "              \n",
              "                       [[-2.0701e-02, -2.7536e-02,  4.5898e-03],\n",
              "                        [ 1.2445e-02,  1.4225e-02, -4.7157e-03],\n",
              "                        [ 3.1878e-02,  1.6916e-02, -2.8639e-02]]]])),\n",
              "             ('inception1.branch2.3.bias',\n",
              "              tensor([ 0.0138, -0.0288, -0.0155, -0.0132,  0.0091, -0.0095,  0.0303, -0.0163,\n",
              "                       0.0121,  0.0087, -0.0008, -0.0334,  0.0077,  0.0106, -0.0102,  0.0320,\n",
              "                      -0.0101, -0.0269, -0.0043,  0.0022,  0.0164, -0.0338, -0.0065, -0.0202,\n",
              "                      -0.0185, -0.0338,  0.0059, -0.0323, -0.0082, -0.0064, -0.0165,  0.0152,\n",
              "                       0.0128,  0.0006,  0.0274,  0.0135, -0.0082,  0.0257, -0.0262,  0.0264,\n",
              "                       0.0135,  0.0228,  0.0220, -0.0306, -0.0141, -0.0338, -0.0003, -0.0055,\n",
              "                       0.0179,  0.0249,  0.0005, -0.0201, -0.0250,  0.0312, -0.0051, -0.0326,\n",
              "                       0.0300,  0.0297,  0.0087, -0.0169, -0.0315, -0.0042,  0.0143, -0.0112,\n",
              "                      -0.0211,  0.0320,  0.0109, -0.0089, -0.0294, -0.0046,  0.0053,  0.0204,\n",
              "                       0.0002,  0.0255, -0.0324,  0.0340, -0.0287,  0.0176,  0.0270, -0.0037,\n",
              "                      -0.0131,  0.0086,  0.0217,  0.0270,  0.0211, -0.0238,  0.0118,  0.0120,\n",
              "                      -0.0044, -0.0243, -0.0016, -0.0010,  0.0229, -0.0332,  0.0309,  0.0171,\n",
              "                       0.0104, -0.0125, -0.0146,  0.0271, -0.0101, -0.0098, -0.0118,  0.0109,\n",
              "                      -0.0090, -0.0108,  0.0281,  0.0154, -0.0215, -0.0199,  0.0295,  0.0034,\n",
              "                      -0.0206, -0.0042,  0.0030,  0.0085, -0.0035,  0.0286,  0.0229, -0.0331,\n",
              "                      -0.0164,  0.0002, -0.0329,  0.0286,  0.0273, -0.0318, -0.0004,  0.0064])),\n",
              "             ('inception1.branch2.4.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1.])),\n",
              "             ('inception1.branch2.4.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('inception1.branch2.4.running_mean',\n",
              "              tensor([ 1.6796e-02,  9.1661e-04, -2.8961e-02, -2.3616e-02,  3.6804e-02,\n",
              "                       1.0197e-03, -2.2003e-02,  3.0300e-02, -6.6341e-03,  3.3491e-03,\n",
              "                       1.7677e-02,  3.2072e-03,  3.1075e-02, -2.9345e-02, -2.4213e-02,\n",
              "                      -2.4129e-02,  1.5394e-02,  3.8078e-03,  1.7292e-02, -4.1761e-03,\n",
              "                       1.7449e-02,  5.7438e-02,  6.0042e-03,  2.5767e-02, -8.1299e-03,\n",
              "                       1.1243e-02, -2.1037e-02, -2.2831e-02, -1.4660e-02,  2.0050e-02,\n",
              "                      -1.1256e-02,  1.3754e-02, -4.1805e-02, -1.9337e-02,  3.7208e-03,\n",
              "                      -1.0678e-03, -5.0891e-02, -2.2984e-02, -8.7412e-03,  7.9220e-03,\n",
              "                       7.7334e-04, -2.3384e-03, -9.1046e-03,  1.9327e-02, -9.6894e-04,\n",
              "                      -3.2991e-02, -1.7612e-02,  9.9270e-03, -1.7627e-02, -3.1337e-02,\n",
              "                       9.8869e-03,  4.2385e-03, -6.7868e-03,  1.1455e-02, -2.4443e-02,\n",
              "                      -7.8486e-03,  3.9155e-02, -1.0362e-02,  2.8579e-02,  4.8106e-03,\n",
              "                      -4.8537e-02, -2.9120e-02, -1.7100e-02,  1.0602e-03,  1.6504e-02,\n",
              "                       1.1316e-02, -1.3618e-02, -1.4768e-02, -9.3437e-04, -2.9269e-02,\n",
              "                      -2.6427e-02, -1.1611e-02,  2.4415e-02,  3.3821e-02, -3.2716e-03,\n",
              "                      -6.5870e-03,  6.1430e-03, -4.2047e-03,  4.4208e-02, -3.8521e-02,\n",
              "                       7.7472e-03,  2.2277e-02,  1.3354e-02, -1.1266e-02,  2.3300e-02,\n",
              "                      -7.7757e-03,  3.7418e-02,  1.1845e-03,  4.5789e-02,  1.1939e-02,\n",
              "                       2.3957e-02, -1.2325e-02,  1.2124e-02, -2.7767e-02,  2.6283e-03,\n",
              "                       3.9397e-02,  1.7392e-02, -2.5056e-02,  5.1877e-02,  5.1890e-03,\n",
              "                       2.2208e-02,  6.1989e-03,  1.3953e-02, -2.2722e-03,  8.2791e-03,\n",
              "                       1.1426e-02, -5.5494e-03,  3.7129e-02, -7.9600e-06, -2.9249e-02,\n",
              "                      -1.0319e-02,  4.1918e-02,  1.9644e-02,  2.2817e-02,  8.5703e-03,\n",
              "                      -7.2721e-03, -1.7094e-02, -1.1075e-02,  2.9805e-02,  1.7833e-03,\n",
              "                       4.1782e-03, -1.6839e-03,  3.5390e-02,  1.4468e-02, -7.9557e-05,\n",
              "                       1.6102e-02, -8.0869e-03, -4.6661e-02])),\n",
              "             ('inception1.branch2.4.running_var',\n",
              "              tensor([0.9097, 0.9198, 0.9065, 0.9094, 0.9166, 0.9076, 0.9075, 0.9096, 0.9111,\n",
              "                      0.9084, 0.9096, 0.9079, 0.9114, 0.9179, 0.9100, 0.9082, 0.9151, 0.9067,\n",
              "                      0.9163, 0.9130, 0.9094, 0.9082, 0.9206, 0.9112, 0.9126, 0.9123, 0.9112,\n",
              "                      0.9096, 0.9113, 0.9134, 0.9093, 0.9084, 0.9064, 0.9145, 0.9077, 0.9156,\n",
              "                      0.9076, 0.9130, 0.9071, 0.9084, 0.9076, 0.9109, 0.9056, 0.9084, 0.9126,\n",
              "                      0.9119, 0.9126, 0.9133, 0.9109, 0.9121, 0.9096, 0.9096, 0.9166, 0.9154,\n",
              "                      0.9144, 0.9083, 0.9120, 0.9120, 0.9067, 0.9076, 0.9107, 0.9111, 0.9112,\n",
              "                      0.9081, 0.9063, 0.9135, 0.9101, 0.9089, 0.9091, 0.9137, 0.9138, 0.9058,\n",
              "                      0.9112, 0.9080, 0.9083, 0.9098, 0.9096, 0.9064, 0.9101, 0.9093, 0.9101,\n",
              "                      0.9119, 0.9118, 0.9139, 0.9106, 0.9097, 0.9098, 0.9069, 0.9100, 0.9091,\n",
              "                      0.9115, 0.9085, 0.9143, 0.9159, 0.9154, 0.9098, 0.9071, 0.9117, 0.9090,\n",
              "                      0.9081, 0.9088, 0.9077, 0.9084, 0.9084, 0.9074, 0.9061, 0.9089, 0.9113,\n",
              "                      0.9115, 0.9149, 0.9072, 0.9085, 0.9125, 0.9080, 0.9081, 0.9162, 0.9089,\n",
              "                      0.9087, 0.9109, 0.9075, 0.9130, 0.9086, 0.9064, 0.9106, 0.9099, 0.9065,\n",
              "                      0.9088, 0.9143])),\n",
              "             ('inception1.branch2.4.num_batches_tracked', tensor(1)),\n",
              "             ('inception1.branch3.0.weight',\n",
              "              tensor([[[[ 0.0687]],\n",
              "              \n",
              "                       [[-0.0113]],\n",
              "              \n",
              "                       [[-0.1050]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0259]],\n",
              "              \n",
              "                       [[ 0.0351]],\n",
              "              \n",
              "                       [[ 0.0833]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0180]],\n",
              "              \n",
              "                       [[ 0.0816]],\n",
              "              \n",
              "                       [[ 0.0812]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0241]],\n",
              "              \n",
              "                       [[-0.0642]],\n",
              "              \n",
              "                       [[-0.1197]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0293]],\n",
              "              \n",
              "                       [[-0.0421]],\n",
              "              \n",
              "                       [[-0.0779]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0605]],\n",
              "              \n",
              "                       [[-0.0523]],\n",
              "              \n",
              "                       [[-0.0856]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-0.0004]],\n",
              "              \n",
              "                       [[-0.0896]],\n",
              "              \n",
              "                       [[-0.0348]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0441]],\n",
              "              \n",
              "                       [[ 0.0656]],\n",
              "              \n",
              "                       [[-0.0562]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0194]],\n",
              "              \n",
              "                       [[ 0.0472]],\n",
              "              \n",
              "                       [[-0.0658]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.1230]],\n",
              "              \n",
              "                       [[ 0.0073]],\n",
              "              \n",
              "                       [[-0.0281]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0575]],\n",
              "              \n",
              "                       [[ 0.0287]],\n",
              "              \n",
              "                       [[ 0.1091]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0202]],\n",
              "              \n",
              "                       [[ 0.0891]],\n",
              "              \n",
              "                       [[ 0.0295]]]])),\n",
              "             ('inception1.branch3.0.bias',\n",
              "              tensor([ 0.0818,  0.0679, -0.0817,  0.0404,  0.0909, -0.0717, -0.1166,  0.0326,\n",
              "                      -0.0918,  0.0212, -0.1130, -0.0507,  0.0006,  0.0010,  0.0465,  0.0742])),\n",
              "             ('inception1.branch3.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('inception1.branch3.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('inception1.branch3.1.running_mean',\n",
              "              tensor([ 0.0485, -0.0175, -0.0014,  0.0363,  0.0016, -0.0002, -0.0132,  0.0415,\n",
              "                      -0.0045, -0.0308, -0.0089, -0.0370, -0.0025, -0.0021,  0.0385,  0.0311])),\n",
              "             ('inception1.branch3.1.running_var',\n",
              "              tensor([0.9167, 0.9118, 0.9100, 0.9095, 0.9032, 0.9129, 0.9030, 0.9123, 0.9116,\n",
              "                      0.9243, 0.9051, 0.9092, 0.9176, 0.9092, 0.9302, 0.9146])),\n",
              "             ('inception1.branch3.1.num_batches_tracked', tensor(1)),\n",
              "             ('inception1.branch3.3.weight',\n",
              "              tensor([[[[-4.8716e-02,  4.7211e-02, -2.8559e-02, -3.1610e-02,  1.8700e-02],\n",
              "                        [-2.8487e-02, -1.6477e-02, -7.4172e-03,  1.3711e-03, -3.9810e-02],\n",
              "                        [ 4.3988e-02, -2.2103e-02,  4.5368e-02, -2.3961e-02, -3.1051e-02],\n",
              "                        [ 2.8085e-02, -4.3959e-02, -3.9929e-02,  7.1677e-03,  4.0802e-02],\n",
              "                        [ 6.5858e-03, -3.5008e-02,  2.6830e-02, -4.3116e-02, -3.0391e-02]],\n",
              "              \n",
              "                       [[-4.7885e-03,  4.8092e-02,  3.5374e-03, -2.9600e-02, -2.7044e-02],\n",
              "                        [ 1.9077e-02, -1.9100e-02,  2.7616e-02,  1.9368e-02,  4.4318e-02],\n",
              "                        [-3.9322e-02,  1.4926e-02, -1.2585e-02,  3.0586e-02,  2.5385e-02],\n",
              "                        [ 2.3837e-02,  4.3861e-02, -1.4198e-02,  2.2740e-02,  1.3690e-02],\n",
              "                        [ 3.1048e-02,  4.4620e-02,  9.2540e-03, -2.1641e-02,  1.7829e-02]],\n",
              "              \n",
              "                       [[ 2.5832e-02,  3.5334e-02, -2.5645e-02, -2.4794e-02, -3.6137e-02],\n",
              "                        [-2.3870e-02,  6.8266e-03, -3.4230e-02, -3.8466e-02,  4.8362e-02],\n",
              "                        [-2.7206e-02, -4.5974e-02, -3.6835e-02,  3.9896e-02,  1.8140e-02],\n",
              "                        [ 3.2216e-02, -3.4317e-02, -2.4844e-02, -4.1419e-02, -2.7717e-03],\n",
              "                        [ 2.5758e-03, -2.4133e-02,  1.2498e-02,  3.0815e-02, -1.2458e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 2.3898e-02,  2.7523e-02,  3.7384e-02, -4.6041e-02,  1.4571e-02],\n",
              "                        [ 1.3043e-02, -3.6119e-03,  1.0341e-02, -4.4853e-02, -4.6647e-02],\n",
              "                        [-2.5363e-02,  9.2871e-03,  4.5675e-02,  3.6711e-02,  1.3093e-02],\n",
              "                        [-2.3159e-02,  4.3871e-02, -3.4857e-02, -3.5607e-02,  1.7126e-02],\n",
              "                        [-1.7845e-02,  1.0216e-02,  3.4833e-02,  1.7186e-02,  1.1548e-02]],\n",
              "              \n",
              "                       [[ 1.4019e-02,  2.7927e-02, -4.3371e-02,  4.1672e-02,  9.9155e-03],\n",
              "                        [ 2.7597e-02, -4.5123e-03,  1.9897e-02, -1.2113e-02, -3.2979e-02],\n",
              "                        [ 5.8372e-04, -2.9685e-02, -1.6299e-02,  4.2799e-02, -1.4138e-03],\n",
              "                        [ 7.0245e-03,  1.4134e-02, -2.2857e-02, -3.4579e-02, -4.1973e-02],\n",
              "                        [ 2.8693e-02,  2.7149e-02,  2.1325e-02, -4.0688e-02, -1.2893e-02]],\n",
              "              \n",
              "                       [[ 4.6014e-02, -4.7270e-02,  6.1476e-03,  4.1308e-02, -1.1184e-03],\n",
              "                        [ 1.9247e-02, -4.9935e-04, -3.1811e-03, -3.5093e-03,  1.9168e-02],\n",
              "                        [-3.5276e-03,  8.9814e-03,  3.3702e-02, -4.0718e-02, -2.4277e-03],\n",
              "                        [-4.8766e-03,  2.1665e-02,  2.5218e-02, -5.2220e-03, -1.9420e-02],\n",
              "                        [-1.2482e-02, -3.5224e-02, -3.5568e-02,  1.0891e-02,  9.8430e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 4.8115e-03,  1.3346e-02, -3.2087e-02,  4.5279e-02,  4.3679e-02],\n",
              "                        [-9.4937e-03, -3.5645e-02,  2.3430e-02,  1.3712e-03, -5.0047e-03],\n",
              "                        [ 4.5355e-02,  7.7882e-03, -4.7090e-04, -4.4817e-02,  6.1344e-03],\n",
              "                        [ 3.8058e-02,  1.5765e-02,  4.2536e-02,  4.9673e-02,  4.9325e-02],\n",
              "                        [-2.3195e-02, -1.1193e-02,  2.7910e-02, -7.2844e-03,  3.3369e-02]],\n",
              "              \n",
              "                       [[ 2.5205e-03, -4.3343e-02,  4.9734e-02, -2.3696e-02,  1.0687e-02],\n",
              "                        [-4.8570e-02, -3.1808e-02, -2.0553e-02, -2.2720e-02,  6.3155e-03],\n",
              "                        [ 3.7525e-02,  4.3738e-02, -6.2730e-03,  2.8475e-02,  1.2405e-03],\n",
              "                        [-4.9694e-02, -1.0590e-03,  6.0071e-03, -2.2059e-02,  2.1970e-02],\n",
              "                        [ 1.2130e-03, -3.4927e-02,  2.8189e-02,  3.6380e-02, -2.7283e-02]],\n",
              "              \n",
              "                       [[ 8.5904e-03, -2.0923e-02, -1.0001e-02,  1.8437e-02, -2.8977e-02],\n",
              "                        [ 4.9105e-03, -3.0117e-02, -2.0379e-02,  1.0682e-02, -3.5111e-02],\n",
              "                        [ 4.1909e-02,  7.0904e-03, -3.5108e-02, -8.7181e-03,  1.9072e-03],\n",
              "                        [-4.4916e-02,  4.0443e-02, -1.3349e-02, -2.4472e-02,  2.8980e-02],\n",
              "                        [-2.7980e-02, -9.3926e-03, -4.0774e-02,  3.1850e-02,  2.2815e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-4.5225e-02, -3.7092e-02, -4.1011e-02,  3.4367e-02,  4.5531e-02],\n",
              "                        [ 3.6159e-02, -2.5752e-02, -2.2528e-02, -1.9584e-03,  1.2741e-02],\n",
              "                        [-4.9612e-02,  2.6537e-02, -8.9534e-03, -1.3689e-02,  3.2503e-02],\n",
              "                        [ 4.4803e-03, -8.7495e-03, -8.8018e-03,  4.0592e-02,  1.2517e-02],\n",
              "                        [-1.6424e-02,  3.3050e-02, -2.0316e-02,  1.4817e-02, -2.8685e-02]],\n",
              "              \n",
              "                       [[ 2.0421e-02, -4.7951e-02, -4.9979e-02, -4.7502e-02, -1.9116e-02],\n",
              "                        [-3.4630e-02, -4.2290e-02, -4.5128e-02,  2.2098e-02,  3.0936e-02],\n",
              "                        [ 2.3809e-02, -2.5750e-02,  2.3819e-02,  3.1503e-02, -4.2192e-02],\n",
              "                        [ 5.1116e-03, -4.6251e-02,  1.6990e-02, -5.1781e-03,  5.9948e-03],\n",
              "                        [-3.7284e-02,  2.1240e-02,  2.4552e-02, -2.0175e-02,  1.7344e-02]],\n",
              "              \n",
              "                       [[ 3.7731e-02, -1.3059e-02,  2.8418e-02, -9.1268e-04, -1.8044e-03],\n",
              "                        [ 4.8925e-02, -1.6019e-02, -2.6612e-02, -3.6124e-02, -4.3710e-03],\n",
              "                        [-2.0449e-02, -3.2022e-02, -4.8317e-02, -3.9763e-02,  4.0480e-02],\n",
              "                        [-9.3523e-03,  2.8385e-02,  4.7497e-02, -2.0090e-02, -1.4914e-02],\n",
              "                        [ 1.6675e-02, -4.2926e-03,  3.7700e-02,  1.7566e-02, -3.2091e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[-4.4360e-02, -6.9023e-03,  2.1211e-02,  4.9140e-02,  1.5551e-02],\n",
              "                        [ 2.7170e-02, -4.7652e-02,  3.3174e-02,  1.5391e-03, -3.1120e-02],\n",
              "                        [ 2.0941e-02,  1.0283e-02, -3.7176e-02,  3.1747e-02,  2.5765e-02],\n",
              "                        [ 5.9080e-03, -1.9363e-02,  4.8709e-03,  3.8089e-02,  1.2371e-02],\n",
              "                        [-2.6412e-03,  3.8753e-02,  3.2711e-02,  1.2508e-02, -3.6885e-02]],\n",
              "              \n",
              "                       [[ 7.0360e-03,  2.0588e-04,  4.1608e-02,  2.7281e-02, -4.5291e-02],\n",
              "                        [-2.1033e-02, -1.9868e-02,  3.9289e-02,  3.8884e-03, -4.6308e-02],\n",
              "                        [ 2.6660e-02, -4.3767e-02, -2.7690e-02, -2.7157e-02,  2.9202e-02],\n",
              "                        [-8.8397e-03,  1.1073e-02,  4.9936e-02,  1.1374e-02,  1.5257e-02],\n",
              "                        [-2.9329e-02, -2.7536e-02,  1.6240e-02, -1.2882e-02,  7.3206e-03]],\n",
              "              \n",
              "                       [[-2.3561e-02,  1.7501e-02, -2.8044e-02, -3.0068e-02,  3.7260e-03],\n",
              "                        [-1.8644e-02, -9.5424e-03,  1.0234e-02,  3.1313e-02, -4.1277e-02],\n",
              "                        [ 3.2391e-04, -4.3555e-02,  2.1646e-02,  4.1406e-02, -3.2076e-02],\n",
              "                        [ 3.8422e-02,  3.0337e-02,  1.6853e-02,  2.5977e-02,  4.2404e-02],\n",
              "                        [ 1.7268e-02,  1.1757e-02, -2.6355e-02,  2.5077e-02, -4.3237e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.2224e-02, -2.6416e-02, -4.4734e-02, -1.9264e-02, -1.8644e-02],\n",
              "                        [-4.6044e-03,  2.5876e-02, -3.5279e-02,  4.6516e-02, -2.4426e-03],\n",
              "                        [ 3.2870e-02, -4.5587e-02, -1.7112e-02, -2.0030e-02, -1.1224e-02],\n",
              "                        [ 2.4955e-02,  2.9182e-02, -3.7719e-02, -4.9114e-02, -3.9608e-02],\n",
              "                        [-3.1651e-02,  9.2585e-04,  4.5971e-02,  4.3753e-02, -5.4695e-03]],\n",
              "              \n",
              "                       [[ 1.7785e-02,  1.9280e-02, -4.3512e-02,  2.9000e-02, -4.4035e-02],\n",
              "                        [-7.7505e-03,  4.8226e-02, -1.3317e-02, -1.0498e-02, -4.4464e-02],\n",
              "                        [ 4.9164e-02, -1.1448e-02, -2.6260e-02,  3.5102e-02,  1.0319e-02],\n",
              "                        [-7.5920e-03, -4.9847e-02, -2.3881e-02,  2.4787e-02,  4.2390e-03],\n",
              "                        [-3.2579e-02,  1.1759e-02, -3.9283e-02,  1.3262e-02, -1.6906e-02]],\n",
              "              \n",
              "                       [[-4.7433e-02, -1.6466e-02, -3.0418e-02,  4.8666e-02,  3.7964e-02],\n",
              "                        [-4.1669e-02, -1.1119e-02,  4.6808e-03, -1.3639e-02,  4.7310e-02],\n",
              "                        [ 4.6367e-02, -2.1285e-02,  4.0555e-02, -1.9511e-02,  1.2616e-02],\n",
              "                        [ 8.6901e-03, -3.7996e-02,  3.5191e-02, -3.0146e-02, -4.5220e-02],\n",
              "                        [-4.1377e-02,  2.6027e-02, -1.5039e-02,  7.6206e-03, -2.2809e-02]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-1.5026e-02,  2.7892e-02, -3.4877e-02,  3.8181e-02, -1.9896e-03],\n",
              "                        [ 9.5244e-03, -4.8360e-02, -3.5799e-02, -1.9760e-02, -2.7639e-02],\n",
              "                        [ 7.3067e-03,  3.2501e-02, -7.1031e-03,  6.4434e-03,  4.3202e-02],\n",
              "                        [-1.6965e-02,  4.5962e-02, -9.4583e-03,  7.7236e-04, -4.4860e-02],\n",
              "                        [ 4.2564e-02,  4.2473e-02,  1.9700e-02,  2.5161e-04, -4.2862e-02]],\n",
              "              \n",
              "                       [[ 4.8975e-02,  2.4462e-02,  2.9643e-03,  1.5969e-02, -9.9445e-03],\n",
              "                        [ 3.4386e-02,  2.4834e-02, -1.0944e-02, -6.6044e-03,  3.9854e-03],\n",
              "                        [-1.2466e-02,  3.5056e-02, -7.7985e-03, -1.6120e-03,  2.6520e-02],\n",
              "                        [ 4.6993e-02, -1.0560e-02, -1.7468e-02,  3.2758e-02,  1.2469e-02],\n",
              "                        [ 4.8928e-02,  4.4141e-02,  2.1802e-02, -1.8617e-02, -3.1481e-02]],\n",
              "              \n",
              "                       [[-1.6078e-02,  2.8369e-02, -2.8173e-02, -3.5281e-02,  3.0353e-02],\n",
              "                        [-6.1094e-04, -2.3555e-02,  3.1961e-03, -2.9720e-04, -3.6278e-02],\n",
              "                        [ 5.1047e-03, -3.3782e-02, -3.8516e-02,  3.6606e-02, -3.6230e-02],\n",
              "                        [-3.3376e-02, -3.2162e-02, -4.9824e-03,  1.0287e-02, -1.7865e-02],\n",
              "                        [ 1.8474e-02,  3.1055e-02, -1.4707e-02, -3.5036e-02, -3.0463e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.7276e-04,  3.0632e-02,  2.4696e-02,  4.2966e-02,  2.7400e-02],\n",
              "                        [-1.6672e-02, -4.1963e-02,  3.7965e-02,  2.1567e-02,  3.2508e-02],\n",
              "                        [ 1.0699e-02, -3.0101e-02, -2.9557e-02,  4.2188e-02,  4.4980e-02],\n",
              "                        [-3.3063e-02, -2.9134e-02, -3.7734e-02, -1.8692e-02, -1.2796e-02],\n",
              "                        [ 3.5430e-02, -1.1497e-02, -4.3611e-02,  3.0603e-02, -5.7050e-03]],\n",
              "              \n",
              "                       [[ 6.5944e-03, -7.4652e-03,  3.3348e-02, -4.2076e-02, -5.7941e-03],\n",
              "                        [ 7.8957e-03,  3.9611e-03, -5.4061e-03,  1.6161e-02, -3.7702e-02],\n",
              "                        [-4.5325e-02, -2.2701e-02, -4.3636e-02, -2.3808e-02, -4.9864e-02],\n",
              "                        [-4.7458e-02,  2.9570e-02, -2.6781e-02, -3.2073e-02,  4.6316e-02],\n",
              "                        [ 3.1833e-02, -7.9241e-03,  1.2275e-02, -3.4711e-02, -1.3347e-03]],\n",
              "              \n",
              "                       [[-8.9038e-05, -3.0725e-02,  4.5486e-02,  3.5385e-02, -2.9259e-03],\n",
              "                        [ 4.8362e-02,  2.0679e-02, -1.1500e-02, -3.7623e-02, -2.1804e-02],\n",
              "                        [-4.0993e-02,  2.0161e-02, -3.7549e-02, -3.6946e-02,  1.1971e-02],\n",
              "                        [-3.5776e-02,  1.9580e-02, -4.6372e-03,  2.9090e-02, -3.0936e-02],\n",
              "                        [-5.9826e-03, -1.3286e-02,  6.0318e-03, -3.3032e-02, -7.2547e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[-4.5001e-02, -1.1153e-02,  4.9828e-02, -6.8340e-03,  2.1004e-02],\n",
              "                        [-2.5528e-02, -3.2701e-02, -3.7438e-04, -4.9211e-02, -1.7742e-02],\n",
              "                        [-3.2163e-03, -3.9239e-02,  3.1566e-02,  1.3638e-02, -3.9589e-02],\n",
              "                        [ 2.7574e-03, -4.1750e-03,  4.6911e-02,  3.5938e-02,  3.8208e-02],\n",
              "                        [ 6.6618e-03, -2.4714e-02,  1.8978e-02, -3.4697e-02, -3.8337e-03]],\n",
              "              \n",
              "                       [[ 4.4580e-02,  4.1224e-02, -2.0712e-02,  7.8839e-03, -1.7046e-02],\n",
              "                        [-4.6629e-02, -4.1609e-02, -6.0939e-03, -5.7051e-03, -3.4772e-02],\n",
              "                        [-1.0619e-02, -4.0671e-02, -4.1368e-02,  2.1113e-02, -2.0909e-02],\n",
              "                        [-2.7466e-02,  2.9847e-02, -4.1701e-02, -4.3144e-02,  1.4189e-02],\n",
              "                        [ 2.2542e-02,  1.5676e-02,  4.6857e-02, -2.5998e-02, -1.3273e-02]],\n",
              "              \n",
              "                       [[ 1.5572e-02, -8.3170e-03, -3.4780e-02,  4.4508e-02, -3.9654e-02],\n",
              "                        [-2.3051e-02, -3.6898e-02,  4.1039e-03, -2.9897e-02,  7.7644e-03],\n",
              "                        [ 2.1214e-02,  4.6343e-02,  1.1630e-02, -3.5309e-02,  3.6299e-02],\n",
              "                        [ 1.8145e-02, -4.3435e-02, -1.9752e-02,  3.5226e-03,  4.7136e-02],\n",
              "                        [ 7.7845e-03,  4.7023e-02, -3.0541e-02, -4.2459e-02, -2.1307e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-5.4895e-03,  3.2287e-03, -2.6050e-02,  3.6606e-02, -2.9577e-02],\n",
              "                        [-4.2845e-02, -3.9994e-03,  4.1650e-02,  3.8079e-02,  5.5115e-03],\n",
              "                        [-4.8458e-02,  3.5850e-02,  3.3143e-02, -1.2840e-02, -1.0358e-02],\n",
              "                        [-7.7288e-03, -3.4081e-02, -4.5971e-02,  2.1497e-02,  7.2388e-03],\n",
              "                        [ 1.1696e-02,  4.2327e-02, -4.6157e-02, -2.7603e-02,  4.3709e-03]],\n",
              "              \n",
              "                       [[-1.1999e-02,  8.9912e-04,  2.3726e-02,  1.5161e-02, -2.2375e-02],\n",
              "                        [-4.1472e-02,  1.6234e-02,  4.9790e-02, -3.2950e-03, -2.8245e-02],\n",
              "                        [-1.6582e-02, -4.2874e-02, -1.9765e-02, -4.3423e-02, -2.8745e-03],\n",
              "                        [ 2.6898e-02, -2.3141e-02, -3.3384e-02,  2.2846e-02,  1.9974e-02],\n",
              "                        [ 2.8730e-02, -1.6644e-02, -2.2374e-03, -4.8187e-02, -1.4265e-02]],\n",
              "              \n",
              "                       [[ 1.4313e-02, -9.6693e-03, -1.0210e-02,  2.0611e-02,  4.6788e-02],\n",
              "                        [ 3.1194e-02, -3.0406e-02, -8.1902e-04,  4.6954e-02, -4.6741e-05],\n",
              "                        [-3.7964e-02,  3.6645e-02, -4.5318e-02, -2.2364e-02, -2.2682e-02],\n",
              "                        [-3.6836e-02,  1.9701e-02, -1.5079e-02, -1.8505e-02,  4.7437e-02],\n",
              "                        [ 2.2446e-02,  1.5583e-02, -3.0463e-02,  4.1055e-02, -1.3204e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 3.4761e-02,  3.2049e-03,  8.4421e-03,  4.3821e-02, -3.5662e-02],\n",
              "                        [ 3.0804e-02,  3.9075e-02, -9.5774e-03,  4.5870e-02,  1.8772e-02],\n",
              "                        [ 2.8445e-02, -1.6453e-02,  2.6738e-02, -3.7208e-03,  6.2289e-03],\n",
              "                        [ 1.4445e-02,  1.6564e-02,  4.6062e-02,  2.5880e-02, -1.8566e-02],\n",
              "                        [-4.3220e-03, -1.0434e-02,  1.0323e-02,  1.7868e-02,  1.9457e-02]],\n",
              "              \n",
              "                       [[ 3.4158e-02,  1.0644e-02, -4.3777e-02, -4.0287e-02, -4.8921e-02],\n",
              "                        [ 2.5829e-02,  3.9852e-03,  1.1101e-02, -2.4750e-02, -2.9041e-02],\n",
              "                        [-4.6532e-02, -4.3571e-02,  4.6012e-02, -8.1915e-03,  4.0513e-03],\n",
              "                        [ 3.4496e-02,  4.0318e-02, -2.6871e-02, -1.0750e-02,  3.2653e-02],\n",
              "                        [ 4.7996e-02, -8.3799e-04, -1.9933e-02,  2.5480e-02,  5.7731e-04]],\n",
              "              \n",
              "                       [[-4.5589e-02,  3.3518e-03, -3.1141e-02, -1.1881e-02, -1.3462e-02],\n",
              "                        [-2.0410e-02,  4.1685e-03,  2.2739e-02, -1.6724e-02,  3.8042e-02],\n",
              "                        [ 3.0863e-02,  2.8760e-04, -4.8932e-02,  7.9780e-03, -4.5070e-02],\n",
              "                        [ 2.5340e-03, -2.1196e-02,  4.2680e-02, -2.4194e-02, -3.2417e-02],\n",
              "                        [-5.6077e-03, -4.4582e-02,  3.7452e-02, -2.9528e-02, -4.7138e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-6.7242e-03, -4.5461e-02, -2.0766e-02,  1.2305e-03, -2.1029e-02],\n",
              "                        [-3.4592e-02,  5.7826e-03, -7.1597e-03, -1.4741e-02, -6.3206e-03],\n",
              "                        [-3.6998e-02, -2.9886e-02,  3.0191e-02,  1.4089e-02,  2.1989e-02],\n",
              "                        [ 2.8085e-02,  3.4714e-02,  5.6679e-03, -4.4396e-02,  3.2024e-02],\n",
              "                        [ 1.5820e-02, -1.3876e-03, -1.9907e-02, -3.8474e-02,  3.3818e-02]],\n",
              "              \n",
              "                       [[ 3.6938e-02, -3.4750e-02, -1.8906e-03,  4.4044e-02,  4.1480e-02],\n",
              "                        [ 1.9268e-02,  1.5195e-02,  1.6548e-02,  1.9621e-02,  4.3662e-02],\n",
              "                        [ 4.3832e-02,  2.8189e-02,  4.4081e-02, -1.6185e-02,  4.3216e-02],\n",
              "                        [ 2.4951e-02, -1.1539e-02, -3.1705e-02, -5.7132e-03, -4.8934e-02],\n",
              "                        [ 1.6797e-02, -4.2855e-02, -1.7963e-02,  1.9422e-02, -2.4646e-02]],\n",
              "              \n",
              "                       [[ 4.9440e-02, -3.5421e-02,  7.7913e-03, -1.6425e-03, -1.0331e-02],\n",
              "                        [-4.4928e-02, -1.4445e-02,  1.0856e-02,  1.5018e-02,  5.4492e-03],\n",
              "                        [-3.0625e-02, -3.1661e-02,  3.0823e-02, -3.8248e-02,  2.2539e-02],\n",
              "                        [-1.2152e-02, -3.1460e-02,  1.9787e-02,  4.4007e-02,  2.8198e-02],\n",
              "                        [ 1.3328e-02, -3.1242e-02,  3.5645e-02, -3.6016e-02, -4.7690e-02]]]])),\n",
              "             ('inception1.branch3.3.bias',\n",
              "              tensor([ 0.0392, -0.0421, -0.0440,  0.0406, -0.0018, -0.0373, -0.0456,  0.0152,\n",
              "                       0.0265,  0.0405,  0.0085, -0.0385,  0.0227, -0.0419,  0.0294, -0.0284,\n",
              "                       0.0062,  0.0158,  0.0006,  0.0036, -0.0463,  0.0047, -0.0053, -0.0357,\n",
              "                      -0.0443,  0.0285, -0.0118,  0.0482,  0.0287,  0.0279,  0.0252,  0.0007])),\n",
              "             ('inception1.branch3.4.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('inception1.branch3.4.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('inception1.branch3.4.running_mean',\n",
              "              tensor([-2.7411e-02,  6.1819e-03, -1.1995e-02, -3.4550e-02,  2.3690e-02,\n",
              "                      -2.8042e-02, -4.2780e-02,  8.5292e-03,  2.5191e-02,  5.3727e-02,\n",
              "                      -1.3697e-02, -3.1257e-02, -4.9334e-05, -4.6713e-03,  2.5329e-02,\n",
              "                      -1.0010e-02,  1.2568e-03, -3.6543e-03, -9.3261e-03, -1.5735e-02,\n",
              "                      -1.2669e-02, -2.1537e-02,  2.8066e-02, -2.0020e-02, -1.5117e-02,\n",
              "                      -1.2143e-02, -1.4971e-03, -3.2141e-02,  1.5026e-03,  6.4002e-03,\n",
              "                      -2.2945e-02,  1.9386e-02])),\n",
              "             ('inception1.branch3.4.running_var',\n",
              "              tensor([0.9083, 0.9117, 0.9061, 0.9125, 0.9119, 0.9113, 0.9182, 0.9085, 0.9115,\n",
              "                      0.9120, 0.9087, 0.9096, 0.9087, 0.9110, 0.9091, 0.9081, 0.9096, 0.9085,\n",
              "                      0.9103, 0.9132, 0.9144, 0.9124, 0.9081, 0.9105, 0.9087, 0.9076, 0.9105,\n",
              "                      0.9123, 0.9088, 0.9128, 0.9105, 0.9154])),\n",
              "             ('inception1.branch3.4.num_batches_tracked', tensor(1)),\n",
              "             ('inception1.branch4.1.weight',\n",
              "              tensor([[[[-0.0923]],\n",
              "              \n",
              "                       [[ 0.0609]],\n",
              "              \n",
              "                       [[-0.0417]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0657]],\n",
              "              \n",
              "                       [[-0.0341]],\n",
              "              \n",
              "                       [[-0.0866]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0748]],\n",
              "              \n",
              "                       [[ 0.0579]],\n",
              "              \n",
              "                       [[-0.0597]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0101]],\n",
              "              \n",
              "                       [[-0.0862]],\n",
              "              \n",
              "                       [[-0.0366]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0729]],\n",
              "              \n",
              "                       [[-0.1012]],\n",
              "              \n",
              "                       [[-0.1141]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0220]],\n",
              "              \n",
              "                       [[-0.0678]],\n",
              "              \n",
              "                       [[ 0.0574]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0309]],\n",
              "              \n",
              "                       [[-0.1043]],\n",
              "              \n",
              "                       [[-0.0457]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0991]],\n",
              "              \n",
              "                       [[-0.0764]],\n",
              "              \n",
              "                       [[ 0.0474]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.1002]],\n",
              "              \n",
              "                       [[ 0.0025]],\n",
              "              \n",
              "                       [[ 0.0891]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0257]],\n",
              "              \n",
              "                       [[-0.0323]],\n",
              "              \n",
              "                       [[ 0.1029]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0605]],\n",
              "              \n",
              "                       [[-0.0442]],\n",
              "              \n",
              "                       [[-0.0658]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0681]],\n",
              "              \n",
              "                       [[-0.0660]],\n",
              "              \n",
              "                       [[-0.0885]]]])),\n",
              "             ('inception1.branch4.1.bias',\n",
              "              tensor([-0.0530,  0.0475,  0.0373, -0.1187,  0.1068,  0.0353,  0.0717,  0.0608,\n",
              "                      -0.0083,  0.0451, -0.0118, -0.1111, -0.1218,  0.0821,  0.0336, -0.1105,\n",
              "                       0.0150, -0.0506, -0.0218,  0.0311, -0.0900, -0.0032, -0.0123,  0.0427,\n",
              "                       0.0922, -0.0891, -0.0366,  0.0063,  0.0250, -0.0802, -0.0319, -0.0018])),\n",
              "             ('inception1.branch4.2.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('inception1.branch4.2.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('inception1.branch4.2.running_mean',\n",
              "              tensor([-0.0183, -0.0007, -0.0173, -0.0073,  0.0512,  0.0734, -0.0259,  0.1401,\n",
              "                      -0.0572,  0.0343, -0.0147, -0.0092,  0.1005,  0.1075, -0.0867, -0.0767,\n",
              "                       0.0382,  0.0149,  0.0295,  0.0574,  0.0870, -0.0422,  0.0025, -0.0508,\n",
              "                      -0.0545,  0.0405,  0.0597, -0.0726, -0.0684, -0.0220,  0.0098,  0.0695])),\n",
              "             ('inception1.branch4.2.running_var',\n",
              "              tensor([0.9250, 0.9070, 0.9088, 0.9116, 0.9126, 0.9121, 0.9156, 0.9416, 0.9473,\n",
              "                      0.9109, 0.9150, 0.9203, 0.9279, 0.9275, 0.9226, 0.9159, 0.9109, 0.9179,\n",
              "                      0.9138, 0.9520, 0.9296, 0.9178, 0.9111, 0.9202, 0.9230, 0.9113, 0.9149,\n",
              "                      0.9243, 0.9191, 0.9076, 0.9303, 0.9149])),\n",
              "             ('inception1.branch4.2.num_batches_tracked', tensor(1)),\n",
              "             ('inception2.branch1.weight',\n",
              "              tensor([[[[ 0.0246]],\n",
              "              \n",
              "                       [[-0.0129]],\n",
              "              \n",
              "                       [[-0.0612]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0379]],\n",
              "              \n",
              "                       [[ 0.0429]],\n",
              "              \n",
              "                       [[-0.0247]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0482]],\n",
              "              \n",
              "                       [[ 0.0199]],\n",
              "              \n",
              "                       [[-0.0108]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0165]],\n",
              "              \n",
              "                       [[ 0.0242]],\n",
              "              \n",
              "                       [[ 0.0571]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0232]],\n",
              "              \n",
              "                       [[-0.0544]],\n",
              "              \n",
              "                       [[-0.0083]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0616]],\n",
              "              \n",
              "                       [[-0.0264]],\n",
              "              \n",
              "                       [[-0.0340]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-0.0306]],\n",
              "              \n",
              "                       [[ 0.0297]],\n",
              "              \n",
              "                       [[ 0.0512]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0020]],\n",
              "              \n",
              "                       [[ 0.0198]],\n",
              "              \n",
              "                       [[ 0.0280]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0365]],\n",
              "              \n",
              "                       [[ 0.0332]],\n",
              "              \n",
              "                       [[-0.0296]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0438]],\n",
              "              \n",
              "                       [[ 0.0584]],\n",
              "              \n",
              "                       [[-0.0497]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0542]],\n",
              "              \n",
              "                       [[-0.0248]],\n",
              "              \n",
              "                       [[-0.0075]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0263]],\n",
              "              \n",
              "                       [[-0.0154]],\n",
              "              \n",
              "                       [[ 0.0204]]]])),\n",
              "             ('inception2.branch1.bias',\n",
              "              tensor([ 0.0126, -0.0211,  0.0605,  0.0407, -0.0320, -0.0301,  0.0475, -0.0320,\n",
              "                       0.0055,  0.0340,  0.0015, -0.0111,  0.0012, -0.0617, -0.0080, -0.0035,\n",
              "                       0.0381,  0.0131, -0.0194,  0.0418, -0.0165, -0.0258, -0.0460, -0.0521,\n",
              "                      -0.0304,  0.0414, -0.0387,  0.0335, -0.0573, -0.0605, -0.0054,  0.0443,\n",
              "                      -0.0474, -0.0322, -0.0566,  0.0109,  0.0437, -0.0611, -0.0514, -0.0199,\n",
              "                      -0.0391, -0.0297, -0.0224,  0.0495,  0.0062,  0.0276, -0.0257,  0.0448,\n",
              "                      -0.0268,  0.0167,  0.0596,  0.0308,  0.0615, -0.0360, -0.0146,  0.0518,\n",
              "                      -0.0118,  0.0531, -0.0581,  0.0251, -0.0434, -0.0400, -0.0608, -0.0570,\n",
              "                       0.0532,  0.0455, -0.0524,  0.0235, -0.0406, -0.0456,  0.0576,  0.0462,\n",
              "                      -0.0059,  0.0191, -0.0065,  0.0342, -0.0107,  0.0547, -0.0068,  0.0004,\n",
              "                       0.0120, -0.0315, -0.0195,  0.0603, -0.0215, -0.0222,  0.0488, -0.0372,\n",
              "                       0.0489, -0.0242, -0.0510,  0.0068,  0.0027,  0.0078,  0.0382,  0.0079,\n",
              "                       0.0597, -0.0142, -0.0393,  0.0536,  0.0336, -0.0145, -0.0148,  0.0384,\n",
              "                      -0.0595, -0.0256,  0.0363, -0.0286, -0.0470, -0.0288,  0.0509,  0.0535,\n",
              "                       0.0285, -0.0107, -0.0415, -0.0395, -0.0424, -0.0217,  0.0056, -0.0386,\n",
              "                       0.0278, -0.0498,  0.0463, -0.0371, -0.0238, -0.0157, -0.0191,  0.0388])),\n",
              "             ('inception2.branch2.0.weight',\n",
              "              tensor([[[[-0.0383]],\n",
              "              \n",
              "                       [[-0.0156]],\n",
              "              \n",
              "                       [[ 0.0363]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0578]],\n",
              "              \n",
              "                       [[ 0.0321]],\n",
              "              \n",
              "                       [[ 0.0118]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0589]],\n",
              "              \n",
              "                       [[-0.0571]],\n",
              "              \n",
              "                       [[ 0.0101]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0178]],\n",
              "              \n",
              "                       [[-0.0151]],\n",
              "              \n",
              "                       [[ 0.0075]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0383]],\n",
              "              \n",
              "                       [[ 0.0208]],\n",
              "              \n",
              "                       [[ 0.0612]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0326]],\n",
              "              \n",
              "                       [[-0.0197]],\n",
              "              \n",
              "                       [[ 0.0488]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-0.0564]],\n",
              "              \n",
              "                       [[ 0.0544]],\n",
              "              \n",
              "                       [[-0.0286]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0015]],\n",
              "              \n",
              "                       [[ 0.0616]],\n",
              "              \n",
              "                       [[-0.0580]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0569]],\n",
              "              \n",
              "                       [[-0.0461]],\n",
              "              \n",
              "                       [[ 0.0171]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0563]],\n",
              "              \n",
              "                       [[ 0.0519]],\n",
              "              \n",
              "                       [[ 0.0500]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0360]],\n",
              "              \n",
              "                       [[ 0.0522]],\n",
              "              \n",
              "                       [[-0.0184]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0272]],\n",
              "              \n",
              "                       [[-0.0041]],\n",
              "              \n",
              "                       [[-0.0335]]]])),\n",
              "             ('inception2.branch2.0.bias',\n",
              "              tensor([-0.0053, -0.0258, -0.0146, -0.0103, -0.0421,  0.0155,  0.0548, -0.0133,\n",
              "                      -0.0465,  0.0306,  0.0495,  0.0292,  0.0561,  0.0261,  0.0612, -0.0621,\n",
              "                      -0.0609,  0.0170, -0.0594, -0.0326,  0.0619,  0.0032,  0.0470,  0.0370,\n",
              "                      -0.0051,  0.0555,  0.0445,  0.0425,  0.0216,  0.0586, -0.0431, -0.0392,\n",
              "                       0.0487,  0.0387, -0.0348,  0.0208, -0.0487, -0.0381,  0.0550,  0.0141,\n",
              "                      -0.0283,  0.0478,  0.0306,  0.0230,  0.0614, -0.0144,  0.0037, -0.0036,\n",
              "                       0.0163, -0.0192, -0.0306, -0.0057,  0.0566, -0.0600, -0.0533, -0.0580,\n",
              "                      -0.0221, -0.0312,  0.0508,  0.0575,  0.0565,  0.0621, -0.0414,  0.0350,\n",
              "                      -0.0623,  0.0427, -0.0600, -0.0539,  0.0335,  0.0140, -0.0295,  0.0097,\n",
              "                      -0.0500,  0.0120,  0.0223,  0.0312,  0.0132, -0.0567,  0.0036, -0.0478,\n",
              "                      -0.0567,  0.0391,  0.0531, -0.0212,  0.0153, -0.0289,  0.0474,  0.0103,\n",
              "                       0.0034,  0.0162,  0.0087,  0.0574, -0.0283,  0.0573,  0.0017, -0.0550,\n",
              "                      -0.0355, -0.0375, -0.0447, -0.0314,  0.0196,  0.0067,  0.0350, -0.0339,\n",
              "                       0.0384, -0.0090,  0.0459, -0.0165,  0.0047,  0.0076,  0.0518, -0.0425,\n",
              "                      -0.0153,  0.0105, -0.0573, -0.0365, -0.0410,  0.0543, -0.0464, -0.0610,\n",
              "                       0.0446,  0.0076,  0.0270,  0.0506,  0.0606,  0.0222,  0.0039, -0.0252])),\n",
              "             ('inception2.branch2.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1.])),\n",
              "             ('inception2.branch2.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('inception2.branch2.1.running_mean',\n",
              "              tensor([ 0.0093, -0.0144,  0.0104, -0.0352, -0.0133,  0.0228,  0.0280, -0.0463,\n",
              "                       0.0280, -0.0346, -0.0200,  0.0417,  0.0575, -0.0209,  0.0413, -0.0075,\n",
              "                       0.0070, -0.0660, -0.0081,  0.0205, -0.0355,  0.0371,  0.0367,  0.0470,\n",
              "                       0.0304,  0.0025, -0.0003, -0.0236, -0.0421, -0.0308, -0.0174,  0.0331,\n",
              "                      -0.0102,  0.0241, -0.0449, -0.0080,  0.0053, -0.0850,  0.0164,  0.0236,\n",
              "                       0.0305,  0.0648,  0.0292, -0.0099, -0.0501,  0.0173, -0.0536, -0.0037,\n",
              "                      -0.0007, -0.0257, -0.0502,  0.0118,  0.0456,  0.0166, -0.0371,  0.0507,\n",
              "                       0.0299, -0.0765,  0.0397,  0.0083,  0.0115, -0.0227, -0.0429, -0.0093,\n",
              "                       0.0056, -0.0313, -0.0311, -0.0484,  0.0355, -0.0905, -0.0102,  0.0650,\n",
              "                      -0.0313, -0.0547, -0.0908, -0.0059,  0.0153, -0.0028,  0.0183, -0.0019,\n",
              "                       0.0414,  0.0302, -0.0138,  0.0362,  0.0326,  0.0859,  0.0056, -0.0018,\n",
              "                      -0.0224,  0.0768,  0.0026,  0.0332, -0.0427,  0.0480, -0.0842,  0.0004,\n",
              "                      -0.0236, -0.0556,  0.0132,  0.0007,  0.0284,  0.0117, -0.0560, -0.0259,\n",
              "                      -0.0110,  0.0038,  0.0451,  0.0226, -0.0871, -0.0358, -0.0107,  0.0092,\n",
              "                      -0.0371,  0.0528, -0.0374,  0.0983,  0.0591, -0.0424, -0.0983,  0.0214,\n",
              "                       0.0228,  0.0386, -0.0556,  0.0339, -0.0016,  0.0660, -0.0614, -0.0063])),\n",
              "             ('inception2.branch2.1.running_var',\n",
              "              tensor([0.9171, 0.9085, 0.9180, 0.9076, 0.9209, 0.9085, 0.9169, 0.9123, 0.9141,\n",
              "                      0.9146, 0.9125, 0.9153, 0.9167, 0.9137, 0.9120, 0.9161, 0.9252, 0.9146,\n",
              "                      0.9160, 0.9184, 0.9167, 0.9134, 0.9132, 0.9129, 0.9174, 0.9102, 0.9094,\n",
              "                      0.9122, 0.9127, 0.9217, 0.9126, 0.9152, 0.9111, 0.9122, 0.9112, 0.9102,\n",
              "                      0.9137, 0.9138, 0.9195, 0.9125, 0.9142, 0.9201, 0.9090, 0.9145, 0.9180,\n",
              "                      0.9251, 0.9194, 0.9122, 0.9099, 0.9141, 0.9142, 0.9111, 0.9166, 0.9146,\n",
              "                      0.9110, 0.9103, 0.9149, 0.9187, 0.9158, 0.9157, 0.9113, 0.9190, 0.9120,\n",
              "                      0.9106, 0.9172, 0.9102, 0.9217, 0.9187, 0.9190, 0.9327, 0.9206, 0.9204,\n",
              "                      0.9207, 0.9152, 0.9142, 0.9119, 0.9111, 0.9191, 0.9092, 0.9149, 0.9105,\n",
              "                      0.9149, 0.9124, 0.9172, 0.9126, 0.9117, 0.9091, 0.9111, 0.9091, 0.9128,\n",
              "                      0.9143, 0.9084, 0.9151, 0.9233, 0.9261, 0.9106, 0.9086, 0.9110, 0.9125,\n",
              "                      0.9109, 0.9095, 0.9115, 0.9193, 0.9075, 0.9113, 0.9102, 0.9109, 0.9205,\n",
              "                      0.9240, 0.9184, 0.9125, 0.9094, 0.9123, 0.9184, 0.9093, 0.9196, 0.9234,\n",
              "                      0.9313, 0.9243, 0.9153, 0.9101, 0.9133, 0.9135, 0.9140, 0.9083, 0.9124,\n",
              "                      0.9141, 0.9088])),\n",
              "             ('inception2.branch2.1.num_batches_tracked', tensor(1)),\n",
              "             ('inception2.branch2.3.weight',\n",
              "              tensor([[[[-6.5784e-03,  1.1579e-02, -1.5272e-02],\n",
              "                        [-1.8232e-02, -1.7162e-02,  1.0816e-02],\n",
              "                        [-1.6303e-02, -1.1916e-02, -2.4973e-02]],\n",
              "              \n",
              "                       [[ 5.4478e-03, -1.5608e-02, -1.1184e-02],\n",
              "                        [-1.6692e-02,  1.7223e-02,  2.7141e-02],\n",
              "                        [ 1.4351e-02, -1.9610e-02,  8.0080e-03]],\n",
              "              \n",
              "                       [[-2.3142e-02,  2.3309e-02,  2.3075e-02],\n",
              "                        [-1.6493e-02, -2.0444e-02,  7.3013e-03],\n",
              "                        [ 1.7367e-02,  1.5577e-03,  2.7376e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 6.5024e-03,  1.9136e-02,  1.3451e-02],\n",
              "                        [ 1.4292e-02,  2.1006e-02,  1.3978e-02],\n",
              "                        [-6.6895e-03, -2.6542e-02,  1.0032e-02]],\n",
              "              \n",
              "                       [[ 1.1417e-02, -1.7906e-02, -2.2912e-02],\n",
              "                        [ 1.1458e-02, -1.3275e-02, -8.5974e-03],\n",
              "                        [ 6.9944e-04,  2.6783e-02, -2.9213e-02]],\n",
              "              \n",
              "                       [[ 1.2036e-02,  1.8532e-02, -1.5283e-02],\n",
              "                        [ 1.5947e-02,  2.8242e-02, -1.4326e-02],\n",
              "                        [-9.6264e-03,  4.2521e-03,  2.9108e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[-1.2631e-02, -2.5633e-02,  2.9101e-02],\n",
              "                        [-2.6568e-03,  4.3930e-04,  2.6195e-02],\n",
              "                        [ 2.3023e-02,  5.5250e-03,  6.9846e-03]],\n",
              "              \n",
              "                       [[-1.0563e-02, -2.1860e-02,  1.5565e-02],\n",
              "                        [-2.6283e-02,  1.8489e-02, -2.3621e-02],\n",
              "                        [-1.2299e-02,  1.2298e-02, -5.3243e-03]],\n",
              "              \n",
              "                       [[-2.6006e-02, -9.6328e-03,  1.7694e-02],\n",
              "                        [-5.1710e-03,  1.2601e-02,  1.6052e-02],\n",
              "                        [ 2.9213e-02,  2.4735e-02, -2.7208e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.4934e-02,  1.4646e-02, -9.0744e-04],\n",
              "                        [-2.5379e-02, -2.0926e-03, -5.5479e-03],\n",
              "                        [ 2.5698e-02,  2.3967e-02, -1.3779e-02]],\n",
              "              \n",
              "                       [[-1.5444e-02, -3.9487e-03, -1.0302e-02],\n",
              "                        [-1.8061e-02,  4.3137e-03, -1.2532e-02],\n",
              "                        [-9.9411e-03, -4.2377e-04,  2.2049e-02]],\n",
              "              \n",
              "                       [[-5.8613e-04, -1.6118e-02, -9.3506e-03],\n",
              "                        [ 9.2625e-03, -2.7175e-02,  5.5184e-03],\n",
              "                        [ 1.1455e-02, -1.6548e-02, -2.1512e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 1.1761e-02,  7.1682e-03, -1.3173e-02],\n",
              "                        [-1.1766e-02,  1.4220e-03, -1.1416e-02],\n",
              "                        [-1.1739e-02,  8.9720e-03,  2.2915e-02]],\n",
              "              \n",
              "                       [[-1.8398e-02, -2.4431e-02, -2.7966e-02],\n",
              "                        [-3.7508e-03, -1.5621e-02,  1.3931e-02],\n",
              "                        [ 1.0013e-02,  9.8936e-03,  1.5873e-02]],\n",
              "              \n",
              "                       [[-8.1916e-03,  2.4061e-02, -2.3420e-02],\n",
              "                        [-1.6665e-02,  8.0099e-03,  1.7878e-02],\n",
              "                        [ 4.0045e-03,  1.9289e-02, -7.5877e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.1792e-02,  2.2114e-02, -1.7941e-02],\n",
              "                        [ 1.2747e-02, -2.1251e-03,  1.2997e-02],\n",
              "                        [ 1.7565e-02,  2.2490e-02, -2.2930e-02]],\n",
              "              \n",
              "                       [[ 4.0099e-03, -2.8937e-02,  6.3438e-05],\n",
              "                        [-2.4789e-02,  2.6957e-02,  1.9386e-02],\n",
              "                        [-7.5195e-03, -3.8467e-03,  1.7982e-02]],\n",
              "              \n",
              "                       [[-3.1161e-04,  2.8653e-02,  1.7703e-02],\n",
              "                        [ 2.0674e-03, -2.0539e-02,  2.5508e-02],\n",
              "                        [ 1.1286e-02,  1.6403e-02,  9.8212e-03]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 7.0310e-03,  7.3548e-03,  2.8530e-02],\n",
              "                        [-1.4305e-02, -2.3174e-02,  2.0118e-03],\n",
              "                        [-1.0622e-02,  1.3429e-02,  7.7775e-03]],\n",
              "              \n",
              "                       [[-1.8403e-02, -2.0862e-02, -8.9456e-03],\n",
              "                        [ 1.7830e-02,  1.9417e-02,  9.2562e-03],\n",
              "                        [-1.6896e-03,  1.9942e-02,  8.0127e-03]],\n",
              "              \n",
              "                       [[ 2.7565e-02,  1.6084e-02,  1.3557e-02],\n",
              "                        [ 7.1805e-03,  2.2158e-02, -2.4220e-02],\n",
              "                        [-2.2298e-02, -1.8504e-02, -2.8074e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 3.6442e-03, -1.0126e-03, -1.2521e-02],\n",
              "                        [ 6.0635e-03,  9.3610e-03, -2.2087e-02],\n",
              "                        [-7.0516e-03, -2.1127e-03, -1.0166e-02]],\n",
              "              \n",
              "                       [[-1.9425e-02, -2.3118e-02, -5.7745e-03],\n",
              "                        [ 1.9215e-02,  9.6311e-03, -2.1788e-02],\n",
              "                        [-7.3955e-03, -2.3260e-02,  2.6783e-02]],\n",
              "              \n",
              "                       [[-1.9125e-02,  4.0783e-04,  2.6842e-02],\n",
              "                        [-3.8239e-03, -1.1304e-02,  2.5298e-02],\n",
              "                        [ 1.6813e-02,  2.6242e-02,  8.3730e-04]]],\n",
              "              \n",
              "              \n",
              "                      [[[-1.0988e-02, -2.0105e-02, -2.4809e-02],\n",
              "                        [-2.3990e-02, -6.9788e-03,  3.1195e-03],\n",
              "                        [ 3.3578e-03, -7.7453e-03, -1.4817e-02]],\n",
              "              \n",
              "                       [[ 2.4788e-03, -1.2573e-02, -1.3026e-02],\n",
              "                        [-2.3520e-02, -1.6939e-02, -1.5449e-03],\n",
              "                        [ 3.2707e-03, -2.0925e-03, -7.8965e-03]],\n",
              "              \n",
              "                       [[ 8.6244e-03, -1.9144e-02, -9.0945e-03],\n",
              "                        [ 1.5692e-03,  1.6357e-02, -1.0803e-02],\n",
              "                        [-2.7079e-02,  1.8415e-02, -2.8879e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-2.5353e-02,  1.1172e-02, -5.7791e-03],\n",
              "                        [ 1.0993e-02,  2.9407e-02, -2.8482e-02],\n",
              "                        [ 2.3753e-02,  2.5910e-02,  2.5994e-02]],\n",
              "              \n",
              "                       [[-1.6028e-04,  1.9466e-02,  2.1054e-02],\n",
              "                        [-2.3192e-02, -2.8613e-02, -2.1034e-02],\n",
              "                        [-2.5516e-02,  2.0996e-02,  2.3602e-02]],\n",
              "              \n",
              "                       [[-1.3179e-02,  2.4850e-02,  1.1721e-02],\n",
              "                        [ 1.5322e-02, -1.6963e-02,  5.2529e-03],\n",
              "                        [-2.8855e-02, -1.9476e-03,  2.1193e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[-2.8865e-02,  2.3673e-02, -7.0831e-03],\n",
              "                        [-2.1607e-02,  4.3950e-03, -1.7790e-02],\n",
              "                        [ 1.4617e-02, -2.1782e-02,  2.4852e-02]],\n",
              "              \n",
              "                       [[ 1.9819e-02, -1.6736e-02, -1.6404e-02],\n",
              "                        [-2.0717e-02,  5.7235e-04, -1.9014e-02],\n",
              "                        [ 2.4318e-02, -5.8944e-03, -1.7276e-02]],\n",
              "              \n",
              "                       [[-2.4019e-02, -2.3616e-02, -2.3863e-02],\n",
              "                        [ 2.4810e-02, -2.5711e-03,  2.7173e-02],\n",
              "                        [-5.6811e-03,  2.3543e-02, -1.6302e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-4.4200e-03,  2.9072e-02, -5.3725e-03],\n",
              "                        [-2.7577e-02, -1.4787e-02,  1.5608e-02],\n",
              "                        [-2.5902e-02, -3.3342e-03, -2.2837e-02]],\n",
              "              \n",
              "                       [[ 2.4574e-02,  8.9627e-03, -2.4021e-02],\n",
              "                        [-7.1056e-04,  8.0922e-03,  2.4962e-03],\n",
              "                        [-1.1803e-02, -3.9447e-03, -1.2492e-02]],\n",
              "              \n",
              "                       [[ 1.2958e-02,  2.3337e-02,  1.0022e-02],\n",
              "                        [-1.5102e-02, -1.4623e-02,  2.6495e-02],\n",
              "                        [-1.4690e-02, -1.9256e-02, -2.8590e-02]]]])),\n",
              "             ('inception2.branch2.3.bias',\n",
              "              tensor([ 0.0138,  0.0163, -0.0152,  0.0123, -0.0246,  0.0185, -0.0282,  0.0004,\n",
              "                       0.0099,  0.0130,  0.0230, -0.0204,  0.0055, -0.0283,  0.0279,  0.0156,\n",
              "                      -0.0094,  0.0053,  0.0056,  0.0096, -0.0048,  0.0057,  0.0282, -0.0005,\n",
              "                       0.0060, -0.0291, -0.0280,  0.0025,  0.0066,  0.0174,  0.0001, -0.0096,\n",
              "                      -0.0207,  0.0281, -0.0293, -0.0023, -0.0101, -0.0259,  0.0183,  0.0010,\n",
              "                       0.0035,  0.0208,  0.0063, -0.0286,  0.0229,  0.0117, -0.0218, -0.0242,\n",
              "                       0.0064, -0.0064,  0.0229, -0.0272,  0.0174, -0.0215, -0.0063,  0.0051,\n",
              "                      -0.0100, -0.0082, -0.0243,  0.0284, -0.0090,  0.0137,  0.0091, -0.0166,\n",
              "                      -0.0284, -0.0167, -0.0162,  0.0017,  0.0195,  0.0095, -0.0178,  0.0263,\n",
              "                       0.0031, -0.0273, -0.0009,  0.0102, -0.0093, -0.0129, -0.0268,  0.0133,\n",
              "                       0.0257, -0.0102,  0.0059,  0.0177, -0.0193, -0.0269, -0.0247,  0.0178,\n",
              "                       0.0067, -0.0267, -0.0111,  0.0008, -0.0038,  0.0064, -0.0149, -0.0249,\n",
              "                      -0.0161, -0.0087,  0.0261,  0.0108, -0.0061,  0.0290,  0.0044,  0.0034,\n",
              "                      -0.0142, -0.0059,  0.0247,  0.0280, -0.0113, -0.0245, -0.0123,  0.0012,\n",
              "                      -0.0232,  0.0255, -0.0056, -0.0142,  0.0081,  0.0080, -0.0154, -0.0003,\n",
              "                      -0.0250, -0.0075,  0.0216, -0.0120,  0.0107, -0.0271, -0.0218, -0.0217,\n",
              "                       0.0237, -0.0207, -0.0189, -0.0150,  0.0191,  0.0137,  0.0284, -0.0287,\n",
              "                       0.0166,  0.0197,  0.0087,  0.0063, -0.0051,  0.0086, -0.0123, -0.0036,\n",
              "                       0.0013, -0.0059, -0.0159,  0.0081, -0.0086,  0.0092, -0.0146,  0.0243,\n",
              "                       0.0157,  0.0254, -0.0284, -0.0293,  0.0252,  0.0014, -0.0065, -0.0288,\n",
              "                      -0.0271, -0.0205,  0.0289,  0.0038,  0.0118,  0.0071, -0.0286, -0.0011,\n",
              "                       0.0122,  0.0235, -0.0197, -0.0143,  0.0080, -0.0077, -0.0111,  0.0115,\n",
              "                       0.0013, -0.0110, -0.0139,  0.0104,  0.0281,  0.0003, -0.0084,  0.0101,\n",
              "                       0.0195,  0.0279, -0.0266, -0.0095,  0.0203,  0.0257,  0.0108, -0.0146])),\n",
              "             ('inception2.branch2.4.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('inception2.branch2.4.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('inception2.branch2.4.running_mean',\n",
              "              tensor([-6.3197e-05,  2.3185e-02, -2.5530e-02,  9.5876e-03,  2.0243e-02,\n",
              "                       1.7006e-02,  3.3658e-02, -1.0935e-02,  2.3020e-02, -1.3076e-02,\n",
              "                       1.0283e-02,  2.5010e-03,  1.9326e-02,  5.1525e-03,  3.8906e-02,\n",
              "                       2.2845e-02,  1.3550e-02,  1.3213e-02, -1.3843e-02,  8.4461e-04,\n",
              "                       6.1386e-03,  1.6112e-02, -1.1691e-04,  2.0438e-02, -6.2564e-03,\n",
              "                      -3.1839e-02, -5.6727e-03,  2.7542e-03, -7.2895e-03, -3.8623e-03,\n",
              "                       1.9642e-02,  1.5005e-02,  2.4040e-02, -2.4324e-02,  2.9071e-03,\n",
              "                      -1.0577e-02, -2.6091e-03, -7.2822e-03, -1.2775e-02,  1.4802e-02,\n",
              "                       1.7107e-02,  1.3514e-02, -3.0168e-04, -4.0082e-02,  6.1966e-03,\n",
              "                      -6.6652e-03, -2.4737e-02, -1.2802e-02,  2.0181e-02,  4.0961e-03,\n",
              "                      -1.0680e-02, -8.4599e-03,  2.1641e-02, -1.4007e-02,  2.0137e-02,\n",
              "                       1.0376e-02, -2.9340e-02, -5.9395e-03,  1.1392e-02, -8.4665e-03,\n",
              "                      -2.3460e-02,  4.6686e-02,  2.0022e-02,  1.1063e-02, -2.1520e-02,\n",
              "                       1.4220e-02,  1.7735e-03,  7.7954e-03,  1.5526e-02, -1.1564e-02,\n",
              "                      -1.4888e-03,  2.6072e-03,  1.6093e-02,  8.7330e-04, -4.4635e-03,\n",
              "                      -1.7763e-02,  3.8166e-03, -1.6638e-02, -1.7137e-02,  1.6344e-03,\n",
              "                       2.9599e-02,  6.5573e-04,  1.1990e-02,  1.7138e-02, -1.0327e-02,\n",
              "                      -3.6928e-02,  1.7336e-02,  1.3856e-02, -6.2802e-03,  1.9263e-02,\n",
              "                       4.7550e-03, -3.6846e-03, -1.9040e-04, -4.7324e-02, -2.0902e-02,\n",
              "                      -1.0524e-02,  4.4264e-03, -2.5382e-03, -1.8669e-02, -1.6922e-03,\n",
              "                       2.1285e-04, -5.7804e-03, -7.4272e-03,  8.0289e-03, -1.0685e-02,\n",
              "                      -9.6927e-03,  2.4396e-02,  5.6368e-04,  1.7896e-02,  1.5680e-03,\n",
              "                       5.7732e-03, -1.6889e-03, -3.1922e-02,  4.5750e-02, -1.1518e-02,\n",
              "                      -1.0347e-02,  7.2551e-03, -1.3173e-02,  5.0182e-03,  1.5294e-02,\n",
              "                      -7.2729e-03, -2.2889e-02, -1.2829e-03,  3.5658e-02,  2.5179e-02,\n",
              "                      -7.6698e-03,  1.1577e-02, -3.0965e-02, -9.4853e-03, -1.0758e-03,\n",
              "                      -9.1685e-03, -1.3257e-02,  2.2014e-02,  1.8230e-02,  4.0345e-02,\n",
              "                       1.0672e-02,  3.3321e-02,  4.5087e-02,  2.4791e-02, -1.2511e-02,\n",
              "                       2.4548e-02, -4.9313e-03, -1.6845e-02,  1.3829e-02, -4.8672e-03,\n",
              "                       7.4779e-03, -1.6547e-02,  1.1359e-02, -2.6778e-02, -1.7738e-02,\n",
              "                       1.8235e-02,  2.0184e-02,  3.6356e-02, -1.4041e-02,  9.1390e-03,\n",
              "                       1.3480e-02, -1.7668e-02,  5.8292e-03, -2.5593e-02, -1.1467e-02,\n",
              "                       1.0438e-04,  4.5900e-03,  2.0104e-02,  1.4119e-02,  1.1481e-03,\n",
              "                      -4.2838e-02,  7.3382e-04,  2.7397e-02, -2.6967e-02, -1.4627e-02,\n",
              "                      -3.2823e-02,  3.8192e-02, -1.6637e-02,  2.2746e-02,  2.1570e-02,\n",
              "                      -1.0146e-02, -8.0718e-03, -2.7331e-02,  5.1175e-03, -1.0262e-02,\n",
              "                       1.1748e-02, -8.8174e-03,  1.8886e-02, -2.3053e-03,  2.0723e-04,\n",
              "                       9.9256e-03, -2.6626e-02, -3.2068e-02,  3.7689e-02,  1.4366e-02,\n",
              "                      -2.7387e-04, -1.4988e-02])),\n",
              "             ('inception2.branch2.4.running_var',\n",
              "              tensor([0.9130, 0.9129, 0.9084, 0.9094, 0.9113, 0.9105, 0.9137, 0.9074, 0.9120,\n",
              "                      0.9104, 0.9089, 0.9088, 0.9134, 0.9085, 0.9110, 0.9091, 0.9085, 0.9106,\n",
              "                      0.9101, 0.9085, 0.9083, 0.9089, 0.9157, 0.9086, 0.9094, 0.9138, 0.9096,\n",
              "                      0.9084, 0.9083, 0.9080, 0.9098, 0.9122, 0.9107, 0.9106, 0.9086, 0.9108,\n",
              "                      0.9115, 0.9110, 0.9117, 0.9109, 0.9105, 0.9129, 0.9116, 0.9141, 0.9081,\n",
              "                      0.9082, 0.9102, 0.9114, 0.9133, 0.9083, 0.9134, 0.9112, 0.9089, 0.9106,\n",
              "                      0.9101, 0.9099, 0.9090, 0.9088, 0.9088, 0.9118, 0.9105, 0.9117, 0.9133,\n",
              "                      0.9119, 0.9096, 0.9090, 0.9082, 0.9114, 0.9107, 0.9125, 0.9111, 0.9091,\n",
              "                      0.9113, 0.9086, 0.9077, 0.9112, 0.9140, 0.9109, 0.9099, 0.9099, 0.9090,\n",
              "                      0.9059, 0.9092, 0.9109, 0.9095, 0.9086, 0.9090, 0.9106, 0.9120, 0.9081,\n",
              "                      0.9098, 0.9101, 0.9138, 0.9125, 0.9110, 0.9119, 0.9089, 0.9095, 0.9094,\n",
              "                      0.9091, 0.9091, 0.9106, 0.9108, 0.9082, 0.9159, 0.9119, 0.9104, 0.9113,\n",
              "                      0.9111, 0.9074, 0.9110, 0.9109, 0.9093, 0.9113, 0.9102, 0.9087, 0.9103,\n",
              "                      0.9102, 0.9119, 0.9093, 0.9100, 0.9089, 0.9086, 0.9098, 0.9101, 0.9089,\n",
              "                      0.9100, 0.9144, 0.9103, 0.9091, 0.9111, 0.9096, 0.9107, 0.9093, 0.9099,\n",
              "                      0.9150, 0.9091, 0.9091, 0.9092, 0.9115, 0.9094, 0.9113, 0.9083, 0.9086,\n",
              "                      0.9100, 0.9083, 0.9092, 0.9098, 0.9107, 0.9093, 0.9096, 0.9099, 0.9132,\n",
              "                      0.9093, 0.9099, 0.9086, 0.9104, 0.9096, 0.9115, 0.9079, 0.9085, 0.9099,\n",
              "                      0.9083, 0.9125, 0.9112, 0.9090, 0.9105, 0.9084, 0.9111, 0.9132, 0.9140,\n",
              "                      0.9114, 0.9073, 0.9119, 0.9108, 0.9085, 0.9083, 0.9110, 0.9097, 0.9093,\n",
              "                      0.9127, 0.9115, 0.9129, 0.9103, 0.9092, 0.9079, 0.9122, 0.9101, 0.9117,\n",
              "                      0.9119, 0.9123, 0.9092])),\n",
              "             ('inception2.branch2.4.num_batches_tracked', tensor(1)),\n",
              "             ('inception2.branch3.0.weight',\n",
              "              tensor([[[[-0.0111]],\n",
              "              \n",
              "                       [[ 0.0005]],\n",
              "              \n",
              "                       [[ 0.0256]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0327]],\n",
              "              \n",
              "                       [[-0.0368]],\n",
              "              \n",
              "                       [[ 0.0338]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0120]],\n",
              "              \n",
              "                       [[-0.0170]],\n",
              "              \n",
              "                       [[ 0.0546]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0482]],\n",
              "              \n",
              "                       [[ 0.0119]],\n",
              "              \n",
              "                       [[-0.0196]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0229]],\n",
              "              \n",
              "                       [[-0.0265]],\n",
              "              \n",
              "                       [[-0.0524]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0591]],\n",
              "              \n",
              "                       [[-0.0595]],\n",
              "              \n",
              "                       [[-0.0557]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-0.0143]],\n",
              "              \n",
              "                       [[-0.0534]],\n",
              "              \n",
              "                       [[-0.0014]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0216]],\n",
              "              \n",
              "                       [[-0.0036]],\n",
              "              \n",
              "                       [[-0.0292]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0553]],\n",
              "              \n",
              "                       [[ 0.0236]],\n",
              "              \n",
              "                       [[-0.0400]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0498]],\n",
              "              \n",
              "                       [[ 0.0538]],\n",
              "              \n",
              "                       [[-0.0462]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0185]],\n",
              "              \n",
              "                       [[ 0.0387]],\n",
              "              \n",
              "                       [[ 0.0361]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0364]],\n",
              "              \n",
              "                       [[ 0.0523]],\n",
              "              \n",
              "                       [[-0.0170]]]])),\n",
              "             ('inception2.branch3.0.bias',\n",
              "              tensor([-0.0073,  0.0282,  0.0352, -0.0504,  0.0094,  0.0307, -0.0015, -0.0511,\n",
              "                      -0.0319, -0.0002, -0.0566, -0.0552, -0.0550, -0.0486, -0.0341, -0.0022,\n",
              "                       0.0133, -0.0012,  0.0531, -0.0049, -0.0614, -0.0056,  0.0451, -0.0558,\n",
              "                      -0.0159, -0.0608, -0.0548, -0.0474,  0.0370, -0.0057, -0.0096,  0.0547])),\n",
              "             ('inception2.branch3.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('inception2.branch3.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('inception2.branch3.1.running_mean',\n",
              "              tensor([ 0.0218, -0.0030,  0.0267, -0.0007, -0.0449,  0.0596, -0.0682,  0.0186,\n",
              "                       0.0082, -0.0243, -0.0692, -0.0230,  0.0473, -0.0488, -0.0045,  0.0336,\n",
              "                      -0.0179,  0.0052,  0.0968, -0.0289, -0.0088,  0.0335, -0.0294, -0.0718,\n",
              "                       0.0055, -0.0234, -0.0376,  0.0077, -0.0403, -0.0296,  0.0323,  0.0308])),\n",
              "             ('inception2.branch3.1.running_var',\n",
              "              tensor([0.9112, 0.9120, 0.9093, 0.9102, 0.9148, 0.9118, 0.9152, 0.9104, 0.9095,\n",
              "                      0.9101, 0.9120, 0.9209, 0.9103, 0.9150, 0.9128, 0.9109, 0.9073, 0.9118,\n",
              "                      0.9290, 0.9159, 0.9100, 0.9150, 0.9144, 0.9153, 0.9172, 0.9115, 0.9359,\n",
              "                      0.9153, 0.9213, 0.9076, 0.9207, 0.9103])),\n",
              "             ('inception2.branch3.1.num_batches_tracked', tensor(1)),\n",
              "             ('inception2.branch3.3.weight',\n",
              "              tensor([[[[ 2.4435e-02, -2.6078e-02,  3.1872e-02,  2.1426e-02, -1.6212e-02],\n",
              "                        [ 2.7829e-02, -1.5139e-02, -2.3290e-02,  7.4170e-03,  2.0718e-02],\n",
              "                        [ 1.6707e-02, -4.3648e-03, -6.0763e-03,  2.5714e-02,  1.7088e-02],\n",
              "                        [-1.5900e-03,  2.7514e-02,  1.6396e-02,  2.1674e-02, -5.3800e-03],\n",
              "                        [-1.9524e-02, -1.9875e-02, -1.2668e-02,  1.6958e-02, -2.8971e-02]],\n",
              "              \n",
              "                       [[ 2.4860e-02,  3.1083e-02,  8.2644e-03, -5.6035e-03, -1.6960e-02],\n",
              "                        [-2.2984e-02, -1.1108e-02, -3.1195e-02, -1.7857e-02, -3.2364e-02],\n",
              "                        [ 2.8663e-02,  6.4159e-03, -1.8136e-02, -1.3475e-02, -1.6201e-02],\n",
              "                        [ 9.4630e-03, -2.2049e-02,  2.2697e-02, -1.7157e-02, -2.2007e-02],\n",
              "                        [-2.2373e-02, -9.9097e-03,  2.5924e-02, -2.4731e-03,  3.1691e-02]],\n",
              "              \n",
              "                       [[-4.2208e-04,  2.5279e-02,  1.8877e-02,  1.0359e-02,  3.5351e-02],\n",
              "                        [ 2.1901e-03,  2.3689e-02,  2.3979e-02, -2.8972e-02,  2.7169e-02],\n",
              "                        [ 3.3535e-02,  1.9709e-02,  2.9716e-02,  2.6493e-02,  1.8725e-02],\n",
              "                        [-2.4789e-03, -2.3987e-02, -6.3129e-03,  3.5586e-03,  4.2649e-03],\n",
              "                        [ 2.7064e-02, -2.1146e-02,  1.2416e-02,  6.7457e-03, -1.0098e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.5735e-03,  1.9173e-03,  2.5635e-02, -2.1985e-02,  1.9949e-02],\n",
              "                        [ 1.7887e-02,  1.0834e-02, -3.4766e-02,  5.2523e-03, -1.0574e-02],\n",
              "                        [ 1.3135e-02,  3.1926e-03, -4.5216e-03,  3.5231e-02, -2.9762e-02],\n",
              "                        [ 2.4811e-03, -2.1328e-02, -7.2018e-03,  2.0168e-02,  2.6710e-02],\n",
              "                        [-3.2609e-02,  2.2749e-02, -1.2582e-02,  2.5061e-02,  3.3967e-02]],\n",
              "              \n",
              "                       [[ 1.1151e-02, -2.7236e-02, -4.1135e-03,  9.8793e-03, -2.8189e-02],\n",
              "                        [ 5.4458e-03,  9.5697e-03,  4.3376e-03,  1.8106e-02, -2.7680e-02],\n",
              "                        [-5.5961e-04,  2.5237e-02,  3.0403e-02, -1.0671e-02,  5.6464e-03],\n",
              "                        [-1.1595e-02, -3.8178e-03, -2.0310e-02, -2.7360e-02, -1.0019e-02],\n",
              "                        [ 2.9832e-02,  1.0997e-02,  8.7955e-03, -2.0334e-02, -4.4393e-03]],\n",
              "              \n",
              "                       [[-9.3459e-03,  8.0026e-03,  1.6385e-02,  3.1243e-02, -2.0490e-02],\n",
              "                        [ 3.2680e-02, -2.7737e-02, -3.0352e-03,  1.5065e-02, -2.5485e-02],\n",
              "                        [-9.1480e-03,  1.5420e-02, -2.7793e-02,  2.5497e-02,  1.1352e-02],\n",
              "                        [-1.6904e-02, -3.3104e-02,  1.6568e-02,  5.2946e-03,  2.3114e-02],\n",
              "                        [ 1.1787e-02, -2.1427e-02,  2.1328e-02, -1.4530e-03,  3.0443e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 7.2339e-03,  1.9408e-02,  1.7673e-02,  1.7312e-02, -9.5153e-03],\n",
              "                        [ 1.8050e-02,  8.0722e-03,  3.2691e-02, -1.0740e-03, -1.9503e-02],\n",
              "                        [ 1.8039e-02,  1.3164e-02,  2.4260e-02, -1.8483e-02, -1.3588e-02],\n",
              "                        [ 2.6582e-02, -4.9348e-03, -9.6376e-03,  5.6427e-03, -1.8776e-02],\n",
              "                        [ 8.4765e-03, -2.2090e-02,  5.5906e-03, -1.6578e-02, -7.2200e-03]],\n",
              "              \n",
              "                       [[-3.1829e-02, -3.7823e-03, -1.0683e-02,  3.3656e-02, -1.6727e-03],\n",
              "                        [ 2.9664e-02, -1.3621e-02, -1.2689e-02, -8.6398e-03, -2.4161e-02],\n",
              "                        [-2.2774e-02,  2.8490e-03,  1.2845e-02,  2.9861e-02,  7.9279e-03],\n",
              "                        [-2.0258e-02,  3.0949e-02, -3.0257e-02, -5.3290e-03,  3.3895e-02],\n",
              "                        [-1.6301e-02, -2.5746e-02,  9.2241e-03,  3.2458e-02, -1.9792e-02]],\n",
              "              \n",
              "                       [[-3.3105e-02,  1.1987e-03,  8.8173e-03, -2.8909e-02,  9.4099e-03],\n",
              "                        [-1.3653e-02, -2.8731e-02,  5.6422e-03,  2.8910e-02, -5.2669e-03],\n",
              "                        [ 3.5306e-02, -3.3182e-03,  7.1453e-04,  1.8352e-02,  3.2695e-02],\n",
              "                        [ 1.9511e-02, -3.0595e-02, -1.9185e-02,  2.4041e-02, -1.7840e-02],\n",
              "                        [-2.9755e-02, -2.4947e-02,  2.0832e-03, -4.0445e-03,  1.3841e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-2.5916e-02, -3.3237e-02,  1.9437e-02, -3.4417e-02,  1.0824e-02],\n",
              "                        [-1.1248e-02, -2.2772e-03, -1.8953e-02,  1.9676e-02,  7.3845e-03],\n",
              "                        [-2.6740e-02,  1.1404e-02,  3.0303e-02,  2.4481e-02, -1.4298e-02],\n",
              "                        [ 2.5089e-02, -6.0101e-03,  2.1806e-02,  1.9520e-03, -2.9637e-02],\n",
              "                        [-4.3919e-03, -2.7126e-02, -2.6511e-02,  1.3290e-02, -7.9130e-03]],\n",
              "              \n",
              "                       [[ 1.4549e-03, -3.0114e-02, -3.1935e-03, -1.2172e-03, -1.0140e-02],\n",
              "                        [-6.9139e-03,  3.0407e-02, -3.0692e-02,  3.3556e-02, -6.4760e-04],\n",
              "                        [-6.5792e-03, -1.3950e-02,  2.8582e-02, -4.8862e-03, -1.8133e-02],\n",
              "                        [-1.8548e-03,  2.6510e-02, -2.6680e-03, -2.7718e-02,  8.9935e-03],\n",
              "                        [-7.0365e-03,  1.1924e-02,  1.9468e-02, -1.4201e-03, -1.7667e-04]],\n",
              "              \n",
              "                       [[ 6.2430e-03, -3.2033e-02,  1.6275e-02, -2.0957e-02,  2.7886e-02],\n",
              "                        [-3.3177e-02,  1.9641e-02, -2.3526e-02,  4.6582e-03, -1.8724e-02],\n",
              "                        [-1.9353e-03,  3.2072e-02, -1.7335e-03,  5.9051e-03,  7.8391e-03],\n",
              "                        [ 3.4298e-02,  1.9645e-02,  2.2714e-02, -2.8144e-02, -1.5374e-03],\n",
              "                        [-1.5147e-03,  3.4247e-02,  2.1802e-02, -1.6287e-02,  1.9679e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 2.8111e-02, -6.9030e-03,  1.1657e-02, -3.2138e-02,  1.2930e-02],\n",
              "                        [ 3.6687e-03,  1.9064e-02,  2.3312e-02, -2.8729e-02,  2.9775e-02],\n",
              "                        [ 1.1466e-02,  2.1383e-02, -3.5188e-02, -2.8220e-03,  1.6651e-02],\n",
              "                        [ 1.1176e-02, -1.2874e-02, -1.7096e-02,  1.0341e-02, -3.8432e-03],\n",
              "                        [ 1.6347e-02,  2.3467e-02,  1.5525e-02, -1.0406e-02,  4.3102e-03]],\n",
              "              \n",
              "                       [[ 1.3624e-02,  1.7004e-02, -1.0066e-02, -1.2686e-02,  3.9311e-03],\n",
              "                        [-3.2551e-02,  2.0783e-03, -2.1174e-03,  3.0306e-02, -8.4719e-03],\n",
              "                        [-7.6157e-03, -2.9308e-02, -2.2940e-02,  1.0055e-03,  2.8726e-02],\n",
              "                        [-3.2288e-02, -3.3778e-03, -1.7316e-02,  3.1679e-02, -3.0717e-02],\n",
              "                        [ 6.9721e-04,  1.7302e-02,  1.0221e-03,  1.8010e-02, -1.4261e-02]],\n",
              "              \n",
              "                       [[ 2.3940e-02, -1.4918e-02,  2.4448e-02,  9.2683e-03,  2.9220e-02],\n",
              "                        [-2.0916e-02, -7.7201e-03, -2.5893e-02, -8.1254e-03,  9.4108e-03],\n",
              "                        [-3.1971e-02, -3.4556e-02,  2.6372e-02, -2.7416e-02,  3.2738e-02],\n",
              "                        [-2.6937e-02, -5.7648e-03,  2.8366e-02, -2.2643e-02,  1.5050e-02],\n",
              "                        [-1.6811e-02,  8.0513e-03, -3.7158e-03,  3.3292e-02,  2.2247e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.5522e-02, -1.0198e-02, -9.5550e-03, -2.1745e-02,  6.2442e-04],\n",
              "                        [-3.1696e-02,  1.1106e-02,  3.0663e-02, -2.3371e-02,  2.7737e-02],\n",
              "                        [-3.4146e-02,  1.7145e-02, -3.0791e-02,  1.9137e-02,  1.4073e-02],\n",
              "                        [ 1.7416e-02, -1.4478e-02, -3.2633e-03,  2.7677e-02, -1.8052e-02],\n",
              "                        [-7.3252e-03,  2.3837e-02, -1.6459e-02,  3.4092e-02,  1.1818e-02]],\n",
              "              \n",
              "                       [[-1.6393e-02,  5.6553e-03,  9.3213e-04, -2.2662e-02, -1.7355e-02],\n",
              "                        [-3.2091e-02,  2.8242e-02, -1.4767e-02,  7.3605e-03,  1.1044e-02],\n",
              "                        [-2.6475e-02,  3.0135e-02,  2.3999e-02, -2.0675e-02, -2.9027e-02],\n",
              "                        [-1.4549e-03,  1.4357e-02, -8.2908e-03,  5.8709e-03,  3.2440e-02],\n",
              "                        [ 3.4120e-02, -5.4881e-03,  2.3447e-03,  1.3691e-02,  1.0795e-02]],\n",
              "              \n",
              "                       [[-2.1611e-02, -3.2672e-02,  2.4767e-02,  1.2320e-02, -3.0211e-02],\n",
              "                        [-1.5449e-02, -1.2576e-03,  2.6197e-02,  2.5041e-03,  2.9170e-02],\n",
              "                        [ 2.0303e-02, -2.2420e-02, -9.7513e-03,  1.8077e-02,  3.0290e-02],\n",
              "                        [ 2.0728e-02, -1.6257e-03,  1.6708e-02,  3.4268e-02,  2.2629e-02],\n",
              "                        [ 5.3283e-03, -4.4777e-03, -1.5192e-02,  2.9101e-03,  3.1986e-02]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 1.1729e-03, -3.2047e-02,  1.3661e-03, -1.8806e-02,  3.0593e-02],\n",
              "                        [ 1.9192e-02,  6.0752e-03,  1.3133e-02, -1.2432e-02, -2.3622e-02],\n",
              "                        [ 8.6812e-04,  1.5952e-02, -3.0218e-02, -2.7636e-02, -5.6857e-03],\n",
              "                        [ 1.5483e-02, -1.8787e-02,  1.0486e-03,  2.6668e-02, -1.7150e-02],\n",
              "                        [-2.1109e-02, -1.9689e-02,  3.1657e-02, -1.7794e-02,  1.9388e-02]],\n",
              "              \n",
              "                       [[ 8.0547e-03,  1.6534e-02, -1.9897e-02, -3.3821e-02, -1.7761e-03],\n",
              "                        [-2.0973e-02, -3.4138e-02, -2.9443e-02, -1.8556e-02, -2.2310e-02],\n",
              "                        [ 9.2272e-03, -6.8715e-03,  1.6784e-02, -3.3091e-02, -7.5061e-03],\n",
              "                        [-4.0870e-03,  2.8125e-03, -1.6479e-02, -3.2452e-02, -2.9883e-02],\n",
              "                        [-1.8100e-02,  3.0107e-02,  2.7054e-02, -2.2399e-02,  2.0859e-02]],\n",
              "              \n",
              "                       [[-6.5469e-03, -1.8234e-03,  7.1735e-03, -2.4522e-02,  1.1481e-02],\n",
              "                        [-7.8467e-03, -2.6369e-02,  8.2280e-03, -6.4732e-03,  1.9682e-02],\n",
              "                        [-9.8715e-03, -2.1750e-02,  2.9857e-02, -3.0813e-02, -4.6613e-03],\n",
              "                        [ 3.1009e-02,  1.2995e-02, -1.1412e-02, -2.6551e-02, -1.7100e-02],\n",
              "                        [ 1.2580e-02,  2.4320e-02,  1.3325e-02, -3.1987e-02, -2.3414e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-2.6574e-02, -3.6969e-03, -1.4952e-02,  1.7698e-02,  1.6908e-02],\n",
              "                        [-1.9154e-02,  1.1455e-02, -1.0592e-02, -3.4322e-02,  3.4573e-02],\n",
              "                        [ 1.3490e-02, -3.0615e-02, -3.2109e-02,  2.3232e-02, -2.8999e-02],\n",
              "                        [ 1.2478e-02,  1.2395e-02,  1.8277e-02,  2.6957e-02,  2.1442e-02],\n",
              "                        [ 2.6952e-02,  7.4722e-03,  2.8226e-02, -1.9590e-03,  2.4811e-02]],\n",
              "              \n",
              "                       [[-1.9025e-02, -3.9046e-03,  7.0127e-03, -1.6594e-02, -2.9846e-02],\n",
              "                        [-3.0553e-02,  2.4043e-02,  5.4140e-03,  1.3178e-02,  2.6329e-02],\n",
              "                        [ 2.6147e-02,  1.0016e-02, -8.3273e-03, -1.2676e-02,  6.9349e-03],\n",
              "                        [-2.1740e-02, -3.8326e-03, -8.0845e-03, -1.3325e-02, -3.4987e-02],\n",
              "                        [ 2.8710e-02, -3.2568e-02, -1.8957e-02, -3.0130e-02,  3.3275e-02]],\n",
              "              \n",
              "                       [[-2.1853e-02, -2.3632e-02,  2.5207e-02,  1.2240e-02, -2.4031e-02],\n",
              "                        [-9.8274e-03, -1.6225e-02, -1.4132e-03, -1.0638e-02, -1.7545e-02],\n",
              "                        [-3.1312e-02, -1.5652e-02, -2.3457e-02,  5.6711e-03, -2.4801e-02],\n",
              "                        [-1.0224e-02, -2.1256e-02, -3.2926e-02, -6.7638e-03, -2.3595e-03],\n",
              "                        [-3.2856e-02,  3.1138e-02, -9.6676e-03,  2.2317e-02,  2.8182e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[-3.2034e-02, -2.1610e-02, -2.5242e-02, -1.8104e-02, -1.7594e-03],\n",
              "                        [ 2.7906e-02, -6.3863e-04, -1.2609e-02, -1.2538e-02,  1.2091e-02],\n",
              "                        [-2.1471e-02,  3.3705e-03, -1.5888e-03,  1.3469e-02,  1.4585e-02],\n",
              "                        [-1.3931e-02,  3.4009e-02,  3.3395e-02, -3.0995e-02,  2.6559e-02],\n",
              "                        [ 8.3495e-03, -8.9423e-03, -3.4214e-02,  2.9457e-02, -2.5848e-02]],\n",
              "              \n",
              "                       [[ 2.3539e-02, -3.3461e-02,  8.6676e-03,  3.1815e-04, -1.7553e-02],\n",
              "                        [-2.2744e-02,  2.8013e-02, -6.7790e-03,  1.6778e-02,  4.5135e-03],\n",
              "                        [ 2.4148e-02, -2.4718e-02, -1.1081e-02,  3.5962e-03,  1.9336e-02],\n",
              "                        [ 3.5278e-02, -1.5484e-02,  9.4410e-03, -1.7168e-02, -1.2591e-02],\n",
              "                        [-1.4901e-02, -1.7242e-02,  3.2678e-02, -1.7932e-02, -7.0670e-03]],\n",
              "              \n",
              "                       [[ 4.1091e-03,  3.2549e-02,  1.3117e-02, -3.0381e-02, -2.6750e-02],\n",
              "                        [ 7.3138e-03, -2.9238e-02, -7.3772e-04,  2.9823e-02,  1.1720e-02],\n",
              "                        [ 7.8903e-03,  3.3410e-02,  2.1698e-02,  1.6218e-04, -4.4431e-03],\n",
              "                        [-3.4969e-02,  2.7342e-02,  1.7157e-02,  1.2122e-02,  3.4796e-02],\n",
              "                        [ 1.8043e-02,  3.4268e-02,  1.2147e-02, -2.0417e-03,  6.4318e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-2.3833e-02,  1.0481e-02,  2.0771e-02, -3.3891e-02,  2.9709e-02],\n",
              "                        [ 3.0487e-02,  1.8237e-03,  1.6778e-02,  1.0758e-02, -1.5036e-02],\n",
              "                        [ 2.1245e-02, -1.5343e-02, -1.5033e-02, -4.6261e-03,  5.8966e-03],\n",
              "                        [ 9.8422e-03, -2.5288e-02, -6.4167e-03,  1.4345e-02,  5.2145e-03],\n",
              "                        [-3.5046e-02,  2.9996e-02,  2.6879e-02,  2.4124e-02,  1.6270e-02]],\n",
              "              \n",
              "                       [[ 6.1187e-03, -3.3499e-02, -7.6964e-03,  9.6377e-03, -1.5469e-02],\n",
              "                        [-6.7922e-04,  2.4097e-02, -1.2699e-02, -1.6142e-02, -3.3661e-02],\n",
              "                        [-1.0327e-02,  4.7865e-03,  2.2930e-02,  3.4839e-02, -7.8484e-03],\n",
              "                        [-2.3843e-04, -7.3436e-03,  2.0019e-02,  4.7329e-03, -1.7342e-02],\n",
              "                        [ 3.5319e-02, -2.9712e-02,  4.4820e-03,  3.3287e-02, -1.7580e-04]],\n",
              "              \n",
              "                       [[-8.9571e-03, -1.2334e-03,  1.5362e-02,  7.1942e-03,  2.6041e-02],\n",
              "                        [-2.6636e-02,  3.0226e-02, -1.7938e-02,  2.9052e-02,  1.5909e-02],\n",
              "                        [ 2.3520e-02,  3.1174e-03, -3.1972e-02, -8.1279e-03,  2.1255e-04],\n",
              "                        [ 3.5034e-02, -2.4440e-02, -1.0830e-02,  2.7128e-02, -9.2131e-03],\n",
              "                        [ 7.1613e-04, -9.4516e-03,  1.4486e-02, -2.9298e-03,  3.1387e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[-3.2706e-02,  1.4387e-02,  3.0586e-02, -3.0073e-02, -3.9306e-03],\n",
              "                        [-2.4268e-02,  1.1976e-02, -2.9122e-02,  3.0308e-02,  2.6596e-02],\n",
              "                        [-2.5414e-02,  3.0208e-02, -2.0262e-02,  9.3208e-03,  3.3040e-02],\n",
              "                        [ 3.3335e-02, -2.6125e-02,  4.8783e-03, -1.1195e-02,  6.0665e-04],\n",
              "                        [-1.2073e-03,  2.6912e-02, -4.0983e-03,  2.9444e-02, -1.0055e-02]],\n",
              "              \n",
              "                       [[-9.3177e-03,  4.6150e-04,  1.8443e-02,  1.1977e-02, -2.2160e-02],\n",
              "                        [-1.7516e-02,  4.7180e-03, -1.1415e-02,  8.6394e-03, -1.0004e-03],\n",
              "                        [-3.9919e-03, -1.9693e-02, -1.1442e-02, -1.3276e-02, -1.0317e-02],\n",
              "                        [ 1.6547e-03,  9.7718e-03, -2.2557e-02,  8.0882e-03,  2.3164e-02],\n",
              "                        [-3.4988e-02, -5.3664e-03, -3.2811e-02,  1.1936e-02,  1.4782e-02]],\n",
              "              \n",
              "                       [[-6.1050e-03,  2.5357e-02, -1.7537e-02, -6.4177e-03, -1.7945e-02],\n",
              "                        [ 2.7630e-02, -7.1917e-03,  1.2822e-02, -2.2792e-02, -2.2065e-02],\n",
              "                        [-5.1398e-03,  3.1908e-02,  3.7045e-03, -2.4081e-02, -1.8836e-02],\n",
              "                        [ 5.3470e-03, -1.4218e-02, -3.2308e-02, -2.7048e-03,  1.3025e-02],\n",
              "                        [-8.5761e-03, -9.5036e-03, -2.1523e-02, -4.0770e-03, -1.8771e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.9009e-02,  1.9136e-02, -2.1754e-02,  2.3600e-02, -2.5031e-02],\n",
              "                        [ 2.1729e-02,  7.2002e-05, -6.2719e-05, -1.3564e-02,  3.1690e-02],\n",
              "                        [-2.1538e-03,  3.0834e-02, -2.1828e-02, -3.1268e-02,  2.4246e-02],\n",
              "                        [-3.1147e-02,  2.6639e-02, -1.2840e-02,  1.4990e-02,  3.4476e-02],\n",
              "                        [-2.7961e-02, -3.4819e-02,  1.8939e-02, -3.3029e-02, -1.1783e-02]],\n",
              "              \n",
              "                       [[ 3.4837e-02, -3.0464e-02, -1.4206e-02,  3.3864e-02, -2.6834e-02],\n",
              "                        [ 2.4757e-02,  3.0759e-02, -3.4746e-02, -2.8855e-02, -1.3459e-02],\n",
              "                        [-3.1102e-02, -3.3977e-02,  3.3226e-02, -2.8458e-02,  1.9399e-03],\n",
              "                        [-9.9097e-03, -9.5118e-03, -1.6334e-02, -2.5741e-02, -4.0916e-03],\n",
              "                        [ 1.4459e-02, -1.8735e-02,  1.3073e-02, -2.7767e-02, -1.3942e-02]],\n",
              "              \n",
              "                       [[ 5.7930e-04, -5.5091e-03,  4.8567e-03, -3.7709e-03,  2.4466e-02],\n",
              "                        [ 3.4205e-02, -1.0015e-02,  2.1283e-02,  3.1702e-02, -1.9196e-02],\n",
              "                        [-1.8980e-02,  1.2090e-02, -1.7392e-02, -1.8056e-02, -1.6463e-02],\n",
              "                        [ 1.9302e-02,  5.9468e-04, -3.4030e-02,  2.2209e-02, -8.7145e-03],\n",
              "                        [ 1.2392e-03, -8.9698e-03, -2.1179e-02,  1.1961e-02,  2.5787e-02]]]])),\n",
              "             ('inception2.branch3.3.bias',\n",
              "              tensor([ 1.4075e-02,  2.8840e-02,  1.8198e-02, -3.1030e-02, -6.8860e-03,\n",
              "                      -3.6411e-03, -1.8274e-02, -1.3852e-03, -2.1292e-02, -3.5356e-03,\n",
              "                      -2.4192e-02,  8.4506e-03,  3.1011e-02,  2.0172e-02,  1.2829e-02,\n",
              "                       2.6919e-02,  3.2942e-02,  1.8987e-03, -1.9643e-02,  2.0109e-02,\n",
              "                       2.3292e-02, -7.8166e-03,  3.3865e-03, -2.4729e-02, -3.4130e-02,\n",
              "                      -5.7496e-03,  1.8712e-02, -2.3623e-02, -3.2390e-02,  2.9168e-02,\n",
              "                      -1.4435e-02, -3.2037e-02, -1.4552e-02, -2.6208e-02, -2.4883e-02,\n",
              "                       1.2933e-02, -2.5380e-02, -1.9398e-02,  5.8072e-03,  2.4550e-02,\n",
              "                       1.3796e-02,  3.2947e-02, -2.2633e-02, -2.4816e-02,  2.6312e-02,\n",
              "                       2.3267e-02, -1.9840e-02,  1.2086e-02, -3.2128e-02,  8.8452e-03,\n",
              "                      -9.2433e-03,  3.2781e-02,  1.7592e-02,  2.4258e-02,  3.4773e-02,\n",
              "                      -2.5498e-03,  3.3746e-04,  2.5533e-02, -1.1483e-02, -3.0828e-02,\n",
              "                      -1.3451e-02,  3.1344e-02, -3.4536e-02, -7.2117e-03, -2.1299e-02,\n",
              "                      -2.6714e-02,  2.5880e-02,  3.0280e-02, -2.4174e-04,  2.7544e-02,\n",
              "                      -2.3709e-02,  1.2660e-02,  1.1531e-02,  1.0349e-02, -1.0510e-02,\n",
              "                      -3.2370e-02,  1.9716e-02, -2.7147e-02,  3.3125e-02, -7.5444e-03,\n",
              "                      -1.6547e-02,  1.6489e-02,  3.4241e-02, -4.0971e-05, -5.0560e-04,\n",
              "                      -9.2729e-03, -1.3503e-02, -1.8223e-02,  2.0175e-02,  1.0073e-02,\n",
              "                      -2.1058e-02, -2.4221e-02,  1.7988e-03,  2.8218e-02, -1.7012e-02,\n",
              "                       2.6052e-03])),\n",
              "             ('inception2.branch3.4.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1.])),\n",
              "             ('inception2.branch3.4.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('inception2.branch3.4.running_mean',\n",
              "              tensor([ 0.0207,  0.0073,  0.0146, -0.0217,  0.0022,  0.0159, -0.0026,  0.0071,\n",
              "                      -0.0234, -0.0184,  0.0076, -0.0121,  0.0036, -0.0177, -0.0323,  0.0104,\n",
              "                       0.0010, -0.0105, -0.0139, -0.0176,  0.0127,  0.0030, -0.0118,  0.0006,\n",
              "                      -0.0210, -0.0227,  0.0084,  0.0260, -0.0225,  0.0163, -0.0005, -0.0113,\n",
              "                      -0.0084,  0.0061, -0.0017, -0.0127,  0.0242, -0.0259,  0.0173, -0.0105,\n",
              "                      -0.0032,  0.0040,  0.0118,  0.0137, -0.0243,  0.0037, -0.0047, -0.0005,\n",
              "                      -0.0144,  0.0212,  0.0254, -0.0027, -0.0113,  0.0112, -0.0074,  0.0223,\n",
              "                      -0.0025, -0.0026, -0.0184,  0.0131, -0.0225, -0.0065, -0.0290,  0.0124,\n",
              "                      -0.0467, -0.0049,  0.0111,  0.0167,  0.0285,  0.0289, -0.0055,  0.0151,\n",
              "                       0.0139,  0.0021,  0.0031, -0.0035,  0.0470,  0.0149,  0.0284,  0.0193,\n",
              "                       0.0060,  0.0081,  0.0043,  0.0345,  0.0318, -0.0124, -0.0256, -0.0080,\n",
              "                       0.0221, -0.0241, -0.0189,  0.0249,  0.0056, -0.0323, -0.0012, -0.0053])),\n",
              "             ('inception2.branch3.4.running_var',\n",
              "              tensor([0.9102, 0.9084, 0.9108, 0.9080, 0.9087, 0.9102, 0.9094, 0.9074, 0.9094,\n",
              "                      0.9092, 0.9095, 0.9113, 0.9097, 0.9095, 0.9099, 0.9094, 0.9091, 0.9088,\n",
              "                      0.9088, 0.9103, 0.9115, 0.9084, 0.9098, 0.9082, 0.9094, 0.9085, 0.9101,\n",
              "                      0.9093, 0.9076, 0.9086, 0.9109, 0.9094, 0.9079, 0.9090, 0.9115, 0.9113,\n",
              "                      0.9101, 0.9086, 0.9079, 0.9099, 0.9105, 0.9121, 0.9085, 0.9096, 0.9084,\n",
              "                      0.9107, 0.9076, 0.9075, 0.9107, 0.9086, 0.9089, 0.9072, 0.9107, 0.9093,\n",
              "                      0.9105, 0.9092, 0.9086, 0.9074, 0.9089, 0.9125, 0.9168, 0.9088, 0.9109,\n",
              "                      0.9093, 0.9113, 0.9086, 0.9095, 0.9094, 0.9092, 0.9113, 0.9110, 0.9092,\n",
              "                      0.9086, 0.9084, 0.9086, 0.9083, 0.9091, 0.9101, 0.9087, 0.9115, 0.9083,\n",
              "                      0.9080, 0.9100, 0.9108, 0.9111, 0.9074, 0.9093, 0.9116, 0.9099, 0.9080,\n",
              "                      0.9102, 0.9094, 0.9102, 0.9097, 0.9083, 0.9085])),\n",
              "             ('inception2.branch3.4.num_batches_tracked', tensor(1)),\n",
              "             ('inception2.branch4.1.weight',\n",
              "              tensor([[[[ 0.0561]],\n",
              "              \n",
              "                       [[ 0.0379]],\n",
              "              \n",
              "                       [[-0.0295]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0369]],\n",
              "              \n",
              "                       [[ 0.0322]],\n",
              "              \n",
              "                       [[-0.0035]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0530]],\n",
              "              \n",
              "                       [[ 0.0300]],\n",
              "              \n",
              "                       [[-0.0046]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0262]],\n",
              "              \n",
              "                       [[-0.0500]],\n",
              "              \n",
              "                       [[ 0.0391]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0051]],\n",
              "              \n",
              "                       [[-0.0495]],\n",
              "              \n",
              "                       [[-0.0350]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0296]],\n",
              "              \n",
              "                       [[ 0.0216]],\n",
              "              \n",
              "                       [[ 0.0530]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-0.0548]],\n",
              "              \n",
              "                       [[-0.0329]],\n",
              "              \n",
              "                       [[-0.0435]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0207]],\n",
              "              \n",
              "                       [[ 0.0177]],\n",
              "              \n",
              "                       [[-0.0354]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0209]],\n",
              "              \n",
              "                       [[ 0.0131]],\n",
              "              \n",
              "                       [[ 0.0616]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0127]],\n",
              "              \n",
              "                       [[ 0.0438]],\n",
              "              \n",
              "                       [[ 0.0563]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0139]],\n",
              "              \n",
              "                       [[ 0.0588]],\n",
              "              \n",
              "                       [[-0.0149]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0507]],\n",
              "              \n",
              "                       [[ 0.0346]],\n",
              "              \n",
              "                       [[ 0.0553]]]])),\n",
              "             ('inception2.branch4.1.bias',\n",
              "              tensor([-0.0394, -0.0306,  0.0015, -0.0094,  0.0291, -0.0292, -0.0190,  0.0067,\n",
              "                      -0.0338, -0.0602, -0.0548,  0.0586, -0.0511,  0.0428,  0.0159, -0.0191,\n",
              "                       0.0531,  0.0480,  0.0308,  0.0538, -0.0155, -0.0577,  0.0540,  0.0596,\n",
              "                      -0.0245, -0.0492,  0.0005, -0.0567, -0.0611, -0.0339,  0.0351,  0.0264,\n",
              "                       0.0068,  0.0381,  0.0395,  0.0089,  0.0384,  0.0476,  0.0550,  0.0589,\n",
              "                      -0.0274, -0.0575, -0.0446,  0.0400,  0.0203, -0.0221,  0.0109,  0.0116,\n",
              "                       0.0457, -0.0535, -0.0077, -0.0523, -0.0623, -0.0061,  0.0012,  0.0190,\n",
              "                      -0.0527,  0.0503,  0.0527, -0.0324, -0.0462, -0.0004,  0.0362,  0.0417])),\n",
              "             ('inception2.branch4.2.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('inception2.branch4.2.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('inception2.branch4.2.running_mean',\n",
              "              tensor([-0.1407, -0.0565,  0.0484, -0.2375, -0.0450, -0.0648,  0.0423,  0.1477,\n",
              "                       0.1010, -0.0632, -0.0149,  0.1306, -0.0819, -0.0253, -0.0571,  0.0085,\n",
              "                       0.0223, -0.0378,  0.0513, -0.0393,  0.0683, -0.0900, -0.1553,  0.1624,\n",
              "                      -0.0679, -0.0254,  0.0286, -0.0013,  0.1247,  0.0401, -0.1578, -0.0585,\n",
              "                      -0.0242, -0.0342, -0.0743,  0.0894,  0.0609, -0.0117,  0.0029, -0.0387,\n",
              "                      -0.1740,  0.1343,  0.0721, -0.0276, -0.0630, -0.0962, -0.0728, -0.1306,\n",
              "                      -0.1275, -0.0872,  0.0331, -0.1754,  0.0524, -0.0529, -0.1355, -0.0215,\n",
              "                      -0.0143, -0.0062, -0.0538, -0.0416,  0.0480, -0.0967, -0.1050,  0.0511])),\n",
              "             ('inception2.branch4.2.running_var',\n",
              "              tensor([0.9292, 0.9096, 0.9313, 0.9562, 0.9104, 0.9149, 0.9134, 0.9468, 0.9355,\n",
              "                      0.9093, 0.9226, 0.9202, 0.9240, 0.9132, 0.9124, 0.9170, 0.9087, 0.9161,\n",
              "                      0.9076, 0.9082, 0.9166, 0.9362, 0.9239, 0.9318, 0.9140, 0.9091, 0.9120,\n",
              "                      0.9090, 0.9356, 0.9092, 0.9518, 0.9237, 0.9109, 0.9149, 0.9240, 0.9240,\n",
              "                      0.9195, 0.9094, 0.9174, 0.9128, 0.9348, 0.9162, 0.9106, 0.9153, 0.9116,\n",
              "                      0.9178, 0.9145, 0.9322, 0.9276, 0.9248, 0.9067, 0.9404, 0.9096, 0.9091,\n",
              "                      0.9466, 0.9101, 0.9135, 0.9183, 0.9142, 0.9125, 0.9084, 0.9213, 0.9182,\n",
              "                      0.9144])),\n",
              "             ('inception2.branch4.2.num_batches_tracked', tensor(1)),\n",
              "             ('classifier.1.weight',\n",
              "              tensor([[ 9.1467e-03,  8.3548e-03,  6.5978e-03,  ...,  1.9761e-03,\n",
              "                       -1.4680e-03, -5.4043e-03],\n",
              "                      [-6.6392e-04,  3.9360e-03,  9.7449e-03,  ...,  7.6888e-04,\n",
              "                        8.4582e-03,  6.7546e-05],\n",
              "                      [-4.8797e-03,  5.7541e-03,  6.1815e-03,  ...,  7.5819e-03,\n",
              "                        3.3858e-03, -3.4921e-03],\n",
              "                      ...,\n",
              "                      [-1.0800e-02,  8.1848e-03, -8.6961e-03,  ...,  8.1189e-03,\n",
              "                       -9.6210e-03,  5.9399e-03],\n",
              "                      [-5.6097e-03, -2.0596e-03, -2.9760e-04,  ...,  8.4341e-03,\n",
              "                       -5.1738e-03,  1.9524e-03],\n",
              "                      [ 9.7869e-03,  5.5067e-03, -3.2578e-03,  ..., -1.3078e-03,\n",
              "                        2.7961e-03, -6.5840e-03]])),\n",
              "             ('classifier.1.bias',\n",
              "              tensor([ 1.0315e-02, -5.3193e-03,  2.9398e-03, -6.3379e-03,  1.1044e-02,\n",
              "                       8.9230e-03,  5.9640e-03,  4.4906e-03,  1.1383e-02, -1.2612e-04,\n",
              "                      -2.6752e-03, -3.0339e-03, -1.6229e-04, -1.0892e-02, -5.5827e-03,\n",
              "                      -3.8261e-03, -3.7289e-03,  6.9006e-03, -3.2356e-03,  2.5297e-03,\n",
              "                      -8.0610e-03,  3.4475e-03,  6.5727e-03, -9.0494e-03, -7.6810e-03,\n",
              "                      -8.4566e-03, -4.7121e-03,  2.1366e-03,  4.4355e-04, -9.0958e-03,\n",
              "                       2.9366e-03,  6.4062e-03, -6.3451e-03, -1.1375e-02, -5.0337e-03,\n",
              "                       8.2558e-04, -5.9142e-03, -7.6193e-03, -5.5964e-03, -4.3107e-03,\n",
              "                       5.3124e-03, -5.9795e-04,  7.4971e-03,  2.5693e-03, -5.4016e-03,\n",
              "                      -6.7416e-03, -9.1474e-03,  1.0028e-02, -4.1789e-03,  4.0105e-03,\n",
              "                       1.6789e-03, -9.0891e-03,  1.0598e-02,  1.1312e-03,  5.0551e-04,\n",
              "                      -7.2159e-03,  4.7982e-03, -7.6013e-03,  8.6642e-03, -6.9033e-03,\n",
              "                      -9.9874e-03,  3.9660e-03,  6.0716e-03, -6.3438e-03,  1.1273e-02,\n",
              "                      -4.3086e-03, -4.0883e-03, -4.6059e-05,  3.5173e-03,  1.0964e-02,\n",
              "                       6.3975e-03,  3.2492e-03, -9.5164e-03, -1.3863e-03,  6.7407e-03,\n",
              "                      -7.2841e-03, -4.4974e-04, -1.1264e-02,  9.4333e-03,  4.0798e-03,\n",
              "                      -4.8292e-03,  3.4723e-03,  5.6910e-03, -5.4535e-03, -1.9051e-03,\n",
              "                       9.7788e-03, -4.3790e-03, -4.9612e-03, -7.7596e-03,  8.0730e-03,\n",
              "                       1.5505e-03, -1.1406e-02,  2.8958e-03, -8.0592e-03, -2.1984e-03,\n",
              "                       4.2501e-03, -9.3824e-03,  5.8556e-03, -7.1971e-03, -1.1179e-02,\n",
              "                      -6.7478e-03, -8.1986e-03,  1.1228e-02, -1.1150e-02,  5.7382e-03,\n",
              "                       1.2895e-03, -9.5400e-03, -5.7411e-03, -4.8329e-04,  5.8483e-03,\n",
              "                      -1.0637e-02, -3.4361e-03, -6.1333e-03, -1.1350e-03, -6.5518e-03,\n",
              "                       7.2593e-03, -3.0096e-03,  9.9615e-03,  1.0644e-02, -1.0797e-03,\n",
              "                       9.8819e-03, -5.8647e-03, -4.9876e-03,  6.6702e-03,  6.8070e-04,\n",
              "                       9.2383e-03,  1.0073e-02,  1.2193e-03,  9.8858e-03,  1.6908e-04,\n",
              "                      -1.0447e-02, -1.0373e-02, -2.0938e-04,  1.0440e-02,  7.8262e-03,\n",
              "                       1.0304e-02,  4.2360e-03,  7.2367e-03,  4.1227e-03,  9.7289e-03,\n",
              "                       1.0716e-03,  6.4856e-03,  1.5697e-03, -8.3504e-03, -1.0785e-02,\n",
              "                       9.0897e-03,  7.0314e-03, -6.8370e-04,  9.4848e-03,  5.0405e-03,\n",
              "                      -6.2603e-03,  4.0211e-03, -6.8494e-03,  5.6973e-03, -7.4048e-03,\n",
              "                       3.8225e-03,  1.0503e-02, -7.9741e-03,  3.3633e-03,  7.6984e-03,\n",
              "                      -1.3446e-03, -8.9445e-04,  1.1803e-03, -7.7506e-03,  8.5840e-03,\n",
              "                      -1.1849e-03,  2.4977e-03,  1.1300e-03, -6.7552e-03,  7.0084e-03,\n",
              "                       1.2384e-03, -9.1713e-03,  1.5824e-05,  4.0300e-03,  5.4399e-03,\n",
              "                      -9.9397e-03,  1.1311e-02,  1.6703e-03,  8.3357e-03, -8.0198e-03,\n",
              "                       8.0994e-03,  1.9050e-04, -5.5891e-03, -5.7755e-03, -1.4759e-04,\n",
              "                       5.4347e-03, -9.8513e-03, -5.9819e-03,  4.9095e-03,  4.9940e-03,\n",
              "                      -2.9801e-03, -4.9792e-03,  5.2300e-03,  4.1708e-03,  5.5638e-03,\n",
              "                      -8.3382e-03, -2.4551e-03, -7.0616e-03,  2.6615e-03, -1.0698e-03,\n",
              "                       3.9481e-03, -2.1142e-04,  8.3712e-03, -5.0579e-03,  8.6840e-03,\n",
              "                      -2.3148e-04, -5.8971e-03,  2.1587e-03, -2.9237e-03,  1.0271e-02,\n",
              "                      -7.7304e-03, -4.1743e-03, -1.7495e-03, -1.4066e-03,  4.4056e-03,\n",
              "                      -4.7015e-03,  8.1093e-03,  6.1884e-03,  1.2428e-03, -1.0627e-02,\n",
              "                      -5.4262e-03,  4.7264e-03,  8.7553e-03, -8.5294e-03,  5.9623e-03,\n",
              "                       7.8004e-03,  1.9428e-03, -2.1482e-03,  5.3388e-03,  2.6375e-03,\n",
              "                       8.0965e-03, -6.9324e-03, -2.7849e-03,  9.5090e-03, -7.4031e-03,\n",
              "                      -1.4816e-03,  3.4682e-03,  4.8954e-03,  3.0290e-03,  6.4332e-03,\n",
              "                       8.5628e-03, -6.2412e-03, -6.2427e-03, -1.0491e-02, -1.2548e-03,\n",
              "                       4.9899e-03, -6.3005e-03, -1.4020e-03,  1.0858e-02, -1.2118e-04,\n",
              "                      -9.3757e-03, -2.1066e-03,  8.6068e-03,  2.3636e-03,  7.3576e-04,\n",
              "                       4.2008e-04, -3.5529e-03, -9.5784e-03,  6.4755e-03, -5.2765e-03,\n",
              "                       1.6515e-03,  4.3855e-03,  1.1095e-02,  6.5961e-04,  6.3536e-03,\n",
              "                       1.0594e-02, -2.8622e-03,  5.2589e-03, -8.9128e-03, -4.3965e-03,\n",
              "                      -9.1743e-03,  6.5193e-03,  1.2833e-03,  7.4552e-03,  6.5287e-03,\n",
              "                      -2.3015e-03, -7.4757e-03, -1.8751e-03,  8.9540e-04, -4.1401e-03,\n",
              "                      -1.0504e-02,  6.1412e-03, -6.0682e-03, -5.5238e-03, -6.2322e-03,\n",
              "                      -1.0888e-02, -6.8011e-03, -7.4954e-04,  1.0434e-02,  2.5199e-04,\n",
              "                       8.8263e-03, -6.6453e-03,  6.0315e-04,  6.2153e-03,  7.1852e-04,\n",
              "                       6.7602e-03,  7.7921e-04, -8.2381e-03,  6.1664e-03, -4.9274e-03,\n",
              "                       1.0083e-02, -3.7515e-03, -5.3999e-03,  1.7673e-03,  3.3829e-03,\n",
              "                      -4.6991e-03, -1.0612e-02, -4.5751e-03,  6.7258e-03,  1.0926e-02,\n",
              "                       4.2413e-03, -8.7296e-03, -7.1158e-03,  1.6416e-04, -6.7484e-03,\n",
              "                      -1.1067e-02, -9.5703e-03, -9.6634e-04,  2.6436e-03,  4.6477e-03,\n",
              "                       1.4762e-04, -8.5238e-03, -1.0111e-02, -4.1608e-03, -9.3893e-03,\n",
              "                       7.6517e-03,  7.3424e-03, -6.1199e-03, -9.2938e-03,  1.0248e-04,\n",
              "                      -7.2937e-03, -9.3347e-03, -6.1652e-03, -9.3467e-03, -6.8385e-03,\n",
              "                       7.2251e-03,  1.1406e-02,  1.4511e-03,  5.4641e-03, -4.0882e-03,\n",
              "                      -6.7763e-03,  9.7344e-03,  7.3371e-03,  6.2235e-03, -3.9187e-04,\n",
              "                       3.5312e-04, -6.8221e-03,  7.1864e-03, -6.6620e-03, -8.3177e-03,\n",
              "                      -1.1223e-02, -7.4192e-03, -2.1976e-03, -9.1809e-03,  2.2435e-03,\n",
              "                       8.3823e-03, -6.6069e-03, -5.4510e-04,  2.6543e-03, -5.9831e-03,\n",
              "                       9.8844e-03,  9.3988e-03, -5.1382e-03,  2.1754e-03,  1.0241e-02,\n",
              "                      -5.5372e-03, -1.3393e-03, -3.2160e-04,  7.5185e-03,  4.7076e-03,\n",
              "                       1.0491e-02, -5.9786e-03,  1.0094e-02, -1.0791e-02,  1.0006e-02,\n",
              "                      -9.6214e-03,  1.5247e-04,  1.1201e-02,  1.7426e-03,  9.8770e-03,\n",
              "                       9.6311e-04,  3.0794e-03,  1.1004e-02, -6.6093e-03, -7.5725e-03,\n",
              "                      -1.0369e-02, -3.3307e-03, -1.9647e-03,  1.3292e-04, -1.1404e-03,\n",
              "                       7.4856e-05,  7.6100e-03,  5.5667e-03, -1.0848e-02,  4.5266e-03,\n",
              "                      -4.3373e-03,  8.5709e-03, -1.0485e-03, -9.3030e-03, -2.3824e-03,\n",
              "                       1.1134e-02, -9.0179e-03,  1.1694e-03,  4.1906e-03, -1.6135e-03,\n",
              "                      -7.1669e-03,  1.7722e-03,  8.1950e-03,  2.3361e-03,  8.1501e-03,\n",
              "                      -8.4313e-03, -6.5814e-03,  6.1468e-03,  5.5813e-04,  2.8116e-03,\n",
              "                      -5.2775e-03, -1.0363e-02,  2.3039e-03,  7.0469e-03, -1.7435e-03,\n",
              "                      -9.8025e-03, -6.3391e-03, -9.9997e-03, -4.4347e-03,  4.0251e-03,\n",
              "                      -5.3623e-03,  5.1412e-03, -8.4482e-03,  1.4046e-03,  8.5295e-04,\n",
              "                       9.0008e-04, -8.7101e-03, -7.9393e-03,  6.5670e-03, -4.7608e-03,\n",
              "                       8.5334e-03,  1.3097e-03,  2.0429e-03,  7.2600e-03,  1.0100e-02,\n",
              "                       1.0769e-02,  2.9041e-03, -5.3639e-03, -1.6777e-03, -9.8035e-03,\n",
              "                      -1.0490e-02, -7.6399e-03, -8.7103e-03, -2.7354e-03, -6.7081e-03,\n",
              "                       1.1082e-02,  1.0545e-02, -5.4910e-04,  6.0460e-03,  8.5033e-03,\n",
              "                      -6.7354e-03,  1.0130e-02,  6.9613e-03,  2.2989e-03, -6.4135e-03,\n",
              "                       1.6494e-03, -1.2520e-03, -2.2873e-03,  9.5768e-03, -8.5952e-03,\n",
              "                       8.2311e-03, -5.2000e-03, -5.4741e-04,  3.2856e-03, -7.4782e-03,\n",
              "                      -8.3484e-03, -2.1032e-03,  8.6983e-04,  1.4647e-03, -6.4913e-03,\n",
              "                       5.6879e-03, -5.5387e-03,  3.6553e-03, -4.3758e-03, -6.3579e-03,\n",
              "                       4.3048e-03, -1.1219e-02,  1.1355e-02, -7.0139e-03,  7.6574e-03,\n",
              "                       7.8735e-03, -1.6303e-03, -6.2094e-04,  1.1847e-03,  6.6296e-03,\n",
              "                      -1.0286e-02, -5.6578e-03, -6.9988e-03, -1.0197e-03, -1.0303e-02,\n",
              "                      -8.3993e-03, -1.0496e-02, -1.8211e-03, -9.8253e-03, -8.1853e-03,\n",
              "                      -4.5638e-03, -8.4423e-03,  1.9628e-03, -9.1642e-03, -8.4392e-03,\n",
              "                       6.2530e-03,  5.5504e-03, -9.5431e-03, -6.0176e-03,  7.5461e-03,\n",
              "                      -3.5857e-03, -7.1382e-03])),\n",
              "             ('classifier.4.weight',\n",
              "              tensor([[ 0.0381, -0.0232,  0.0137,  ...,  0.0433,  0.0169, -0.0027],\n",
              "                      [ 0.0245, -0.0383,  0.0258,  ..., -0.0322,  0.0236,  0.0058],\n",
              "                      [ 0.0252, -0.0238,  0.0146,  ...,  0.0077,  0.0314,  0.0160],\n",
              "                      ...,\n",
              "                      [-0.0160,  0.0067,  0.0124,  ...,  0.0265,  0.0245, -0.0111],\n",
              "                      [ 0.0435,  0.0364,  0.0210,  ...,  0.0021,  0.0092, -0.0131],\n",
              "                      [ 0.0103,  0.0420, -0.0171,  ...,  0.0183, -0.0141,  0.0054]])),\n",
              "             ('classifier.4.bias',\n",
              "              tensor([-0.0206,  0.0412,  0.0124, -0.0388, -0.0276,  0.0396,  0.0428, -0.0212,\n",
              "                      -0.0188, -0.0254]))])"
            ]
          },
          "execution_count": 16,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "model.state_dict()"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "wHD02aNt4pNv"
      },
      "source": [
        "# 设置交叉熵损失函数，SGD优化器"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 17,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:40.023837Z",
          "start_time": "2025-06-26T01:43:40.019952Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "J1dvP3ES4pNv",
        "outputId": "b3bc7688-31a1-4fee-8a45-dca723eafa16"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "损失函数: CrossEntropyLoss()\n"
          ]
        }
      ],
      "source": [
        "model = InceptionNetV1()\n",
        "# 定义损失函数和优化器\n",
        "loss_fn = nn.CrossEntropyLoss()  # 交叉熵损失函数，适用于多分类问题，里边会做softmax，还有会把0-9标签转换成one-hot编码\n",
        "\n",
        "print(\"损失函数:\", loss_fn)\n",
        "\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 18,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:40.035848Z",
          "start_time": "2025-06-26T01:43:40.032419Z"
        },
        "id": "qUeLZMIE4pNv"
      },
      "outputs": [],
      "source": [
        "model = InceptionNetV1()\n",
        "\n",
        "optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.9)  # SGD优化器，学习率为0.01，动量为0.9"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 19,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.732814Z",
          "start_time": "2025-06-26T01:43:40.035848Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 123,
          "referenced_widgets": [
            "be06c2b352c14f5c9aee8a16d5b11e24",
            "32813af8951f4c2b8dd4171a5937c5a9",
            "9cb464da4e09479292889a917a7f436a",
            "d36666c817b841f99576af80feb4a7ee",
            "12492b59648c43e4bdbe100ddb3a3702",
            "1ee3d0c200c64a17a46ee92c07e0e1b7",
            "7ab573b1c9fd48a4bb67890514744bb0",
            "8a284f1425f343c7b02cf5e060519df4",
            "c97b171f55714254a3bd3d0d73882031",
            "6e4a4ef865da440d92b72bcf10f8b877",
            "e82290e7a17643cdb2eaf7df030265f1"
          ]
        },
        "id": "qI1L-GG94pNv",
        "outputId": "eec5f542-8a40-4900-9562-4aa58ba68cb9"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "使用设备: cpu\n",
            "训练开始，共35200步\n"
          ]
        },
        {
          "data": {
            "application/vnd.jupyter.widget-view+json": {
              "model_id": "7a9685f32e8b4d1b8f1ba2bb156e96e3",
              "version_major": 2,
              "version_minor": 0
            },
            "text/plain": [
              "  0%|          | 0/35200 [00:00<?, ?it/s]"
            ]
          },
          "metadata": {},
          "output_type": "display_data"
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([8, 256, 16, 16])\n",
            "inception2: torch.Size([8, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n",
            "inception1: torch.Size([64, 256, 16, 16])\n",
            "inception2: torch.Size([64, 480, 8, 8])\n"
          ]
        },
        {
          "ename": "KeyboardInterrupt",
          "evalue": "",
          "output_type": "error",
          "traceback": [
            "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
            "\u001b[1;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
            "Cell \u001b[1;32mIn[19], line 8\u001b[0m\n\u001b[0;32m      4\u001b[0m early_stopping\u001b[38;5;241m=\u001b[39mEarlyStopping(patience\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m5\u001b[39m, delta\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m0.001\u001b[39m)\n\u001b[0;32m      5\u001b[0m model_saver\u001b[38;5;241m=\u001b[39mModelSaver(save_dir\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mmodel_weights\u001b[39m\u001b[38;5;124m'\u001b[39m, save_best_only\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[1;32m----> 8\u001b[0m model, history \u001b[38;5;241m=\u001b[39m \u001b[43mtrain_classification_model\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtrain_loader\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_loader\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mloss_fn\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43moptimizer\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnum_epochs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m50\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mearly_stopping\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mearly_stopping\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmodel_saver\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmodel_saver\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtensorboard_logger\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\n",
            "File \u001b[1;32md:\\BaiduSyncdisk\\pytorch\\chapter_6_AI\\wangdao_deeplearning_train.py:179\u001b[0m, in \u001b[0;36mtrain_classification_model\u001b[1;34m(model, train_loader, val_loader, criterion, optimizer, device, num_epochs, tensorboard_logger, model_saver, early_stopping, eval_step)\u001b[0m\n\u001b[0;32m    176\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m tqdm(total\u001b[38;5;241m=\u001b[39mnum_epochs \u001b[38;5;241m*\u001b[39m \u001b[38;5;28mlen\u001b[39m(train_loader)) \u001b[38;5;28;01mas\u001b[39;00m pbar:\n\u001b[0;32m    177\u001b[0m     \u001b[38;5;28;01mfor\u001b[39;00m epoch_id \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mrange\u001b[39m(num_epochs):\n\u001b[0;32m    178\u001b[0m         \u001b[38;5;66;03m# 训练\u001b[39;00m\n\u001b[1;32m--> 179\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mdatas\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mlabels\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mtrain_loader\u001b[49m\u001b[43m:\u001b[49m\n\u001b[0;32m    180\u001b[0m \u001b[43m            \u001b[49m\u001b[43mdatas\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mdatas\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\u001b[43m  \u001b[49m\u001b[38;5;66;43;03m# 数据放到device上\u001b[39;49;00m\n\u001b[0;32m    181\u001b[0m \u001b[43m            \u001b[49m\u001b[43mlabels\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mlabels\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\u001b[43m  \u001b[49m\u001b[38;5;66;43;03m# 标签放到device上\u001b[39;49;00m\n",
            "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python312\\site-packages\\torch\\utils\\data\\dataloader.py:701\u001b[0m, in \u001b[0;36m_BaseDataLoaderIter.__next__\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m    698\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sampler_iter \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m    699\u001b[0m     \u001b[38;5;66;03m# TODO(https://github.com/pytorch/pytorch/issues/76750)\u001b[39;00m\n\u001b[0;32m    700\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reset()  \u001b[38;5;66;03m# type: ignore[call-arg]\u001b[39;00m\n\u001b[1;32m--> 701\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_next_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    702\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m    703\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (\n\u001b[0;32m    704\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind \u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable\n\u001b[0;32m    705\u001b[0m     \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m    706\u001b[0m     \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m>\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called\n\u001b[0;32m    707\u001b[0m ):\n",
            "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python312\\site-packages\\torch\\utils\\data\\dataloader.py:757\u001b[0m, in \u001b[0;36m_SingleProcessDataLoaderIter._next_data\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m    755\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_next_data\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m    756\u001b[0m     index \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_next_index()  \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[1;32m--> 757\u001b[0m     data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_dataset_fetcher\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfetch\u001b[49m\u001b[43m(\u001b[49m\u001b[43mindex\u001b[49m\u001b[43m)\u001b[49m  \u001b[38;5;66;03m# may raise StopIteration\u001b[39;00m\n\u001b[0;32m    758\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory:\n\u001b[0;32m    759\u001b[0m         data \u001b[38;5;241m=\u001b[39m _utils\u001b[38;5;241m.\u001b[39mpin_memory\u001b[38;5;241m.\u001b[39mpin_memory(data, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pin_memory_device)\n",
            "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python312\\site-packages\\torch\\utils\\data\\_utils\\fetch.py:50\u001b[0m, in \u001b[0;36m_MapDatasetFetcher.fetch\u001b[1;34m(self, possibly_batched_index)\u001b[0m\n\u001b[0;32m     48\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mauto_collation:\n\u001b[0;32m     49\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mhasattr\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m__getitems__\u001b[39m\u001b[38;5;124m\"\u001b[39m) \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset\u001b[38;5;241m.\u001b[39m__getitems__:\n\u001b[1;32m---> 50\u001b[0m         data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdataset\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m__getitems__\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpossibly_batched_index\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m     51\u001b[0m     \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m     52\u001b[0m         data \u001b[38;5;241m=\u001b[39m [\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset[idx] \u001b[38;5;28;01mfor\u001b[39;00m idx \u001b[38;5;129;01min\u001b[39;00m possibly_batched_index]\n",
            "File \u001b[1;32m~\\AppData\\Roaming\\Python\\Python312\\site-packages\\torch\\utils\\data\\dataset.py:420\u001b[0m, in \u001b[0;36mSubset.__getitems__\u001b[1;34m(self, indices)\u001b[0m\n\u001b[0;32m    418\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset\u001b[38;5;241m.\u001b[39m__getitems__([\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mindices[idx] \u001b[38;5;28;01mfor\u001b[39;00m idx \u001b[38;5;129;01min\u001b[39;00m indices])  \u001b[38;5;66;03m# type: ignore[attr-defined]\u001b[39;00m\n\u001b[0;32m    419\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 420\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m [\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdataset\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mindices\u001b[49m\u001b[43m[\u001b[49m\u001b[43midx\u001b[49m\u001b[43m]\u001b[49m\u001b[43m]\u001b[49m \u001b[38;5;28;01mfor\u001b[39;00m idx \u001b[38;5;129;01min\u001b[39;00m indices]\n",
            "Cell \u001b[1;32mIn[8], line 28\u001b[0m, in \u001b[0;36mCIFAR10Dataset.__getitem__\u001b[1;34m(self, idx)\u001b[0m\n\u001b[0;32m     26\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__getitem__\u001b[39m(\u001b[38;5;28mself\u001b[39m, idx):\n\u001b[0;32m     27\u001b[0m     img_path \u001b[38;5;241m=\u001b[39m os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39mjoin(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mimg_dir, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mimg_names[idx] \u001b[38;5;241m+\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m.png\u001b[39m\u001b[38;5;124m'\u001b[39m) \u001b[38;5;66;03m#图片路径\u001b[39;00m\n\u001b[1;32m---> 28\u001b[0m     image \u001b[38;5;241m=\u001b[39m \u001b[43mImage\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mopen\u001b[49m\u001b[43m(\u001b[49m\u001b[43mimg_path\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m#打开图片\u001b[39;00m\n\u001b[0;32m     29\u001b[0m     label \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlabels[idx]\n\u001b[0;32m     31\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtransform:\n",
            "File \u001b[1;32mc:\\Program Files\\Python312\\Lib\\site-packages\\PIL\\Image.py:3465\u001b[0m, in \u001b[0;36mopen\u001b[1;34m(fp, mode, formats)\u001b[0m\n\u001b[0;32m   3462\u001b[0m     filename \u001b[38;5;241m=\u001b[39m os\u001b[38;5;241m.\u001b[39mfspath(fp)\n\u001b[0;32m   3464\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m filename:\n\u001b[1;32m-> 3465\u001b[0m     fp \u001b[38;5;241m=\u001b[39m \u001b[43mbuiltins\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mopen\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfilename\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mrb\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m   3466\u001b[0m     exclusive_fp \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[0;32m   3467\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n",
            "\u001b[1;31mKeyboardInterrupt\u001b[0m: "
          ]
        }
      ],
      "source": [
        "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
        "print(f\"使用设备: {device}\")\n",
        "model = model.to(device) #将模型移动到GPU\n",
        "early_stopping=EarlyStopping(patience=5, delta=0.001)\n",
        "model_saver=ModelSaver(save_dir='model_weights', save_best_only=True)\n",
        "\n",
        "\n",
        "model, history = train_classification_model(model, train_loader, val_loader, loss_fn, optimizer, device, num_epochs=50, early_stopping=early_stopping, model_saver=model_saver, tensorboard_logger=None)\n",
        "\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 26,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.737721Z",
          "start_time": "2025-06-26T01:45:37.732814Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "pJWn5FRH4pNv",
        "outputId": "bef1e508-d9ed-4843-9f49-c57b2450496d"
      },
      "outputs": [
        {
          "data": {
            "text/plain": [
              "[{'loss': 0.003491724608466029, 'acc': 100.0, 'step': 10401},\n",
              " {'loss': 0.003816040698438883, 'acc': 100.0, 'step': 10402},\n",
              " {'loss': 0.0022664566058665514, 'acc': 100.0, 'step': 10403},\n",
              " {'loss': 0.0018923301249742508, 'acc': 100.0, 'step': 10404},\n",
              " {'loss': 0.0029820818454027176, 'acc': 100.0, 'step': 10405},\n",
              " {'loss': 0.004303900524973869, 'acc': 100.0, 'step': 10406},\n",
              " {'loss': 0.0022796019911766052, 'acc': 100.0, 'step': 10407},\n",
              " {'loss': 0.0030815221834927797, 'acc': 100.0, 'step': 10408},\n",
              " {'loss': 0.003537941724061966, 'acc': 100.0, 'step': 10409},\n",
              " {'loss': 0.002074974589049816, 'acc': 100.0, 'step': 10410},\n",
              " {'loss': 0.0021730950102210045, 'acc': 100.0, 'step': 10411},\n",
              " {'loss': 0.0026771053671836853, 'acc': 100.0, 'step': 10412},\n",
              " {'loss': 0.0021046821493655443, 'acc': 100.0, 'step': 10413},\n",
              " {'loss': 0.0016319683054462075, 'acc': 100.0, 'step': 10414},\n",
              " {'loss': 0.0023830805439502, 'acc': 100.0, 'step': 10415},\n",
              " {'loss': 0.0014363321242854, 'acc': 100.0, 'step': 10416},\n",
              " {'loss': 0.002298145554959774, 'acc': 100.0, 'step': 10417},\n",
              " {'loss': 0.002464946359395981, 'acc': 100.0, 'step': 10418},\n",
              " {'loss': 0.0033617469016462564, 'acc': 100.0, 'step': 10419},\n",
              " {'loss': 0.0027641693595796824, 'acc': 100.0, 'step': 10420},\n",
              " {'loss': 0.003921723924577236, 'acc': 100.0, 'step': 10421},\n",
              " {'loss': 0.0020040380768477917, 'acc': 100.0, 'step': 10422},\n",
              " {'loss': 0.0023411663714796305, 'acc': 100.0, 'step': 10423},\n",
              " {'loss': 0.0020229367073625326, 'acc': 100.0, 'step': 10424},\n",
              " {'loss': 0.0016176165081560612, 'acc': 100.0, 'step': 10425},\n",
              " {'loss': 0.0014053364284336567, 'acc': 100.0, 'step': 10426},\n",
              " {'loss': 0.0022467602975666523, 'acc': 100.0, 'step': 10427},\n",
              " {'loss': 0.0028117455076426268, 'acc': 100.0, 'step': 10428},\n",
              " {'loss': 0.0017628881614655256, 'acc': 100.0, 'step': 10429},\n",
              " {'loss': 0.0020254808478057384, 'acc': 100.0, 'step': 10430},\n",
              " {'loss': 0.002358924364671111, 'acc': 100.0, 'step': 10431},\n",
              " {'loss': 0.0019077338511124253, 'acc': 100.0, 'step': 10432},\n",
              " {'loss': 0.0017173555679619312, 'acc': 100.0, 'step': 10433},\n",
              " {'loss': 0.0028551078867167234, 'acc': 100.0, 'step': 10434},\n",
              " {'loss': 0.001715813996270299, 'acc': 100.0, 'step': 10435},\n",
              " {'loss': 0.0024141171015799046, 'acc': 100.0, 'step': 10436},\n",
              " {'loss': 0.0017758128233253956, 'acc': 100.0, 'step': 10437},\n",
              " {'loss': 0.0019963092636317015, 'acc': 100.0, 'step': 10438},\n",
              " {'loss': 0.002457571681588888, 'acc': 100.0, 'step': 10439},\n",
              " {'loss': 0.0023125477600842714, 'acc': 100.0, 'step': 10440},\n",
              " {'loss': 0.0020277928560972214, 'acc': 100.0, 'step': 10441},\n",
              " {'loss': 0.0026837456971406937, 'acc': 100.0, 'step': 10442},\n",
              " {'loss': 0.0026223028544336557, 'acc': 100.0, 'step': 10443},\n",
              " {'loss': 0.0015754913911223412, 'acc': 100.0, 'step': 10444},\n",
              " {'loss': 0.0015004329616203904, 'acc': 100.0, 'step': 10445},\n",
              " {'loss': 0.0014760923804715276, 'acc': 100.0, 'step': 10446},\n",
              " {'loss': 0.0030167759396135807, 'acc': 100.0, 'step': 10447},\n",
              " {'loss': 0.0025633256882429123, 'acc': 100.0, 'step': 10448},\n",
              " {'loss': 0.002551351673901081, 'acc': 100.0, 'step': 10449},\n",
              " {'loss': 0.0030058836564421654, 'acc': 100.0, 'step': 10450},\n",
              " {'loss': 0.0035295237321406603, 'acc': 100.0, 'step': 10451},\n",
              " {'loss': 0.0018499366706237197, 'acc': 100.0, 'step': 10452},\n",
              " {'loss': 0.0026926135178655386, 'acc': 100.0, 'step': 10453},\n",
              " {'loss': 0.001155345467850566, 'acc': 100.0, 'step': 10454},\n",
              " {'loss': 0.002534798113629222, 'acc': 100.0, 'step': 10455},\n",
              " {'loss': 0.00293498276732862, 'acc': 100.0, 'step': 10456},\n",
              " {'loss': 0.0024838484823703766, 'acc': 100.0, 'step': 10457},\n",
              " {'loss': 0.002520022913813591, 'acc': 100.0, 'step': 10458},\n",
              " {'loss': 0.0014982324792072177, 'acc': 100.0, 'step': 10459},\n",
              " {'loss': 0.0018371138721704483, 'acc': 100.0, 'step': 10460},\n",
              " {'loss': 0.0021160792093724012, 'acc': 100.0, 'step': 10461},\n",
              " {'loss': 0.0019232832128182054, 'acc': 100.0, 'step': 10462},\n",
              " {'loss': 0.0023400878999382257, 'acc': 100.0, 'step': 10463},\n",
              " {'loss': 0.002586106536909938, 'acc': 100.0, 'step': 10464},\n",
              " {'loss': 0.0019052830757573247, 'acc': 100.0, 'step': 10465},\n",
              " {'loss': 0.0027190411929041147, 'acc': 100.0, 'step': 10466},\n",
              " {'loss': 0.0024313766043633223, 'acc': 100.0, 'step': 10467},\n",
              " {'loss': 0.0016618946101516485, 'acc': 100.0, 'step': 10468},\n",
              " {'loss': 0.0013367269420996308, 'acc': 100.0, 'step': 10469},\n",
              " {'loss': 0.003011970315128565, 'acc': 100.0, 'step': 10470},\n",
              " {'loss': 0.0018634084844961762, 'acc': 100.0, 'step': 10471},\n",
              " {'loss': 0.003616468980908394, 'acc': 100.0, 'step': 10472},\n",
              " {'loss': 0.0023980180267244577, 'acc': 100.0, 'step': 10473},\n",
              " {'loss': 0.0022974475286900997, 'acc': 100.0, 'step': 10474},\n",
              " {'loss': 0.0017363885417580605, 'acc': 100.0, 'step': 10475},\n",
              " {'loss': 0.002192792249843478, 'acc': 100.0, 'step': 10476},\n",
              " {'loss': 0.0013506278628483415, 'acc': 100.0, 'step': 10477},\n",
              " {'loss': 0.0021264718379825354, 'acc': 100.0, 'step': 10478},\n",
              " {'loss': 0.0017523939022794366, 'acc': 100.0, 'step': 10479},\n",
              " {'loss': 0.0017105976585298777, 'acc': 100.0, 'step': 10480},\n",
              " {'loss': 0.0025257074739784002, 'acc': 100.0, 'step': 10481},\n",
              " {'loss': 0.002196722663938999, 'acc': 100.0, 'step': 10482},\n",
              " {'loss': 0.0029061215464025736, 'acc': 100.0, 'step': 10483},\n",
              " {'loss': 0.0017808187985792756, 'acc': 100.0, 'step': 10484},\n",
              " {'loss': 0.0015815834049135447, 'acc': 100.0, 'step': 10485},\n",
              " {'loss': 0.00326604675501585, 'acc': 100.0, 'step': 10486},\n",
              " {'loss': 0.002798824803903699, 'acc': 100.0, 'step': 10487},\n",
              " {'loss': 0.0021325622219592333, 'acc': 100.0, 'step': 10488},\n",
              " {'loss': 0.006164507009088993, 'acc': 100.0, 'step': 10489},\n",
              " {'loss': 0.0018026498146355152, 'acc': 100.0, 'step': 10490},\n",
              " {'loss': 0.0016174401389434934, 'acc': 100.0, 'step': 10491},\n",
              " {'loss': 0.0020800866186618805, 'acc': 100.0, 'step': 10492},\n",
              " {'loss': 0.002972046844661236, 'acc': 100.0, 'step': 10493},\n",
              " {'loss': 0.0030487366020679474, 'acc': 100.0, 'step': 10494},\n",
              " {'loss': 0.001615959801711142, 'acc': 100.0, 'step': 10495},\n",
              " {'loss': 0.0015539713203907013, 'acc': 100.0, 'step': 10496},\n",
              " {'loss': 0.002975056879222393, 'acc': 100.0, 'step': 10497},\n",
              " {'loss': 0.0023341104388237, 'acc': 100.0, 'step': 10498},\n",
              " {'loss': 0.0017738008173182607, 'acc': 100.0, 'step': 10499}]"
            ]
          },
          "execution_count": 26,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "history['train'][-100:-1]"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 27,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.741226Z",
          "start_time": "2025-06-26T01:45:37.737721Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "pMjJdQ2l4pNw",
        "outputId": "61763ecc-04ee-4e3d-f4bf-5e8effa8a79c"
      },
      "outputs": [
        {
          "data": {
            "text/plain": [
              "[{'loss': 2.3045929691314697, 'acc': 9.12, 'step': 0},\n",
              " {'loss': 1.238996053314209, 'acc': 55.2, 'step': 500},\n",
              " {'loss': 0.8884034400939942, 'acc': 68.5, 'step': 1000},\n",
              " {'loss': 0.798821385192871, 'acc': 72.0, 'step': 1500},\n",
              " {'loss': 0.7209422531604767, 'acc': 74.56, 'step': 2000},\n",
              " {'loss': 0.716133192205429, 'acc': 76.2, 'step': 2500},\n",
              " {'loss': 0.7522935387611389, 'acc': 74.5, 'step': 3000},\n",
              " {'loss': 0.7045076326847076, 'acc': 76.7, 'step': 3500},\n",
              " {'loss': 0.7004623531341553, 'acc': 77.4, 'step': 4000},\n",
              " {'loss': 0.6616050040721894, 'acc': 79.54, 'step': 4500},\n",
              " {'loss': 0.655171839094162, 'acc': 80.06, 'step': 5000},\n",
              " {'loss': 0.6894081469178199, 'acc': 80.16, 'step': 5500},\n",
              " {'loss': 0.6748722618579864, 'acc': 81.16, 'step': 6000},\n",
              " {'loss': 0.6891608224391937, 'acc': 80.96, 'step': 6500},\n",
              " {'loss': 0.6997112890720367, 'acc': 80.96, 'step': 7000},\n",
              " {'loss': 0.7185570673942566, 'acc': 80.74, 'step': 7500},\n",
              " {'loss': 0.7153246128082276, 'acc': 81.36, 'step': 8000},\n",
              " {'loss': 0.7157424350500107, 'acc': 81.34, 'step': 8500},\n",
              " {'loss': 0.7296340623855591, 'acc': 81.0, 'step': 9000},\n",
              " {'loss': 0.7395104514598847, 'acc': 81.04, 'step': 9500},\n",
              " {'loss': 0.744106754732132, 'acc': 81.06, 'step': 10000}]"
            ]
          },
          "execution_count": 27,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "history['val'][-1000:-1]"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "NcujMCRC4pNw"
      },
      "source": [
        "# 绘制损失曲线和准确率曲线"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 28,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.816716Z",
          "start_time": "2025-06-26T01:45:37.744941Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 466
        },
        "id": "3xZ57j-C4pNw",
        "outputId": "cc0820e6-c0fd-477f-b44c-9e430c58e7d0"
      },
      "outputs": [
        {
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAzoAAAHBCAYAAAChe85HAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAiDhJREFUeJzs3XeUVPX9//HnzOzsbK+wBVh67wiIgJ0mKAG7Br9KYokJmBiiJuRnCZZgTOyNGGONBCtoFJUVRATpRQHp0mEX2GV7mdmZ+/vj7g4sdfvdnXk9zrln5t655T27986d93yazTAMAxERERERkQBitzoAERERERGRuqZER0REREREAo4SHRERERERCThKdEREREREJOAo0RERERERkYCjREdERERERAKOEh0REREREQk4SnRERERERCTgKNEREREREZGAE2J1AFXh8/k4cOAA0dHR2Gw2q8MREQkahmGQn59PixYtsNv121gF3ZdERKxT5XuTUQ0vvfSS0atXLyM6OtqIjo42zjvvPGPu3Lln3Oa9994zunTpYrhcLqNnz57GZ599Vp1DGoZhGHv37jUATZo0adJk0bR3795qf3YHMt2XNGnSpMn66Wz3pmqV6LRq1YrHH3+cTp06YRgGb775JuPGjWPt2rX06NHjpPW/++47brzxRqZPn84VV1zBzJkzGT9+PGvWrKFnz55VPm50dDQAe/fuJSYmpjohA+DxeJg3bx4jR47E6XRWe3uRM9H5JfXNynMsLy+PtLQ0/+ewmHRfksZO55jUJ6vPr6rem6qV6IwdO7bS/GOPPcbLL7/MsmXLTpnoPPvss1x22WXce++9ADzyyCOkp6fzwgsvMGPGjCoft6JaQExMTI1vKBEREcTExOhilzqn80vqW2M4x1Q9qzLdl6Sx0zkm9amxnF9nuzfVuI2O1+vl/fffp7CwkMGDB59ynaVLlzJlypRKy0aNGsWcOXPOuO/S0lJKS0v983l5eYD5R/V4PNWOtWKbmmwrcjY6v6S+WXmO6bwWEZGmqtqJzvr16xk8eDAlJSVERUUxe/Zsunfvfsp1MzIySE5OrrQsOTmZjIyMMx5j+vTpTJs27aTl8+bNIyIioroh+6Wnp9d4W5Gz0fkl9c2Kc6yoqKjBjykiIlIXqp3odOnShXXr1pGbm8sHH3zALbfcwjfffHPaZKcmpk6dWqkkqKIe3siRI2tcRSA9PZ0RI0ao+FbqnM4vqW9WnmMVJeoiIiJNTbUTndDQUDp27AhA//79WblyJc8++yz//Oc/T1o3JSWFzMzMSssyMzNJSUk54zFcLhcul+uk5U6ns1Y3+dpuL3ImwXh+GYZBWVkZXq/X6lACmtfrJSQkBK/XW+ddPDscDkJCQk5bzznYzum6dKbrw+PxEBISQklJia6fRuhs14WINA21HkfH5/NVak9zvMGDBzN//nzuvvtu/7L09PTTtukRkabD7XZz8OBBVW1qAIZhkJKSwt69e+vli1dERASpqamEhobW+b6D1dmuj/r+n0rt6boQafqqlehMnTqV0aNH07p1a/Lz85k5cyYLFy7kyy+/BODmm2+mZcuWTJ8+HYDf/e53XHTRRTz55JNcfvnlzJo1i1WrVvHKK6/U/TsRkQbj8/nYuXMnDoeDFi1aEBoaqi9r9cjn81FQUEBUVFSdlugYhoHb7ebw4cPs3LmTTp06aVDQOlCV66O+/qdSe7ouRAJHtRKdQ4cOcfPNN3Pw4EFiY2Pp3bs3X375JSNGjABgz549lT4MhgwZwsyZM7n//vv585//TKdOnZgzZ061xtARkcbH7Xbj8/lIS0urVQchUjU+nw+3201YWFidf+EKDw/H6XSye/du/zGkdqpyfdTn/1RqT9eFSGCoVqLz73//+4yvL1y48KRl1157Lddee221ghKRpkFf0AKD/o/1Q3/Xpk3/P5GmT1exiIiIiIgEHCU6IiI11LZtW5555p
k62dfChQux2Wzk5OTUyf5ErFaX14eISE0o0RGRoHLxxRdX6gmyNlauXMkdd9xRJ/uSqlu0aBFjx46lRYsW2Gw25syZU+l1wzB48MEHSU1NJTw8nOHDh7Nt27ZK62RnZzNhwgRiYmKIi4vj1ltvpaCgoAHfReOk60NEAokSHRGR41SMfVIVzZs3V2cMFigsLKRPnz68+OKLp3z9iSee4LnnnmPGjBksX76cyMhIRo0aRUlJiX+dCRMmsHHjRtLT0/n0009ZtGiRvpRXga4PEWlKlOiISNCYOHEi33zzDc8++yw2mw2bzcYbb7yBzWbj888/p3///rhcLhYvXsyOHTsYN24cycnJREVFMXDgQL766qtK+zuxao7NZuPVV1/lyiuvJCIigk6dOvHJJ5/UON4PP/yQHj16EB4eTu/evXnqqacqvf7SSy/RqVMnwsLCSE5O5pprrvG/9sEHH9CrVy/Cw8NJTExk+PDhFBYW1jiWxmT06NE8+uijXHnllSe9ZhgGzzzzDPfffz/jxo2jd+/evPXWWxw4cMBf8rNp0ya++OILXn31VQYNGsT555/P888/z6xZszhw4EADv5vGozFfH16vl1tvvZV27doRHh5Oly5dePbZZ09a77XXXqNHjx64XC5SU1OZPHmy/7WcnBx+9atfkZycTFhYGD179uTTTz+t2R9LRJqEWg8Y2th9u+0wf/1sE5FldsZYHYxIgDIMg2KPNaO7hzsdVR7D59lnn2Xr1q307NmThx9+GICNGzcC8Kc//Yl//OMftG/fnvj4ePbu3cuYMWN47LHHcLlcvPXWW4wdO5YtW7bQunXr0x5j2rRpPPHEE/z973/n+eefZ8KECezevZuEhIRqva/Vq1dz3XXX8Ze//IVrr72WBQsWcM8999CsWTMmTpzIqlWr+O1vf8vbb7/NkCFDyM7O5ttvvwXg4MGD3HjjjTzxxBNceeWV5Ofn8+2332IYRrViaIp27txJRkYGw4cP9y+LjY1l0KBBLF26lBtuuIGlS5cSFxfHgAED/OsMHz4cu93O8uXLT5lAlZaWVhocOy8vDwCPx4PH46m0rsfjwTAMfD4fPp8POPkaMQyDYrcXR6mn3segquo18vTTT7N161Z69OjBtGnTgMrXxxNPPFHp+rjssst45JFHcLlcvP3224wdO5ZNmzZVuj4q/g4Vpk2bxuOPP87f/vY3XnjhBSZMmMDOnTvPen2UlZXRsmVL3n33XRITE/nuu++48847SU5O5rrrrgPg5Zdf5p577mH69Olcdtll5Obm8t133/n/D6NHjyY/P5+33nqLDh068OOPP2Kz2SrFdzyfz4dhGHg8HhwOx1n/fo1NxXl54vl5JoZhsHZvLu+t3sfG/XkE/ieG1JRhGOQXOHhxx5Iaf4b1bBnD41fWbMiZqp7XAZ/olHkNNmXk0ypSgxmK1Jdij5fuD35pybF/fHgUEaFV+yiLjY0lNDSUiIgIUlJSANi8eTMADz/8sH9MMICEhAT69Onjn3/kkUeYPXs2n3zySaVfiU80ceJEbrzxRgD++te/8txzz7FixQouu+yyar2vp556imHDhvHAAw/g8/lISUlh586d/P3vf2fixIns2bOHyMhIrrjiCqKjo2nTpg39+vUDzESnrKyMq666ijZt2gDQq1evah2/qcrIyAAgOTm50vLk5GT/axkZGSQlJVV6PSQkhISEBP86J5o+fbr/y//x5s2bd1L1rJCQEFJSUigoKMDtdgNQ7PYy+KllNXtTtbR0ynmEh579i7rNZsNutxMSEuJ/TxXJ3R//+EcGDRrkX7ddu3a0a9fOP3/PPffw4Ycf8t577/mrAPp8PkpKSvxJIcANN9zA5Zdf7t/n888/z8KFCyslpqczZcoU//OxY8eyaNEi/vvf//qvrccee4xJkyYxceJEAFJSUujSpQt5eXksWLCAFStWsHz5cjp27AjAhRdeCFApvuO53W6Ki4tZtGhRlavrNUbp6elnXa
fQAyuP2FiaaSejWN+XpKpsHCyqeU0Bb3E+c+fuqdG2RUVFVVov4BOduAgnYF7EIiKnc/yv+wAFBQX85S9/4bPPPvMnDsXFxezZc+YP5d69e/ufR0ZGEhMTw6FDh6odz6ZNmxg3blylZUOGDOHZZ5/F6/UyYsQI2rRpQ/v27bnsssu47LLL/FWC+vTpw7Bhw+jVqxejRo1i5MiRXHPNNcTHx1c7DjFNnTq10hftvLw80tLSGDlyJDExMZXWLSkpYe/evURFRfkHmgxxW/dFOTomuso/BoSEhBAaGup/TxUJzwUXXFDpfRYUFDBt2jTmzp1b6fo4fPiwfz273U5YWFil7QYMGOCfj4mJISYmhoKCgpP+hqfy0ksv8frrr7Nnzx6Ki4txu9307dvXf40dPHiQ0aNHn3Jf27Zto1WrVpxzzjlV+juA+X8MDw/nwgsvbJIDhno8HtLT0xkxYgROp/Ok1w3DYPnOo7y7ah9f/piJx2uW34Q57YzumcKo7kmEOZteSZY0jLKyMtasXsM5/c8hJKRm6URsmJOeLc9+7Z/K6X6gOFHAJzoJkaEAFDbdH2NEGr1wp4MfHx5l2bHrQmRkZKX5e+65h/T0dP7xj3/QsWNHwsPDueaaa/y/0J/OiV8ozlQ1pjaio6NZs2YNCxcuZN68eTz44IP85S9/YeXKlcTFxZGens53333HvHnzeP755/l//+//sXz58kq/wgeiipK6zMxMUlNT/cszMzPp27evf50Tk8+ysjKys7P925/I5XLhcrlOWu50Ok/6n3u9Xn/pSMWgk5EuZ6VrxOfzkZ+XT3RMdL0PTFmd6p1wrGQHjg2aGR1dOc777rvvlNeHx+OptN7x+wLz73ji68cf53RmzZrFvffey5NPPsngwYOJjo7m73//O8uXL8dut/uv3+P/5serSNiq87e22+3YbLZT/o+bkhPjP5xfyger9/Huyj3syjr2q3j31BhuPDeNn/VtSWx4032/0jA8Hg+FOwwu6pJsyfVR1WMGfKITX57ouH02SjzeJv1hJdJY2Wy2Kv9ibLXQ0FC83rO3J1qyZAkTJ070t9coKChg165d9RzdMd26dWPJkiWVln333Xd07tzZ314gJCSE4cOHM3z4cB566CHi4uJYsGABV111FTabjaFDhzJ06FAefPBB2rRpw+zZsyuVSgSidu3akZKSwvz58/2JTV5eHsuXL+fXv/41AIMHDyYnJ4fVq1fTv39/ABYsWIDP56tUPasunXiN+Hw+ykIdRISG1HuiUx2N9fpYsmQJQ4YM4Te/+Y1/2Y4dO/zPo6Ojadu2LfPnz+eSSy45afvevXuzb98+tm7dSufOnestzsbK6zP4dtthZq3Yy1ebMinzmaU3Ua4Qfta3BTcMTKNXy9h6by8m0tCaxjeTWoh2hRBit1HmMzha5CE6oukVP4tI3Wnbti3Lly9n165dREVFnba0pVOnTnz00UeMHTsWm83mbyvTUP7whz8wcOBAHnnkEa699lq+/vprXnzxRV566SUAPv30U3766ScuvPBC4uPjmTt3Lj6fjy5durB8+XLmz5/PyJEjSUpKYvny5Rw+fJhu3bo1WPz1qaCggO3bt/vnd+7cybp160hISKB169bcfffdPProo3Tq1Il27drxwAMP0KJFC8aPHw+YSeRll13G7bffzowZM/B4PEyePJkbbriBFi1aWPSuGofGen106tSJt956iy+//JJ27drx9ttvs3LlykollH/5y1+48847SUpK8nc8sGTJEu666y4uuugiLrzwQq6++mqeeuopOnbsyObNm7HZbNVuP9eU5JTC81/v4MM1B9ifU+xf3q91HDcObM3lvVOJdAX8V0EJYo3nZ6R6YrPZ/O10corUUEck2N1zzz04HA66d+9O8+bNT9vm5qmnniI+Pp4hQ4YwduxYRo0aVa36/bV1zjnn8N577zFr1ix69+7NX//6V6ZNm+ZvaB0XF8dHH33EpZdeSrdu3ZgxYw
b//e9/6dGjBzExMSxatIgxY8bQuXNn7r//fp588klGjx7dYPHXp1WrVtGvXz9/5wtTpkyhX79+PPjgg4BZrequu+7ijjvuYODAgRQUFPDFF19Uamfxzjvv0LVrV4YNG8aYMWM4//zzeeWVVyx5P41JY70+fvWrX3HVVVdx/fXXM2jQILKysiqV7gDccsstPPPMM7z00kv06NGDK664otJAsR9++CEDBw7kxhtvpHv37tx3331VKr1qasq8PuZtzOD2t9fwlzUOnluwg/05xcSGO5k4pC1f3H0Bs38zlOsGpinJkYBnM5pAf6N5eXnExsaSm5tbpQaLJxrx1EK2HSrkzYn9uajrqetfi9SUx+Nh7ty5jBkzJmiqRpaUlLBz507atWvXJBvpNjU+n4+8vDxiYmLqpZrTmf6ftf38DVRn+rtU5fqo7/+p1F5T+5zbk1XEu6v28P6qfRzKP9YV+rlt4/n5oDZc1jNFnQtInbH6u09V701BkcrHRYQChRxViY6IiIgECHeZj3k/ZjBrxV4Wbz/iX54YGcqV/VqQXLCdiVcPDJof4UROFBSJTry/6tqZe0sSEakvd955J//5z39O+dpNN93EjBkzGjgikcZD10f17DhcwKwVe/hwzX6yC83vNjYbnN+xGTee25rh3ZKxGV7mzt1+lj2JBLYgSXTMnteyVaIjIhZ5+OGHueeee075mqqESbDT9XF2JR4vc9cfZNaKvazYle1fnhzj4roBaVw3II20hGOD13o8gdf+SKS6giTRMUt0VHVNRKySlJREUlKS1WGINEq6Pk5v08E8Zq3Yw+y1+8krMQcFtNvgki5J3Hhuay7u0pwQh9p5iZxKUCU6qromIiIijV1haRmf/nCA/67Yy7q9Of7lLePCuX5gGtcOaEVqbLh1AYo0EUGS6JhV11SiIyIiIo2RYRj8sC+XWSv38Mm6AxS6zapnIXYbI7onc8O5rbmgYzPsdg3qKVJVQZHoxPmrrqlER0RERBqP3GIPH6/bz39X7GXTwTz/8nbNIrl+YBpXn9OK5tEuCyMUabqCItGJ14ChIiIi0kgYhsGq3Uf574o9zF1/kBKPD4DQEDuje6Zww8DWnNc+AZtNpTcitREciU6kqq6JiIiItbIL3Xy0Zh+zVu5l+6EC//LOyVHcMLA1V53TsnzsPxGpC8GR6ISbJTpFbi8lHq9GBhaRGmvbti133303d99991nXtdlszJ49m/Hjx9d7XCKNQXWuj2Dh8xks/SmL/67Yw7yNmbi9ZulNuNPBFb1TueHc1pzTOk6lNyL1ICgSneiwEOwY+LCRU+QhJVaJjoiIiNSvzRl5/Ort1ezOKvIv69kyhhsGtuZnfVsQE+a0MDqRwBcUiY7NZiPSCfkes9g4JTbM6pBEREQkwP3t883szioiyhXCuL4tuPHc1vRsGWt1WCJBI2hGmIosT+nU85pI8HrllVdo0aIFPp+v0vJx48bxy1/+kh07djBu3DiSk5OJiopi4MCBfPXVV3V2/PXr13PppZcSHh5OYmIid9xxBwUFx+rpL1y4kHPPPZfIyEji4uIYOnQou3fv9m87bNgwoqOjiYmJoX///qxatarOYhNp6OvjqaeeolevXkRGRpKWlsZvfvObStcDwJIlS7j44ouJiIggPj6eUaNGcfToUQB8Ph9PPPEEHTt2xOVy0bp1ax577LEax1PX8ks8LNmeBcCHvx7CY1f2UpIj0sCU6IhI7RkGuAutmQyjymFee+21ZGVl8fXXX/uXZWdn88UXXzBhwgQKCgoYM2YM8+fPZ+3atVx22WWMHTuWPXv21PpPVFhYyKhRo4iPj2flypW8//77fPXVV0yePBmAsrIyxo8fz0UXXcQPP/zA0qVLueOOO/z19u+44w5atmzJypUrWb16NX/6059wOlXtpck41TXiKWpU10hDXx92u53nnnuOjRs38uabb7JgwQLuu+8+/+vr1q1j2LBhdO/enaVLl7J48WLGjh2L12
uOLzN16lQef/xxHnjgAX788UdmzpxJcnJyjWKpDwu3HMbt9dG+WSSdk6OsDkckKAVF1TWASKcB2DhaqERHpM55iuCvLaw59p8PQGhklVaNj49n9OjRzJw5k2HDhgHwwQcf0KxZMy655BLsdjt9+vTxr//II48we/ZsPvnkE39CUlMzZ86kpKSEt956i8hIM94XXniBsWPH8re//Q2n00lubi5XXHEFHTp0AKBbt26A+cv1/v37ue++++jatSsAnTp1qlU80sBOuEbsQFxDHbuK10hDXx/Hd1jQtm1bHn30Ue68805eeuklAJ544gkGDBjgnwfo0aMHAPn5+Tz77LO88MIL3HLLLQB06NCB888/v9px1JcvN2YAMLJHijoaELFI0JXoZBeqi2mRYDZhwgQ+/PBDSktLAXjnnXe44YYbsNvtFBQUcM8999CtWzfi4uKIiopi06ZNdVKis2nTJvr06eNPcgCGDh2Kz+djy5YtJCQkMHHiREaNGsXYsWN59tlnOXjwoH/d3/zmN9xxxx0MHz6cxx9/nB07dtQ6JpETNeT18dVXXzFs2DBatmxJdHQ0//d//0dWVhZFRWbD/YoSnVPZtGkTpaWlp33daqVlXhZuOQzAqB6Np5RJJNgEUYmO+aiqayL1wBlh/mps1bGrYezYsRiGwWeffcbAgQP59ttvefrppwG45557SE9P5x//+AcdO3YkPDyca665Bre7YT43Xn/9dX7729/yxRdf8O6773L//feTnp7Oueeey5/+9CcmTpzI559/zueff85DDz3ErFmzuPLKKxskNqmlE64Rn89HXn4+MdHR2O31/JtjNa6Rhro+du3axRVXXMGvf/1rHnvsMRISEli8eDG33norbrebiIgIwsPDT7v9mV5rDL7bnkVBaRnJMS76tIqzOhyRoBU0iU5UiFlHWYmOSD2w2apcfcxqYWFhXHXVVbzzzjts376dLl26cM455wBmw+eJEyf6k4eCggJ27dpVJ8ft1q0bb7zxBoWFhf5SnSVLlmC32+nSpYt/vX79+tGvXz+mTp3K4MGDmTlzJueeey4AnTt3pmvXrvz+97/nxhtv5PXXX1ei01SceI34fOD0msvqO9Gphoa6PlavXo3P5+PJJ5/0J3rvvfdepXV69+7N/PnzmTZt2knbd+rUifDwcObPn89tt91Woxjqk7/aWvcU7HZVWxOxSuP5dK1nx6quKdERCXYTJkzgs88+47XXXmPChAn+5Z06deKjjz5i3bp1fP/99/z85z8/qQeq2hwzLCyMW265hQ0bNvD1119z11138X//938kJyezc+dOpk6dytKlS9m9ezfz5s1j27ZtdOvWjeLiYu69914WLlzI7t27WbJkCStXrvS34RGpSw1xfXTs2BGPx8Pzzz/PTz/9xNtvv82MGTMqrTN16lRWrlzJb37zG3744Qc2b97Myy+/zJEjRwgLC+OPf/wj9913H2+99RY7duxg2bJl/Pvf/67Ve68LXp9B+o+ZAIzqkWJxNCLBLXgSnfKqazlFaqMjEuwuvfRSEhIS2LJlCz//+c/9y5966ini4+MZMmQIY8eOZdSoUf5fs2srIiKCL7/8kuzsbAYOHMg111zDsGHDeOGFF/yvb968mauvvprOnTtzxx13MGnSJH71q1/hcDjIzs5m4sSJdO7cmeuuu47Ro0ef8pdukdpqiOujT58+PPXUU/ztb3+jZ8+evPPOO0yfPr3SOp07d2bevHl8//33nHvuuQwePJiPP/6YkBDzl8sHHniAP/zhDzz44IN069aN66+/nkOHDtX8jdeR1buPklXoJjbcyaD2CVaHIxLUbIZRjb5ZLZKXl0dsbCy5ubnExMRUe3uPx8NL787l6Q0htIwLZ8mfLq2HKCVYeTwe5s6dy5gxY4Kmu9+SkhJ27txJu3btCAvTALz1zefzkZeXR0xMTL205zjT/7O2n7+B6kx/l6pcH/X9P5Xaq+nn3MP/+5HXluzkqn4tee
r6vvUX4FkE471JGo7V51dV701B8+mqcXRERESkPhmGUalbaRGxVvAkOuXJZpHbS4nHa20wItLkvfPOO0RFRZ1yqhjrQyRYBev1sfFAHvtziglz2rmoc3OrwxEJekHT61q4Axx2G16fQU6Rh5RYh9UhiUgT9rOf/YxBgwad8jVVE5FgF6zXx7zy0pyLOjcnPFTfM0SsFjSJjs0GceFOsgrdZBe6SYlVuwIRqbno6Giio6OtDkOkUQrW6+PLjeptTaQxCZqqawDxEeavSDlqpyMiIiJ1aNeRQrZk5hNitzGsa7LV4YgIwZboRIYCkK1ER6RONIFOG6UK9H+sH/q7Nm3V/f9VdEJwXvtEYiMCt3qeSFMSVIlOXLj5wXNUg4aK1EpFHfuioiKLI5G6UPF/DOS2Ew1J10dgqO51UZHojOqh0hyRxiJo2ugAJJR3vXZUg4aK1IrD4SAuLs4/OF9ERAQ2m83iqAKXz+fD7XZTUlJSp2OuGIZBUVERhw4dIi4uDodDjafrQlWuj/r6n0rt1eS6OJRXwpo9OQCM6K72OSKNRVAlOvER5VXXVKIjUmspKebNvDGMRB7oDMOguLiY8PDwekko4+Li/P9PqRtnuz7q+38qtVed62Lej2YnBH3T4tTZkUgjElSJTlxERYmOEh2R2rLZbKSmppKUlITHo1LS+uTxeFi0aBEXXnhhnVcvczqdKsmpB2e7Purzfyq1V93r4li1Nf1gINKYBFWiEx+hqmsidc3hcOiLcj1zOByUlZURFhamL8VNzOmuD/1PA0dukYelO7IAtc8RaWyCqmJwRdU1dUYgIiIidWHBlkzKfAadk6No3zzK6nBE5DhBlehUVF1TGx0RERGpC19u0CChIo1VUCU6CeUlOhowVERERGqrxOPlm62HASU6Io1RUCU6FSU6hW4vJR6vxdGIiIhIU7Zo62GKPV5axoXTo0WM1eGIyAmqlehMnz6dgQMHEh0dTVJSEuPHj2fLli1n3OaNN97AZrNVmsLCrOl6MdoVgsNuduOZow4JREREpBa+3GhWWxvZI1ndhIs0QtVKdL755hsmTZrEsmXLSE9Px+PxMHLkSAoLC8+4XUxMDAcPHvRPu3fvrlXQNWW3247reU3V10RERKRmyrw+5m9W+xyRxqxa3Ut/8cUXlebfeOMNkpKSWL16NRdeeOFpt7PZbI1mMLq4iFCOFLjV85qIiIjU2Iqd2eQUeUiIDGVg2wSrwxGRU6hVG53c3FwAEhLOfIEXFBTQpk0b0tLSGDduHBs3bqzNYWulokOCbJXoiIiISA1VDBI6vFuSv1q8iDQuNR4w1OfzcffddzN06FB69ux52vW6dOnCa6+9Ru/evcnNzeUf//gHQ4YMYePGjbRq1eqU25SWllJaWuqfz8vLA8yRpGsyAnvFNh6Ph9hw8y0fyS/RaO5SJ44/v0Tqg5XnmM5raep8PoN/ffsTEa4QbhrUuk7a0vh8hr99jqqtiTReNU50Jk2axIYNG1i8ePEZ1xs8eDCDBw/2zw8ZMoRu3brxz3/+k0ceeeSU20yfPp1p06adtHzevHlERETUNGTS09MpyLIDdlas20D8kfU13pfIidLT060OQQKcFedYUVFRgx9TpC49mb6FF7/eAUCpx8ttF7Sv9T5/2J9LRl4JkaEOhnZsVuv9iUj9qFGiM3nyZD799FMWLVp02lKZ03E6nfTr14/t27efdp2pU6cyZcoU/3xeXh5paWmMHDmSmJjqd9/o8XhIT09nxIgR/Biyi6WHdtKsZTvGjOla7X2JnOj488vpdFodjgQgK8+xihJ1kabog9X7/EkOwGNzN9E2MZLh3ZNrtd+KamsXd00izOmo1b5EpP5UK9ExDIO77rqL2bNns3DhQtq1a1ftA3q9XtavX8+YMWNOu47L5cLlcp203Ol01uom73Q6aRZtdm2dV1KmL6VSp2p7foqcjRXnmM5paapW7Mxm6kc/ADDpkg5kF3r474o9/HbWWj64cw
jdazHuTUWio2prIo1btRKdSZMmMXPmTD7++GOio6PJyDAv9NjYWMLDwwG4+eabadmyJdOnTwfg4Ycf5rzzzqNjx47k5OTw97//nd27d3PbbbfV8VupmopBQ7M1jo6IiEhA2p1VyK/eXoXHazCmVwp/GNEFr2GwO6uQ73ZkcdubK5kzeShJ0dUf12/7oXx+OlxIqMPOJV2a10P0IlJXqtXr2ssvv0xubi4XX3wxqamp/undd9/1r7Nnzx4OHjzonz969Ci333473bp1Y8yYMeTl5fHdd9/RvXv3unsX1ZAQafa6pu6lRUREAk9usYdfvrGSo0UeereK5clr+2K323A67Lw8oT/tm0VyILeE299aTYnHW+39V3RCMKRjItFhKvEUacyqXXXtbBYuXFhp/umnn+bpp5+uVlD1Kb4i0VH30iIiIgHF4/Ux6Z017DhcSGpsGK/ePIDw0GNtaGIjnLw2cSDjX1rC93tz+MN73/P8jf2wV6N7aFVbE2k6ajWOTlMUH6ESHRERkUBjGAYPfbKRxduPEBHq4NVbBpAUc3LVtLbNIplxU3+cDhufrT/IM19trfIxDuQU88O+XGw2GN6tdh0aiEj9C7pEp2LA0EK3l9Ky6hdZi4iISOPz+pJdzFy+B5sNnr2hHz1axJ523fPaJ/LYlb0AeG7Bduas3V+lY8wrL80Z0Cae5tEnd5okIo1L0CU60WEh/hGMc9QhgYiISJO3YHMmj372IwB/Ht2NEVXoPvq6AWnceVEHAO774AdW7co+6zYaJFSkaQm6RMdutxEXXt7zmqqviYiINGmbDuZx18y1+Ay4YWAat11Q9aEv7hvVhVE9knF7fdzx9mr2ZJ1+gNzsQjcrypMhJToiTUPQJTpwXIcESnRERESarEP5Jdz25ioK3V6GdEjkkfE9sdmq3rGA3W7j6ev70rNlDNmFbm59cyV5Jaeu7fHVpky8PoPuqTGkJUTU1VsQkXoUlIlORTudo6q6JiIi0iSVeLzc8dZq9ucU075ZJC9P6I/TUf2vNRGhIbx680CSY1xsO1TA5JlrKfP6TlpvnnpbE2lygjLROTZoqEp0REREmhrDMLjn/e9ZtzeH2HAn/544kNiImo9pkxIbxr9vGUi408GirYd5+NMfK71eWFrGom1HABjVU72tiTQVQZnoaNBQERGRpuvpr7bx6Q8HcTpszLipP+2aRdZ6nz1bxvL09X2x2eCtpbt587td/te+2XoYd5mPNokRdEmOrvWxRKRhBGWio0FDRUREmqY5a/fz3PxtADx2ZS8Gd0iss31f1jOFP17WFYBp/9vIwi2HgMqDhFanDZCIWCs4E53y4m2V6IiIiDQdq3dnc98HPwDwq4vac92AtDo/xq8ubM+1/VvhM2DyzLVs2J/Lgs1mwjOqh6qtiTQlQZromCU62eqMQEREpEnYm13EHW+txu31MbJ7Mn8c1bVejmOz2Xjsyl6c2y6BgtIyrv/nUvJLymge7aJfWny9HFNE6keI1QFYoaKNTo6qromIiDSInw4X8K9vf8JdZtRo+1W7s8kqdNOjRQzP3NAXu73+qpCFhtj55039ufKlJewqH1tnRPfkej2miNS9oEx04ipKdFR1TUREpN55vD4mzVzLpoN5tdpPcoyLf98ykIjQ+v/6Eh8Zyr8nDuTKF5eQV1LG5b1S6/2YIlK3gjLRUa9rIiIiDeffi3ey6WAecRFO7ryoAzUpFwlx2BnTK4WU2LA6j+90OjSPYvakoWzNyGdox2YNdlwRqRvBmeiUl+gUur2UlnlxhTgsjkhERCQw7c4q5JmvtgJw/+XduaZ/K4sjqp4OzaPo0DzK6jBEpAaCsjOC6LAQKqrZ5qhDAhERkXphGAb3z9lAicfHkA6JXH1OS6tDEpEgEviJztFd2Na8SYujy/yL7HbbsZ7XVH1NRESkXsxZt59vtx3BFWLnr1f20hg0ErjK3OApMR+9HvCWgc8LRs0635C6EfhV1w6sJeTzP9A+sh
PwsH9xfGQoWYVuDRoqIiJSD7IL3Tzy6SYAfjusE22bRVockTSI4qOQ+SOEuCA0ClxR5mNoFDia+NdOw4C8/XBkKxzZVv5Y/jz/YBV2YAObHWy28ucV83YIi4XIZhDZ/Lip2amfO8Pr+53WjmGYyZ7PA153edLnKV9WVp4IusEZAc061msoTfyMq4JYczCxCHdWpcXHBg1V1TUREZG69uhnP5Jd6KZrSjR3XNje6nCkvpTkwu6lsOtb2LkIMtYDpynFCAmH0Mjy5Cf6WBLkf4w2H+21+HoaEmruxxVzwuNxz0NCz7wPTwlk7zhFQrMdPIU1jw0DDO+p/zyeoiomS5h/oxMTH58XDJ85+Z97Ky/3v+Y9YT2jPLaKR99Zn4f4fIwoLiJk6z3lScxxyYzhrdr7aHsBTPy0auvWUNAkOmGeo5R5PeA0E5xjg4aqREdEJJB4vV7+8pe/8J///IeMjAxatGjBxIkTuf/++/1VpwzD4KGHHuJf//oXOTk5DB06lJdffplOnTpZHH1gWLztCB+t2Y/NBtOv6oXTEfg15YNGaf6xxGbXt3Dwe/ML8PFiW5uP7nwoLTC/AAOUFZtT0ZGGjflEIWEnJz+uGDPOI1vh6G5Om6zZQyChPTTrXHlKaFeeoB2fEFClpAGfF0pyoPAIFB4+bjpy8nOvG9wF5nR0VwP8sU7NBkQAVLW8wB4Cdic4nOZzhxPC4+otvgqBn+hENsdwhGLzus1MOawDcNygoWqjIyISUP72t7/x8ssv8+abb9KjRw9WrVrFL37xC2JjY/ntb38LwBNPPMFzzz3Hm2++Sbt27XjggQcYNWoUP/74I2FhDdd9cSAqdnv58+z1ANwyuC39WsdbHJHUSmkB7F0GO7+FXYvhwNqTf7FP6ABtz4d2F5qP0SmVXy8rNffjzgd34bHnpeVf2CvNF1a9ROBEhmEmAqV5ZkJWUv5YMVWUxpSVmFPh4dPvyxULzSsSmU7HEpr4tuaXdCsYhvneCo8clwQdMtsF2exgL68GZ3OUzzuOzZ/2tVNUpavC87IyL0u++44hF1yE0xVROYFxhB57XpHcWNQ+L/ATHbsdYlrC0Z3Y8vZBczPRiVOJjohIQPruu+8YN24cl19+OQBt27blv//9LytWrADM0pxnnnmG+++/n3HjxgHw1ltvkZyczJw5c7jhhhssiz0QPLdgG3uyi0iNDeOeUV2sDid4GT4c3lKzzUyJAd7SY20jyiqel5a3ofCUL3Mfm3L2mInN/tVmu4rjxbc1qx21vcBMbGLP0pteiMucIhPr7e1WibesPKHKPyERKn+EY0lNZHPLvpyfls1mtuUJi4XEDpaGYng85ERmQEovf22pxijwEx3AiG2F7ehOyN3nX5YQWdFGR4mOiEggGTJkCK+88gpbt26lc+fOfP/99yxevJinnnoKgJ07d5KRkcHw4cP928TGxjJo0CCWLl16ykSntLSU0tJS/3xeXh4AHo8Hj6f6bT0rtqnJto3Z5ox8Xln0EwAPXd4Vl90IuPfYqJTkQc5ubDm7seXsgpw92I7uxpa7m5CcvVzhLYUfan8YIzYNo835+Nqcj9FmKMSeMBZSU/ofh0SZU2TqmdcrKzvz60HO6s+wqh43KBIdYswL0pa337+ooo3OUY2jIyISUP70pz+Rl5dH165dcTgceL1eHnvsMSZMmABARkYGAMnJyZW2S05O9r92ounTpzNt2rSTls+bN4+IiIgax5qenl7jbRsbnwFPr3fg9dnom+CjdOcq5u60OqqmzWaUEeHOIqL0MBHuQ0SWHibCfZjI0kNEuA8T6q1aw3gDGz5biDnZQ4499y9z4rM58Nmc/nXcjkiyo7pwOKobxa7m5o72Aft+oE6yJwkIVn2GFRUVVWm9oEh0jJjyItXcvf5lxxIdleiIiASS9957j3feeYeZM2fSo0cP1q1bx913302LFi245ZZbarTPqVOnMmXKFP98Xl4eaWlpjBw5kpiYmGrvz+PxkJ
6ezogRI3A24mof1fHm0t3sKdxCdFgIL942lKRol9UhNV0lediXvYB9xT+xnaWXLyOiGUZcG4hrjRHXFiPefO6JasX8pd9z6cjLcIaG+ath2an6IIotavcuJIBZ/RlWUap+NsGR6JQXsdpyjyvRiVSiIyISiO69917+9Kc/+aug9erVi927dzN9+nRuueUWUlLMhtKZmZmkph6rvpKZmUnfvn1PuU+Xy4XLdfIXd6fTWaubfG23byz25xTz1FfbAfjT6K60TIiyOKImqqwUVr0G3zwBxdnmspAws01MXBvzMb4txLfxL7O5ojhVSxLD48Hr2IzTFR4Q55g0TlZ9hlX1mEGR6FR0MW3LO76NTnmio3F0REQCSlFREXZ75d+sHQ4HPp/ZBW67du1ISUlh/vz5/sQmLy+P5cuX8+tf/7qhw23yDMPgwTkbKHJ7GdAmnhsHtrY6pKbH54MNH8CCR8xOAAASO8Hwh6DL5WbHSiJSbUGR6ByrurbP7JrPZvMPGFpQWoa7zEdoiD5EREQCwdixY3nsscdo3bo1PXr0YO3atTz11FP88pe/BMBms3H33Xfz6KOP0qlTJ3/30i1atGD8+PHWBt8Efb4hg/mbD+F02Jh+VS/s9kbWU1Vjt30+fPVQ+UCbQFQKXPwn6Pd/4AiKr2ki9SY4rqDyRMfmKTS7WYxIICbMid1mNp7MKXKTFKNxE0REAsHzzz/PAw88wG9+8xsOHTpEixYt+NWvfsWDDz7oX+e+++6jsLCQO+64g5ycHM4//3y++OILjaFTTblFHh76ZCMAv7m4I52Soy2OqAk5sBa++gv8tNCcd8XA0N/Beb+G0EgrIxMJGMGR6DjDKQ2JxlWWb5bqRCRgt9uIjwglq9BNthIdEZGAER0dzTPPPMMzzzxz2nVsNhsPP/wwDz/8cMMFFoAe/2Izh/NLad88kt9cYu24Hk1G9k+w4FHY8KE5b3fCubfDBfdYP86MSIAJjkQHKAptdizRSe0NQFyE00x0NJaOiIhItazYmc1/V5jtSaZf2QtXiMPiiBq5gsOw6O9mZwM+D2CD3tfBJX82OxYQkToXNIlOsTOReHZW6mI6ITKUHYcLydFYOiIiIlVWWuZl6kfmWCo3npvGoPYqiTit0gJY+iJ89xy4C8xlHYbB8L/4f3gVkfoRPIlOaPmH8CnG0lGJjoiISNW9vHAHOw4X0izKxZ8u62Z1OI2HYUB+BmRuhEMbIfNH2DEfCg+br6f2hRHToP3FVkYpEjSCJtEp8ic6x7qY9g8aqkRHRESkSrYfyuelr3cAMO1nPYiNCNIxWkoL4NCmYwlNRXJTfPTkdePbwrAHofuV6ipapAEFTaJTfKpExz9oqKquiYiInI3PZzD1o/W4vT6GdU1iTK8Uq0Oqfz4fZG0vT2jKk5pDG+HorlOvb7NDYkdI6g7JPSCll1lVLSS0QcMWkWBMdHKOb6Nj/gp1tEglOiIiImcza+VeVu46SkSog4fH98RmC+AxcwwDtn8F8x6Aw5tOvU5U8rGEJrmH+bx5F3CGN2ysInJKQZPoFDmbmU8KMqCsFEJcxKmNjoiISJUUlJbx9y83A3DPyC60jGvgL/PFRyE0umEG0Tz4A6Q/cGyMm5BwSOpWOaFJ7gGRzeo/FhGpsaBJdNwh0RghYdjKSiDvACS0I6E80clRiY6IiMgZvfndLo4WeWjXLJKbB7dpmIN6PbD5U1j5b9j1LUSnwoBbof9EiGpe98fLO2COcbNuJmCAIxQG/Qou+AOEx9f98USkXgVNooPNBjEtIXuH2fNaQjt/G51sJToiIiKnlV/i4ZVFPwHwu2GdCHHUc4P63P2w5k1Y/QYUZB4XyEH4+lFY9AT0vAYG3QEt+tX+eKX5sORZ+O4FKCs2l/W82uxAQGPciDRZwZPoAEZsK2zZO/wdEsSX9xRztFCdEYiIiJzO60t2kVvsoUPzSMb2aVE/BzEM2PkNrHwVNs8Fw2suj0wyS3D6/hz2rYLlM2
D/Kvh+pjmlnWeWunQbC45q9gDnLYO1b8PXf4XCQ+aytPNg1GPQakCdvj0RaXhBlegQ08p8LE90EspLdApKy3CX+QgNUZePIiIix8st9vCvb83SnLuHd8Zhr+MOCIpz4Pv/mtXTsrYdW97mfBh4K3S94liPZQntoPe15QnPP2HjbNi7zJyiW8DAX0L/X5y97YxhwLZ0sx3O4c3l+24Pw6eZCVMgd7IgEkSCKtExYisSHbPntZgwJ3Yb+AyznU5STJiF0YmIiDQ+ry3eSX5JGZ2To7i8V2rd7fjAOlj1b/jh/WPVxUKjoc8NZoKTdIaBSFsNMKeRj8Cq12HVa5Bf3r7mm79Dr2vMUp7UPidve2JHA+HxcNEfzbY/6gJaJKAEV6JTUaJT3sW03W4jLiKU7EI32Up0REREKskpcvPa4p2AWZpjr21pjqcEfpxjVk/bt/LY8qTuMPA26H0duKKrvr/oFLhkKlwwBTbOgeUvw4G1sO4dc2o9GM69wyylKTysjgZEgkxQJTrEtjQfjx80NMJJdqFb7XRERERO8Oq3O8kvLaNrSjSX9ajF4KA+H6z8Fyx8HIqzzWV2J3T/mZngtB5cu+piIS7oc72ZKFW04/lxDuxZak7RqWYVOXU0IBJUgirRMWLTzCe5+8z6uTYbCZGh7DhcqEFDRUREjpNd6Ob1JWZpzu9H1KI05/AW+OQu2LvcnI9pBQMmQr+bITq5boKtYLNB2kBzynsUVldUaztovq6OBkSCSlAlOkSX9xRTVgxF2RCZqEFDRURETuFf3/5EodtLjxYxjOxeg4SkzG122bzoCfC6ITQKhv/F7CygIQb9jEmFS/5sVk3bNg9CI6H9JepoQCSIBFeiE+KCqGSzT/7cPRCZqEFDRURETnCkoJQ3v9sFwO+Hd8ZW3eRg/2r4+C44tNGc7zQSLn8K4tLqNtCqCHGZbXREJOgEV6IDENuqPNHZBy36HRs0VG10REREAHhl0U8Uub30bhXLsG5JVd/QXQRfPwbLXgLDB+EJMPoJsxc0laSISAMLwkQnzfyl6cRBQ1WiIyIiwqH8Et5augsw2+ZUuTTnp2/gf7+Fo+a29LoWLnv87GPaiIjUkyBMdCp3MV1RoqNER0REBP75zU+UeHz0TYvj4s7Nz75BcY45Ls2at8z5mJZwxdPQeVS9xikicjb26qw8ffp0Bg4cSHR0NElJSYwfP54tW7acdbv333+frl27EhYWRq9evZg7d26NA641f89rZqJT0UbnqDojEBGRIJeZV8J/lu0GYEpVSnM2fQovDjqW5Ay4FX6zTEmOiDQK1Up0vvnmGyZNmsSyZctIT0/H4/EwcuRICgsLT7vNd999x4033sitt97K2rVrGT9+POPHj2fDhg21Dr5G4o7rYhqIjzSrrmWrREdERILcywt3UFrmY0CbeC7odIYqZ/mZ8N7N8O4EKMiAxI4wcS5c8RSExTRcwCIiZ1CtqmtffPFFpfk33niDpKQkVq9ezYUXXnjKbZ599lkuu+wy7r33XgAeeeQR0tPTeeGFF5gxY0YNw66Fiqpr/jY65b2uqTMCEREJYgdzi5m5fA9whtIcw4B1M+HLP0NJDtgcMPR3cNEfwRnWsAGLiJxFrdro5ObmApCQkHDadZYuXcqUKVMqLRs1ahRz5sw57TalpaWUlpb65/Py8gDweDx4PNVPSCq28Xg8EJGCE6DwEJ7ifKJDHQDkl5ZRWFxKaEi1CrlEKp9fIvXAynNM53XweOnrHbi9Ps5tl8DgDoknr+Augo9uh82fmvMpvWHcC5Dap2EDFRGpohonOj6fj7vvvpuhQ4fSs2fP066XkZFBcnLlgcaSk5PJyMg47TbTp09n2rRpJy2fN28eERERNQ2Z9PR0MAwut4cS4nPzzSczyXelYMOBgY0PP/2C2NAa716CXHp6utUhSICz4hwrKipq8GNKw9ufU8yslWcozSk4DP+93uy11OGCS6bC4LsaZuBPEZEaqvEn1KRJk9
iwYQOLFy+uy3gAmDp1aqVSoLy8PNLS0hg5ciQxMdWv++vxeEhPT2fEiBE4nU4c+9rCka1c3K8jRrsLefiHrzla5KH/4AvonBxdh+9EgsGJ55dIXbPyHKsoUZfA9sKC7Xi8BkM6JHJe+xNKc7J2wH+uhqM7ITwebpwFrc+zJlARkWqoUaIzefJkPv30UxYtWkSrVq3OuG5KSgqZmZmVlmVmZpKSknLabVwuFy6X66TlTqezVjd5//axaXBkKyGFB8HpJCEylKNFHvJKDX1RlRqr7fkpcjZWnGM6pwPf3uwi3l9l9kT6+xGdK7+4bxXMvA6KsiCuNdz0ETTrZEGUIiLVV60GKYZhMHnyZGbPns2CBQto167dWbcZPHgw8+fPr7QsPT2dwYMHVy/SunTiWDoRGktHRESC0wsLtlPmM7igUzMGtj2uze3mufDGFWaSk9oXbv1KSY6INCnVKtGZNGkSM2fO5OOPPyY6OtrfziY2Npbw8HAAbr75Zlq2bMn06dMB+N3vfsdFF13Ek08+yeWXX86sWbNYtWoVr7zySh2/lWqIPbGLaSU6IiISfHZnFfLBGvNeePfw40pzVv4b5t4Dhg86Dodr3wRXlEVRiojUTLVKdF5++WVyc3O5+OKLSU1N9U/vvvuuf509e/Zw8OBB//yQIUOYOXMmr7zyCn369OGDDz5gzpw5Z+zAoN7FadBQERGR5+Zvx+szuLhLc/q3iTe7j57/MHw2xUxy+v2f2SZHSY6INEHVKtExDOOs6yxcuPCkZddeey3XXnttdQ5Vv/xj6ZiJTlzFoKEaS0dERILET4cLmL3WLM35/fDOUOaGT+6CH2aZK1z8Z7joPjjVeDoiIk1AcPYL6U909oPP5y/RyVHVNRERCRLPzd+Gz4BhXZPo09wOM6+Fnxaag4D+7Dnod5PVIYqI1EpwJjoxLQEbeEuh6Ii/M4JsJToiIhIEth/K55PvDwBw75AYeH00ZG4AZyRc9xZ0Gm5xhCIitVetNjoBw+GE6FTzee7eY50RqI2OiIgEgWfnb8dnwC0di+n66VVmkhOZBL+YqyRHRAJGcCY6UKmL6YTyNjpHi9RGR0REAtuWjHw+/eEAg2ybePDQ7yFvHyR2gtu+ghZ9rQ5PRKTOKNHJ3Uecel0TEZEg8ez8rVxuW8p/XI/jcOdB2nlw6zyIb2N1aCIidSp4E524Y2PpVHRGkF9ahrvMZ2FQIiIi9WdvdhHJP77OC6HP48QD3X4GN8+BiISzbisi0tQEb6ITe2wsnZhwJ/by3jNzilWqIyIigSl96WoeCPmPOTPo13DtG+AMtzQmEZH6EsSJzrGxdBx2G7Hh5e10NJaOiIgEIMMwyPvhf9htBkfi+8Hox8HusDosEZF6o0Qn1xwsraLntWy10xERkQD0/b5c+hQtAyC6788sjkZEpP4FcaJTXnWtKAvcRRo0VEREAtqnK7cxxP4jAK5uYyyORkSk/gVvohMWC6HR5vPjel7ToKEiIhJoPF4fRzak47J5KI5sBc27WB2SiEi9C95Ex2ar1E7HP5aOqq6JiEiAWbT1MIPcKwBwdR9j3gNFRAJc8CY6UKmL6Yo2Oho0VEREAs3sNXu51LEWAHvX0RZHIyLSMII70TmuQ4J4DRoqIiIBKK/Ew4FNy0m25eB1RkKboVaHJCLSIJTogFl1TW10REQkAH2xPoMLjNUA2DteCiEuiyMSEWkYQZ7oqOqaiIgEto/W7mOYYw0Ats6XWRyNiEjDUaIDkLuX+Ah1RiAiIoFlf04xP/20g972nRjYoNNIq0MSEWkwQZ7oVFRd2098RAigREdERALHx+v2c4ljHQC2lv0hKsnagEREGlBwJzrRqWCzg89DopEDQH5pGR6vz9q4REREaskwDGav2c8wu1ltDVVbE5EgE9yJjiMEolsAEF2a4R9W4Kg6JBARkSZu44E89hzK5nz7BnNB51HWBiQi0sCCO9EB/1g6jrx9xIVXtNNRhwQiItK0zV67n8
H2H4mwlZo/6qX0sjokEZEGpUTnuC6mj/W8phIdERFpusq8Pj5ed4BL7eYgoXQehb/agohIkFCio0FDRUQkwCzefoQjBSWMCClPdLqMtjYgERELKNE5fiwdDRoqIiIBYM7a/XSx7SWVIxASDu0utDokEZEGp0TnuLF0EiLNNjo5GjRURESaqMLSMr7cmMmwimpr7S8CZ7i1QYmIWECJTkXVtZy9x0p0VHVNRESaqC82ZFDs8TLGtc5coN7WRCRIKdGpSHRKckhymSU5aqMjIiJN1Zx1+0kgjx6+reaCTkp0RCQ4KdEJi4GwWABa2rIB9bomIiJNU2ZeCUu2H+Fi+zpsGJDSG2JbWh2WiIgllOiAv51Oc+MQANlqoyMiIk3Qx+v24zPgmuiN5oLOl1kbkIiIhZTogL/6WmKZmeio6pqIiDRFs9cewEkZA8rWmAuU6IhIEFOiA/4SnVh3BqCqayIi0vRszshj08E8hoRsIdRbCJFJ0KKf1WGJiFhGiQ74S3Qiis1EJ7+kDI/XZ2VEIiIi1TJ7zX4A/i9xs7mg80iw6zYvIsFLn4DgT3RCC/Zhs5mLVKojIiJNhddn8PG6A4DB4LKV5kJVWxORIKdEB/xV12y5+4kL16ChIiLStCz7KYuMvBJ6hx0msnAPOEKh/cVWhyUiYiklOgBxZqJD3n4Swx2ABg0VEZGm46Pyamt3ppaPndP2fHBFWxiRiIj1lOgARCWDPQQML+3CCwD1vCYi0pTt37+fm266icTERMLDw+nVqxerVq3yv24YBg8++CCpqamEh4czfPhwtm3bZmHENVfs9vLFhoMAnO9bbS5UtTURESU6ANgdENMCgA7OowAcVdU1EZEm6ejRowwdOhSn08nnn3/Ojz/+yJNPPkl8fLx/nSeeeILnnnuOGTNmsHz5ciIjIxk1ahQlJSUWRl4z837MoNDtpVu8l+hD5clc51HWBiUi0giEWB1AoxGbBjl7SHNkAc3VGYGISBP1t7/9jbS0NF5//XX/snbt2vmfG4bBM888w/3338+4ceMAeOutt0hOTmbOnDnccMMNDR5zbcxea1Zbm5y2G9tWLzTvBvFtrQ1KRKQRUKJTobxDghYcAdRGR0Skqfrkk08YNWoU1157Ld988w0tW7bkN7/5DbfffjsAO3fuJCMjg+HDh/u3iY2NZdCgQSxduvSUiU5paSmlpaX++by8PAA8Hg8eT/VrAFRsU5Ntj3ekoJRvt5n3rQt8ZmmOt+MIfLXcrzR9dXWOiZyK1edXVY+rRKdCeRfTzY3DgLqXFhFpqn766SdefvllpkyZwp///GdWrlzJb3/7W0JDQ7nlllvIyDDHTEtOTq60XXJysv+1E02fPp1p06adtHzevHlERETUONb09PQabwuw8KANr89B+8gywnaa+/ruSDTZc+fWar8SOGp7jomciVXnV1FRUZXWU6JToTzRSfBkAuqMQESkqfL5fAwYMIC//vWvAPTr148NGzYwY8YMbrnllhrtc+rUqUyZMsU/n5eXR1paGiNHjiQmJqba+/N4PKSnpzNixAicTmeNYgL418vLgDz+0MdN6JpCjPB4zrvmLrODHQlqdXWOiZyK1edXRan62eiTsEJ5F9PRpeavednqjEBEpElKTU2le/fulZZ169aNDz/8EICUlBQAMjMzSU1N9a+TmZlJ3759T7lPl8uFy+U6abnT6azVTb42228/lM+GA3mE2G1c6lgHgK3TSJyu8BrHI4GntueoyJlYdX5V9Zjqda1CeRud8CKzi84cVV0TEWmShg4dypYtWyot27p1K23atAHMjglSUlKYP3++//W8vDyWL1/O4MGDGzTW2qjohOCizs0J3/WVuVC9rYmI+CnRqRDTEoAQTz7RFKkzAhGRJur3v/89y5Yt469//Svbt29n5syZvPLKK0yaNAkAm83G3XffzaOPPsonn3zC+vXrufnmm2nRogXjx4+3Nvgq8vkM5qw9AMCELj44vBlsDugwzO
LIREQaD1Vdq+CKgvB4KD5KC9sRtpRE4PH6cDqUC4qINCUDBw5k9uzZTJ06lYcffph27drxzDPPMGHCBP869913H4WFhdxxxx3k5ORw/vnn88UXXxAWFmZh5FW3Ylc2+3OKiXaFcIFRPkhomyEQHmdpXCIijYkSnePFpkHxUVras9jibU1OkYfm0SfXyRYRkcbtiiuu4Iorrjjt6zabjYcffpiHH364AaOqO3PKq62N7pWCc/vL5kJVWxMRqUTFFccrb6fTIfQooC6mRUSk8SnxePlsvdme9OqecbBrsflC59HWBSUi0ggp0TleeRfT7UOyAQ0aKiIijc/SHVnkl5SRGhvGQO868HkgoQM062h1aCIijYoSneOVJzqtHGaio57XRESksdmfUwxAjxax2Ld9aS7sfJmFEYmINE7VTnQWLVrE2LFjadGiBTabjTlz5pxx/YULF2Kz2U6aTjf6tKXKx9JJMQ4DkF2osXRERKRxqaht0CwiBLZWJDpqnyMicqJqJzqFhYX06dOHF198sVrbbdmyhYMHD/qnpKSk6h66/pW30WnmMxMdtdEREZHGpiLR6c52KDoCrhho3XTG/xERaSjV7nVt9OjRjB5d/QaPSUlJxMXFVXu7BlVedS3Wc4QQyjiqNjoiItLIVCQ6vQqXmgs6DoOQUAsjEhFpnBqsjU7fvn1JTU1lxIgRLFmypKEOWz2RSeAIxY6PZI6SrRIdERFpZCoSnXbZ35oL1D5HROSU6n0cndTUVGbMmMGAAQMoLS3l1Vdf5eKLL2b58uWcc845p9ymtLSU0tJS/3xeXh4AHo8Hj6f67WYqtqnKtiExLbEd3UkLWxZZBaU1Op4El+qcXyI1YeU5pvO68ckqdJNKFnF5WwAbdBxhdUgiIo1SvSc6Xbp0oUuXLv75IUOGsGPHDp5++mnefvvtU24zffp0pk2bdtLyefPmERERUeNY0tPTz7rOEE8YzYGWtiOsPXCYuXPn1vh4Elyqcn6J1IYV51hRUVGDH1POLLuwlOGOteZM2rkQmWhtQCIijVS9Jzqncu6557J48eLTvj516lSmTJnin8/LyyMtLY2RI0cSExNT7eN5PB7S09MZMWIETqfzjOs6/jcXfthEC9sR1oRGMGbMBdU+ngSX6pxfIjVh5TlWUaIujYNhGGQXurnUXp7oqLc1EZHTsiTRWbduHampqad93eVy4XK5TlrudDprdZOv0vbxbQBoacviaJFHX1ylymp7foqcjRXnmM7pxiW/tAyHt4ShIRvMBZ2r3zmQiEiwqHaiU1BQwPbt2/3zO3fuZN26dSQkJNC6dWumTp3K/v37eeuttwB45plnaNeuHT169KCkpIRXX32VBQsWMG/evLp7F3WpvOe1FrYj5JeU4fH6cDo0rqqIiFgvu8DNufbNhNk8ENsakrpZHZKISKNV7URn1apVXHLJJf75iipmt9xyC2+88QYHDx5kz549/tfdbjd/+MMf2L9/PxEREfTu3Zuvvvqq0j4alfJEp6XtCAA5RR6aR59cuiQiItLQsgrd/vsTKT3BZrM2IBGRRqzaic7FF1+MYRinff2NN96oNH/fffdx3333VTswy8S2BqClPQswOFrkVqIjIiKNQnahm0TK201FNrM2GBGRRk51sk4U2xKASEqIoVCDhoqISKORXVhKoq0i0WlubTAiIo2cEp0TOcMhwvyVzOyQQImOiIg0DlmFbprZcs0ZJToiImekROdUjmunk12owfJERKRxyC44vuqaEh0RkTNRonMqcWmA2fOaSnRERKSxyC5yH1d1TW10RETORInOqcRWJDpZaqMjIiKNRnahW210RESqSInOqZRXXWtlO8LRIlVdExGRxiGnoJgE8s0ZJToiImekROdUjhs0VFXXRESksfAUZGG3GRjYIDzB6nBERBo1JTqnclzVtWxVXRMRkUbCUWQOFuoLiwdHtYfCExEJKkp0TqU80Ukih4LCQouDERERgWK3l0hvjjkTpW
prIiJno0TnVCKb4XOEYbcZOIsyrI5GRESErMJSmmGOoWNXoiMiclZKdE7FZsOIaQlArDuTMq/P4oBERCTYHd/jmk0dEYiInJUSndOwVYylwxFyitXzmoiIWCtLXUuLiFSLEp3TsMdV9LymsXRERMR62QVuEsurrinRERE5OyU6p1PeIUFL2xH1vCYiIpbLLnTTzF+i08zaYEREmgAlOqdzXKKjQUNFRMRqqromIlI9SnROJ/a4qmsaNFRERCyWXVhKIkp0RESqSonO6ZQnOi1tR8guKLU4GBERCXbZhR6V6IiIVIMSndMp71463OamNO+wxcGIiEiwKyjII9pWbM6ojY6IyFkp0TkdZxiFoYnm89x91sYiIiJBz1dwxHy0h4IrxuJoREQaPyU6Z1AS3gIAZ8EBiyMREZFgZys2Ex1veCLYbBZHIyLS+CnROQNPtJnoRBTvtzgSEREJZu4yH+HubABsUWqfIyJSFUp0zsCIMbuYjirJsDgSEREJZkeLjo2h44hKsjgaEZGmQYnOGYQkmIlOfNkhiyMREZFgllXg9nctrRIdEZGqUaJzBq7ENgAk+Q5T7PZaHI2IiASr7EqDharHNRGRqlCicwbRyW0BcyydZT9lWRuMiIgErazCUhJtueaMxtAREakSJTpnYIszS3Sa23JZvn6LxdGIiEiwyi500wwNFioiUh1KdM4kIoHchF4AuLbMwTAMiwMSEZFgVLnqmhIdEZGqUKJzFuH9bwTgEvfXbDtUYHE0IiISjLLURkdEpNqU6JxFaJ9r8WKnr30Hq1avtDocEREJQtn5pSSiNjoiItWhROdsopI42GwIAM6NH1gcjIiIBKOSwqOE2sp7/4xQiY6ISFUo0amCiupr5xakk1votjgaEREJNkbBYQDKnFHgDLM4GhGRpkGJThUk9r+SYsJoYzvED8vTrQ5HRESCjKP4CAC+cJXmiIhUlRKdqgiNZHviJQDY1r9ncTAiIhJMvD6D0NLysdyi1D5HRKSqlOhUUUi/GwDocXQ+Xk+pxdGIiEiwyClyk1g+hk5IdJLF0YiINB1KdKqo46AxHCaOePLZuewTq8MREZEgkV14LNGxq0RHRKTKlOhUkdMZyg9xwwEoWzfL4mhERCRYmGPoqGtpEZHqUqJTHb2uB6Bd1iIoybU4GBERCQbZlQYLVaIjIlJVSnSqoe+5F7LN1xIXbnLXfGR1OCIiEgSyCt008yc66nVNRKSqlOhUQ2J0GMujzOprxav/a3E0IiISDLILjrXRUYmOiEjVKdGpJk+PawBIyloBufstjkZERAJddmGp2uiIiNSAEp1qGti3D8t9XbFjUPaDxtQREZH6dbSgmARbgTmjREdEpMqU6FRTjxYxfOW8GICS1ep9TURE6pcn/zAABnYIj7c4GhGRpkOJTjXZbDY8nX9GqRFCVM5myNhgdUgiIhLICs1ExxMWD3aHxcGIiDQdSnRq4LweHVjg6weA8cO7FkcjIiKBzF50BABfuHpcExGpDiU6NXB+p2b8z7gAAO/374HPa3FEIiISiAzDwFWaDYAtSu1zRESqQ4lODUS5Qihqcym5RgQhhRmwa7HVIYmISADKKykjzjB7XAuJTrI4GhGRpkWJTg1d2K0Vn3nPM2fU+5qIiNSD7EK3v2tphxIdEZFqUaJTQ5d2TWK293wAjB/ngKfY2oBERCTgZBeWHjdYqNroiIhUhxKdGmrbLJLshH7sM5phcxfAls+tDklERAJMVoGbRFtFoqM2OiIi1aFEpxYu7pbCHO9Qc0bV10REpI5lF7pppkRHRKRGqp3oLFq0iLFjx9KiRQtsNhtz5sw56zYLFy7knHPOweVy0bFjR954440ahNr4XNo1yZ/oGNvToTDL4ohERCSQZBW6ScRso6NER0Skeqqd6BQWFtKnTx9efPHFKq2/c+dOLr/8ci655BLWrVvH3XffzW233caXX35Z7WAbm4FtE8gIbct6X1tsvjLY+JHVIYmISAAxOy
NQGx0RkZqodqIzevRoHn30Ua688soqrT9jxgzatWvHk08+Sbdu3Zg8eTLXXHMNTz/9dLWDbWxCQ+yc37EZc8o7JVD1NRGRxufxxx/HZrNx9913+5eVlJQwadIkEhMTiYqK4uqrryYzM9O6IE+jID+XSFupOaMSHRGRagmp7wMsXbqU4cOHV1o2atSoSjecE5WWllJaWuqfz8szf83yeDx4PJ5qx1CxTU22PZsLOyXy9MbB/Nk5E8e+FXgObYX4dnV+HGm86vP8EgFrz7Gmfl6vXLmSf/7zn/Tu3bvS8t///vd89tlnvP/++8TGxjJ58mSuuuoqlixZYlGkp+bJPwyA1+7CERplcTQiIk1LvSc6GRkZJCcnV1qWnJxMXl4excXFhIeHn7TN9OnTmTZt2knL582bR0RERI1jSU9Pr/G2p+N1w2HiWeLtwYWO9Wz/aDpbU8fX+XGk8auP80vkeFacY0VFRQ1+zLpSUFDAhAkT+Ne//sWjjz7qX56bm8u///1vZs6cyaWXXgrA66+/Trdu3Vi2bBnnnXeeVSGfrOAQAJ6wRBw2m8XBiIg0LfWe6NTE1KlTmTJlin8+Ly+PtLQ0Ro4cSUxMTLX35/F4SE9PZ8SIETidzroMFYB3Dy5jdsb5XOhYT1f3OjqO/ifohhQ06vv8ErHyHKsoUW+KJk2axOWXX87w4cMrJTqrV6/G4/FUqm3QtWtXWrduzdKlS0+Z6FhV08BeZHZy4w1PbPKla9KwVNtA6pPV51dVj1vviU5KSspJ9Z4zMzOJiYk5ZWkOgMvlwuVynbTc6XTW6iZf2+1P59Juybx6YCClttdxZf+E89B6aNW/zo8jjVt9nV8iFaw4x5rqOT1r1izWrFnDypUrT3otIyOD0NBQ4uLiKi1PTk4mIyPjlPuzqqaBs+QwhEB2iZ01c+fW+DgSvFTbQOqTVedXVWsb1HuiM3jwYOae8OGcnp7O4MGD6/vQDebSrkk8Nz+MdN8ArrAthh/eVaIjImKRvXv38rvf/Y709HTCwsLqZJ9W1DQocpexeYV5/0xq25UxY8bULHgJSqptIPXJ6vOrqrUNqp3oFBQUsH37dv/8zp07WbduHQkJCbRu3ZqpU6eyf/9+3nrrLQDuvPNOXnjhBe677z5++ctfsmDBAt577z0+++yz6h660erdMpZmUaF8UDSEK0IXw4YPYdRj4NAHi4hIQ1u9ejWHDh3inHPO8S/zer0sWrSIF154gS+//BK3201OTk6lUp3MzExSUlJOuU8rahrk5XtItJlj6ITGpmDTl1WpAdU2kPpk1flV1WNWu3vpVatW0a9fP/r16wfAlClT6NevHw8++CAABw8eZM+ePf7127Vrx2effUZ6ejp9+vThySef5NVXX2XUqFHVPXSjZbfbuKhzEt/6elEYEg9FR2DH11aHJSISlIYNG8b69etZt26dfxowYAATJkzwP3c6ncyfP9+/zZYtW9izZ0+jqm1w/Bg6tih1LS0iUl3VLtG5+OKLMQzjtK+/8cYbp9xm7dq11T1Uk3Jp1yQ+XLOPL2xDuZpPzeprnUdaHZaISNCJjo6mZ8+elZZFRkaSmJjoX37rrbcyZcoUEhISiImJ4a677mLw4MGNqse17EI3iVQMFqpER0SkuqpdoiOndkHnZoTYbbxZMMhcsPkzKM23NigRETmlp59+miuuuIKrr76aCy+8kJSUFD766COrw6okq9BNM1tFotPM2mBERJqgRtm9dFMUE+ZkQNt4lv3UntyINsQW7YZNn0LfG60OTUQk6C1cuLDSfFhYGC+++CIvvviiNQFVQXZhqb+Njkp0RESqTyU6dejSrkmAjfSQi8wFP8yyNB4REWm6sgpKSKC8ZoASHRGRalOiU4cu7ZoMwMtZ5V1L//QN5B20MCIREWmqinOzcdq85kyEqq6JiFSXEp061KF5JK0TItjhbc7RxH6AARs+sDosERFpgrz5hwBwh0RDSKjF0YiIND1KdOqQzWYrr7
4GC8MuNRf+8K6FEYmISFNlFB4GwBOWaHEkIiJNkxKdOnZJeaLz8qHeGHYnZKyHla/CGbrkFhEROZGjOAsAn6qtiYjUiBKdOjaoXQLhTgdb850c7XKdufCzP8C7N0FhlrXBiYhIk+EqNe8Z9qgkiyMREWmalOjUsTCng6EdzV/fZjb7HYx8FOxO2PwpvDwEdiywOEIREWnsSsu8RHmPAhASo0RHRKQmlOjUg4p2OvO3HIEhd8Ht86FZZyjIgLevhC//H5SVWhyliIg0VkcLPSRiDhYaqkRHRKRGlOjUg0u6muMdrNubQ1ZBKaT2gTu+gQG3missfQH+dSkc2mRhlCIi0lhlFZaSaDMTHZuqromI1IgSnXqQGhtOt9QYDAO+2Wr2mkNoBFzxFNw4CyISIXMDvHIxrPiXOioQEZFKsgvd/kSHSHVGICJSE0p06sml5aU6CzYfqvxCl9Hw66XQcTiUlcDce2DmdVBw6BR7ERGRYJRd6KYZueZMZHNrgxERaaKU6NSTinY6i7Yepszrq/xidDJM+ABGPwEOF2ybZ3ZUsHWeBZGKiEhjk1VwfImOEh0RkZpQolNP+qbFEx/hJK+kjMXbj5y8gs0Gg34FdyyEpB5QeBhmXgtz7wVPcYPHKyIijUdOfiFxtkJzRomOiEiNKNGpJw67jXF9WwLw4tfbMU7XDie5O9y+AM77jTm/4hWz7U7G+oYJVEREGp2SPLN9pw8HhMVZG4yISBOlRKce3XlRB0IddlbuOsqyn7JPv6IzDC6bDjd9CFHJcHiz2Svb0hfB5zv9diIiEpC8+ZkAlITGg123ahGRmtCnZz1KiQ3jhnPTAHh2/tazb9BxOPz6O+gyBrxu+PLP8PpoyNxYz5GKiEijUmiW6JSFJ1ociIhI06VEp57deVEHnA4by37KZvlPWWffILIZ3DATrngaQqNg7zL454WQ/iC4C+s/YBERsZyj2Lxf+MLVtbSISE0p0alnLeLCuW6AWarz3IJtVdvIZoMBv4RJy6HbWPCVwZJn4cVBsOXzeoxWREQag9BSM9GxR6sjAhGRmlKi0wB+fXEHQuw2lmzPYtWuM7TVOVFsK7j+P3DjuxDbGnL3wn9vgFkTIHdf/QUsIiKWKfP6iPQcBcAZk2xxNCIiTZcSnQbQKj6Ca/q3AuC5Bdurv4Mul8GkZTD0brCHwOZP4YVz4bsXwFtWt8GKiIilcoo9JGKOoRMak2RxNCIiTZcSnQYy6ZKOOOw2Fm09zNo9R6u/g9BIGDENfvUttB4MnkKY9//Mrqj3rqzzeGsk+ydY/gp8PV3dY4uI1FB24bHBQh3RSnRERGoqxOoAgkVaQgRX9WvJ+6v38dz8bbz+i3NrtqPk7jBxLqx7B9IfgMz18O8R0H8iDH8IwuPrNO4z8pTA7sWw7SvYng5Zx5VWffM4pPaBvjdBr2sgIqHh4hJpbAwDPEXmYMAVU1mxeQ15iqCs5BTLjz23uwvps2cXjs/SwW47ed/HZk75FICxz4JDH/lNQVaBm0RbrjmjwUJFRGpMd70GNPnSjny0dj9fbznM93tz6JMWV7Md2e1wzv+Z3VCnP2AmPatfN6u0jfor9LrW7NCgPhzdBdvSzWnXt+aXNH9cIWZpU1gsbP0SDn5vTvP+nxlrv5ugw6Vgd9RPbCINxVMCxdlQlHXclA2FR05eVvHcW1rjwzmAtgBV6LjxtK54uhYbS0PKLnTTp7xER4mOiEjNKdFpQG0SIxnXtwUfrdnP8wu28eotA2u3w8hEGP8S9P05fDoFjmyBj26HtW/D5U9Bs061D7qsFHZ/ZyY229PhyAnjAUWnmuP/dBoJ7S+GsBhzeWEWrH8f1v3HrMb24xxzik6FPjeYJT3NOtY+PpG65C2DgkzI218+HSif9kPufijIMJMXd0HNj+EIBWc4hISbgwU7IyAkzFzmDD/hubmO1x7K1u0/0blzZxx2B1T6HaN8ptKPG8c9r1huU03lpiK7oMTfRodIdS8tIl
JTSnQa2ORLOjJn7X6+2nSIDftz6dkytvY7bXs+3LkYlj4P3zwBOxfBy0OgxTnHvkhV+uIUXnlZxfzxX7AObTKTm52LzPZAFWwOaH1eeXIzApJ7nrr0KDIRzrvTnA7+YJY6/fAe5B+ExU+bU9og6DsBelx5LEGSwFWUbVZvzNoOOXvNkklHKDhc4HBCiOuE587y+VAICT22bkjosX0aBmCc8Him1wwoya2cxOTuO/a8IAMMX9Xej80BEYnHTQknzJdPkeWP4fHmdVaDEk2fx8PWgrl0PH8MDqez2ttL05KXn0u4zW3OqERHRKTGlOg0sPbNo/hZnxbMWXeA5+Zv45WbB9TNjkNC4YI/QI+rYO69ZunL3mV1s++oZOg4wkxs2l8M4XHV2z61tzmNeBi2fgFr3ymPb7k5ffEn6D7OTHraDDW/AEvT5Ck2O6XI2g5HtkHWjmPJTXE1ula3kj0EoltAzHFTbCvzMTr1WAITFlt/VUQlqLlzMwHw2MNwhkZaHI2ISNOlRMcCky/tyMffH2Dej5n8eCCP7i3qsDQjoR1MeB/2r4H8A+UNnIuOeyyp3Ci6rPiEdUrMEpyo5ONKbXrVTfIR4jITmu7jID8Dvp9llvQc2Qrf/9ecYsq/UIa4zBKmEFd5SVTFfNgJz4+bd4aZHSDEt619rHJqPm95W5TD5vl1ZPuxRCZrhznW00mt4I8T0xISOxz7H5W5wXvcVFYKXo/ZnsXrPsXr7uPauhxfZct28qN/lRNeC408dp4dn8TEtDDji0xSsi2WKss/DEBJaAIqvxMRqTklOhbomBTNFb1b8L/vD/D8gm28fFP/uj2AzQat+gN1vN+6FJ0C598NQ38H+1aZ7Yo2fAR5+8ypNpJ6QJfR0HUMpPbTl9Yz8Xmh+Gh5I/ojZgJTeOSE+SzzseiImeScKZEBs6QjsRMkdiyfOpjtxRLam0mGiJyRrdBMdMrCEy2ORESkaVOiY5G7Lu3I/74/wOcbMtiSkU+XlGirQ7KGzQZpA83pssdh3wpwF5Z3t1tiPpaVHvdYfML8ceuV5Jq9vB3aaE7f/sOsatRlNHS5HNpdYJb+WM3nhYPrYMcC2LHQfB6RYJYyxLaC2Jblj2lmCUNsq5pVk/J5zZKzvP3l7VDKG9Tn7St/3G8mMFVtk+JnM9ubRCWbSUxiRzORqUhsIhJVpUukFhzFRwDwRagjAhGR2lCiY5HOydGM6ZXC3PUZPL9gGy/8/ByrQ7JeaITZBqg2irLNThS2fAbb55udH6x6zZxCo6HjMOh6uVklryHHHDq6G376GnZ8DT8thJKcyq+7CyBnz+m3D40qr2JVkQSVT9Gp5rYnJjC5+833bnirFl94PEQ0Mxs+RyaajxHNzB6fIpsd91ozCE/QeCwi9Si01GzPZo/SYKEiIrWhbysWuuvSTsxdn8Fn6w/yu8x8OiUHaalOXYpIgD7Xm1NZqdlr3ObPYMvnZo9aFd1c20OgzRCzpKfLaIhvU7dxlOSZ4wzt+NosucneUfl1Vwy0uxA6XGKOPVRaYLZv8Ze87Ds2FZd3Z3x4szlVR0XD+tiW5UlSy/KSo/L56FTzb+ZQSwCRxsDnM4jwZIMDnDFKdEREakOJjoW6pcYwqkcyX27M5IWvt/PsDf2sDimwhLjMkptOI8xxhQ6sNUt6Ns+Fw5vMJGjnIvjij2Y32a0GgCvaLPlxRZU/P/4xqvJ8iMtfRctmeLHtWwm7vzUTm30rK5em2BzQaqCZ2HS41Oz6+6RSkUGnfh/uovLkZ69ZUpNb3o4pdx/kHTTjqijpOTGZiUrWAK0iTUheiYeE8jF0wuKSLY5GRKRpU6Jjsd8O68SXGzP53/cH+O2wTnRoHmV1SIHJbjc7aGjVH4Y9aHaBvOVzM+nZ8x1kbjCnau0zBEKjCAmNYnRhNiHriiq/ntDhWGLT9nyznU1NhEaYbWDqYgBYEWnUsgvdJJILQEi0Eh0RkdpQomOxHi1iGd4tma82ZfLigu08dX
1fq0MKDgntYfAkcyrKhm3z4OguKM03J3eBWZ3MXXDyvKc8ofGVQUkOtpIcnIARFout/cXQ/hIzwVE31yJSTdmFbhJtZokOkeqMQESkNpToNAK/G9aJrzZlMmfdfn47rBNtm6kL3gYVkQB9bqj6+j5vpcSnrDCHxUuXMfSqX+F0hdVfnCIS8LIK3bTxJzrNrQ1GRKSJ0wAjjUCvVrFc2jUJnwEvfL3d6nDkbOwOsxpabEto3gWj5TnkRrRVWxgRqbXsghJ/Gx0lOiIitaNEp5H47TCz/cXstfvZk1V0lrVFRCQQFeUexmErH5Q3QgOGiojUhhKdRqJvWhwXdW6O12fwokp1RESCUmnOIQCKQ2LU7buISC0p0WlEKkp1Plyzj73ZNSvVOVJQyuy1+5j60XoWbM6sy/BERKSe+fLNz+2S0ASLIxERafrUGUEj0r9NPBd0asa3247w8jc7+OuVvc66jcfrY+2eHL7ZeohFW4+wfn+u/7X3Vu3l5QnnMLJHSn2GLSIidcQoOgJAWZh6XBMRqS0lOo3Mb4d14tttR3h/1V4mXdKRlnHhJ62z72gRi7Ye4Zuth/huexb5pWWVXu/RIoaYMCdLf8pi8sy1vHrLAC7srEatIiKNXUixmegY6lpaRKTWlOg0MgPbJjC4fSJLf8pixsIdPDK+JyUeL8t3ZvPNlsN8s/UQOw4XVtomPsLJhZ2bc1Hn5pzfqRlJ0WGUeX3c9d+1fL4hgzveXsVbvxzEue1UFUJEpDFzlWYDYI/Sj1MiIrWlRKcR+t3wTix9JYt3V+5lV1YhK3ZmU1rm87/usNvoV955wYWdm9OzZSwOu63SPkIcdp69oR8lb6/i6y2H+eUbK/nPbYPomxbXwO9GRESqwjAMwj1HwQ7OmGSrwxERafKU6DRC57VP5Nx2CazYmc2328xqDKmxYVxUXmozpGMzYsPP3htPaIidl2/qzy9eX8nSn7K45bUVzLrjPLqlxtT3WxARkWoqdHuJN8x2luFxSnRERGpLiU4jNf2qXrz09Q66pUZzUefmdEyKwmaznX3DE4Q5Hbx6ywD+79/LWbMnh//793Le/dVgOjSPqoeoRUSkprIL3CTazEQnNFaJjohIbal76UaqQ/MonryuD7dd0J5OydE1SnIqRLpCeP0X59I9NYYjBW4m/Gt5jbuvFhGR+pFd5CaRPHMmUm10RERqS4lOkIgNd/L2refSKSmKjLwSfv7qMjJyS6wOS0REymUXltLMpkRHRKSu1CjRefHFF2nbti1hYWEMGjSIFStWnHbdN954A5vNVmkKCwurccBSc4lRLv5z2yDaJEawN7uYCa8u40hBqdVhiYgIcDS3gBhbeWm7upcWEam1aic67777LlOmTOGhhx5izZo19OnTh1GjRnHo0KHTbhMTE8PBgwf90+7du2sVtNRcckwY79w2iBaxYew4XMj//XsFuUUeq8MSEQl6xTmZAHhxQFictcGIiASAaic6Tz31FLfffju/+MUv6N69OzNmzCAiIoLXXnvttNvYbDZSUlL8U3KyGllaqVV8BP+5bRDNolxsOpjHLa+voOCEQUdFRKRhufPMRKfQGQ+1aJcpIiKmaiU6breb1atXM3z48GM7sNsZPnw4S5cuPe12BQUFtGnThrS0NMaNG8fGjRtrHrHUifbNo/jPbecSF+Fk3d4cbn1jJcVur9VhiYgELW/+YQBKXYkWRyIiEhiq1b30kSNH8Hq9J5XIJCcns3nz5lNu06VLF1577TV69+5Nbm4u//jHPxgyZAgbN26kVatWp9ymtLSU0tJjbUfy8szGmR6PB4+n+tWsKrapybaBrENiOK/dfA43v76a5Tuz+dXbK3np5/1whaiPiurQ+SX1zcpzTOd1w7EVmYlOWZgSHRGRulDv4+gMHjyYwYMH++eHDBlCt27d+Oc//8kjjzxyym2mT5/OtGnTTlo+b948IiIiahxLenp6jbcNZL/sCDM2OVi0LYufPz+PiZ19OF
Rrotp0fkl9s+IcKypSV/QNxVGcBYARoY4IRETqQrUSnWbNmuFwOMjMzKy0PDMzk5SUlCrtw+l00q9fP7Zv337adaZOncqUKVP883l5eaSlpTFy5EhiYmKqEzJg/iKZnp7OiBEjcDqd1d4+GJyzI4vb317DD9l2Fha15O9X98RuV7ZTFTq/pL5ZeY5VlKhL/XOVmomOI1pdS4uI1IVqJTqhoaH079+f+fPnM378eAB8Ph/z589n8uTJVdqH1+tl/fr1jBkz5rTruFwuXC7XScudTmetbvK13T6QXdw1hZcn9OfO/6zmkx8OEhbq4LEre+F0qBpbVen8kvpmxTmmc7rhRHiOgg2cMeqwR0SkLlT7W+yUKVP417/+xZtvvsmmTZv49a9/TWFhIb/4xS8AuPnmm5k6dap//Ycffph58+bx008/sWbNGm666SZ2797NbbfdVnfvQurE8O7JPH19X2w2eG/VPn7+r2UcytOgoiIi9a3E4yXWlwNAWFzVakiIiMiZVbuNzvXXX8/hw4d58MEHycjIoG/fvnzxxRf+Dgr27NmD3X4sfzp69Ci33347GRkZxMfH079/f7777ju6d+9ed+9C6szYPi1whdj5w3vfs3LXUS5/fjEvTTiHgW0TrA5NRCRgZRe6SbSZ1QTD41SiIyJSF2rUGcHkyZNPW1Vt4cKFleaffvppnn766ZocRiwyskcKH0+O4s7/rGZrZgE3vrKMP4/pxi+GtsWmsR1EROrc8YmOLUptdERE6oIaYMgptW8exZxJQxnbpwVlPoOHP/2R381aR5FbA4uKiNS17IJSmlHe8UOkEh0RkbqgREdOKyI0hOdu6MtDY7sTYrfxyfcHuPLF7/jpcIHVoYmIBJS83KO4bOVjFql7aRGROqFER87IZrPxi6Ht+O8d59E82sWWzHzGvbCEeRszrA5NRCRgFOWYn6kltnAIrfl4cSIicowSHamSgW0T+Oyu8xnYNp780jLueHs1T3yxGa/PsDo0EZEmz5Nnjk9X5Iy3OBIRkcChREeqLCkmjJm3n8cvh7YD4KWFO7jltRVkFZRaHJmISNPmyz8EQKkr0eJIREQChxIdqRanw86DY7vz3I39CHc6WLz9CGOfX8y6vTlWhyYi0nQVHgGgLFyJjohIXVGiIzXysz4t+HjyUNo3i+RAbgnXzVjKzOV7MAxVZRMRqa6Q4iwAjAj1uCYiUleU6EiNdU6OZs7koYzsnozb6+PPs9fzxw9/oMTjtTo0EZEmxeU2Ex1HtBIdEZG6okRHaiUmzMk//68/f7ysK3YbvLdqH1e99B3Lf8qyOjQRCVLTp09n4MCBREdHk5SUxPjx49myZUuldUpKSpg0aRKJiYlERUVx9dVXk5mZaVHEEOE5CkBoTJJlMYiIBBolOlJrNpuNX1/cgbdvHURCZCg/Hszj+leWcdOry1mz56jV4YlIkPnmm2+YNGkSy5YtIz09HY/Hw8iRIyksLPSv8/vf/57//e9/vP/++3zzzTccOHCAq666ypJ4PV4fMd4cAMLiUiyJQUQkEIVYHYAEjqEdm/HF7y7guQXbeHflXhZvP8Li7Ue4pEtzpozoQq9WsVaHKCJB4Isvvqg0/8Ybb5CUlMTq1au58MILyc3N5d///jczZ87k0ksvBeD111+nW7duLFu2jPPOO69B4z1a5CbRlgdARHxqgx5bRCSQKdGROpUUE8aj43vxqws78PyCbXy4Zj9fbznM11sOM7J7Mr8f0ZluqTFWhykiQSQ3NxeAhIQEAFavXo3H42H48OH+dbp27Urr1q1ZunTpKROd0tJSSkuPdaWfl2cmJh6PB4/HU+2YKrbxeDwcyikhpTzR8YXH46vB/kROdPw5JlLXrD6/qnpcJTpSL9ISInjimj785uKOPDt/G3PW7Wfej5nM+zGTy3un8vvhneiYFG11mCIS4Hw+H3fffTdDhw6lZ8+eAGRkZBAaGkpcXFyldZOTk8nIyDjlfqZPn860adNOWj
5v3jwiIiJqHF96ejrbcgymkA/A/KXfU+rcVeP9iZwoPT3d6hAkgFl1fhUVFVVpPSU6Uq/aNovk6ev7MumSDjz91TY+++Egn/1wkM/XH2Rc35b8blgn2jaLtDpMEQlQkyZNYsOGDSxevLhW+5k6dSpTpkzxz+fl5ZGWlsbIkSOJial+KbXH4yE9PZ0RI0YQ+v1W7DsNfNgYNvZasOvWLLV3/DnmdDqtDkcCjNXnV0Wp+tno01QaRMekaF78+TlMviSPp9O3Mu/HTGav3c8n3x/g6nNactelnUhLqPmvoiIiJ5o8eTKffvopixYtolWrVv7lKSkpuN1ucnJyKpXqZGZmkpJy6s4AXC4XLpfrpOVOp7NWN3mn04kn/zAAhfYYol3hNd6XyKnU9hwVOROrzq+qHlO9rkmD6pYawys3D+B/k8/nki7N8foM3lu1j0ufXMj9c9aTkVtidYgi0sQZhsHkyZOZPXs2CxYsoF27dpVe79+/P06nk/nz5/uXbdmyhT179jB48OCGDhd37iEAipzxDX5sEZFAphIdsUSvVrG8/otzWb37KE+nb2Xx9iP8Z9ke3lu1j5d+fg7DuydbHaKINFGTJk1i5syZfPzxx0RHR/vb3cTGxhIeHk5sbCy33norU6ZMISEhgZiYGO666y4GDx7c4D2uAfgKzBIdtyuxwY8tIhLIVKIjlurfJp7/3DaId+84jwFt4nGX+fjTR+vJLVYvMSJSMy+//DK5ublcfPHFpKam+qd3333Xv87TTz/NFVdcwdVXX82FF15ISkoKH330kSXx2orMRKcsXImOiEhdUqIjjcKg9om8c/sgOjSP5EhBKU98sdnqkESkiTIM45TTxIkT/euEhYXx4osvkp2dTWFhIR999NFp2+fUN0fxETPuiGaWHF9EJFAp0ZFGwxXi4LErewHwzvI9rN591OKIRETqX1hpNgCO6CSLIxERCSxKdKRROa99Itf2N3tH+n+z1+Px+iyOSESkfkWUmT/qOGPUNlFEpC4p0ZFGZ+qYbsRHONmckc9ri3daHY6ISL3x+QxivDkARMRbU3VORCRQKdGRRichMpQ/j+kGwNNfbWVvdtVGvxURaWpySzwkYA58F5mgREdEpC4p0ZFG6Zr+rRjULoESj48HP96AYRhWhyQiUueyCz0k2sxER1XXRETqlhIdaZRsNhuPXdkLp8PG11sO8/mGDKtDEhGpczm5eUTbis2ZSPW6JiJSl5ToSKPVMSmKX1/cEYC/fLKRvBKNrSMigaXgqPkjjocQcMVYHI2ISGBRoiON2m8u7kC7ZpEcyi/lyS+3WB2OiEidKs09BEC+Ix5sNoujEREJLEp0pFELczp4dHxPAN5atpvv9+ZYG5CISB0qyzMTnSJnvMWRiIgEHiU60ugN7diMK/u1xDBg6kfrKdPYOiISIHyFhwFwuxItjkREJPAo0ZEm4f9d3o3YcCc/Hszjje92WR2OiEidsBUdAaAsXImOiEhdU6IjTUKzKBdTR3cF4Kn0rRzIKbY4IhGR2nOWZJlPIptbG4iISABSoiNNxnUD0hjYNp4it5eHPtlodTgiIrXmKs0GwBGtREdEpK4p0ZEmw243x9YJsdtI/zGTLzdqbB0Radoiy44CEKrBQkVE6pwSHWlSOidH86uL2gPm2DoFpWUWRyQiUjOGATG+HADC45XoiIjUNSU60uTcdWknWidEcDC3hKfTt1odjohIjZR6IYE8AKISWlgcjYhI4FGiI01OmNPBI+Vj67y+ZCcb9udaHJGISPUVeAwSMT+/wuJUoiMiUteU6EiTdFHn5ozt0wKfAX+evR6vz7A6JBGRanGXFhFq85ozEc2sDUZEJAAp0ZEm64EruhEdFsIP+3L5z7LdVocjIlI9pfkAFNoiwBlmcTAiIoFHiY40WUnRYfzxMnNsnb9/uYWM3BKLIxIRqTqb22yfk++IszYQEZEApURHmrSfn9uafq3jKCgt4+FPNbaOiDQdIeWJTrEzwe
JIREQCkxIdadLsdht/vbIXDruNueszeH7+NhZtPcz2QwUUu71WhyciclqhZWbVNbdLiY6ISH0IsToAkdrqlhrDbee345+LfuLJE7qbTogMpUVcGC1iw2kRF07LuPLH+HBaxIXRLNKF3W6zKHIRCWauMrNEpyxcHRGIiNQHJToSEH4/ojPhoQ6+35vDgZwS9ucUU1BaRnahm+xCNxv2551yu1CHndTyRKh980j6pMXRNy2ODs2jcCgBEpF6FOE1P5dskc0tjkREJDAp0ZGAEOZ0cPfwzv55wzDIKynjQE6xf9qfU1L+aM5n5pXg9vrYnVXE7qwilv6UxTvL9wAQ5QqhV8tYf+LTNy2OlFj1iiQidSfSZyY6jmglOiIi9UGJjgQkm81GbLiT2HAn3VJjTrmOx+sjM6+kvASoiM0H81m3N4f1+3MpKC1j6U9ZLP0py79+coyLPq3i6Ns6jr6t4ujVKpboMGdDvSURCTAxvjywQWisBgsVEakPSnQkaDkddlrFR9AqPgJIgH7m8jKvj+2HC/h+bw7r9uawbm8uWzLyyMwrZd6Pmcz7MRMAmw06No+iV8sYQnNtDC5ykxSrxEdEqiaOXADC41MsjkREJDAp0RE5QYjDTteUGLqmxHD9wNYAFLnL2LA/z0x+9uWwbk8O+3OK2XaogG2HCgAHHz7xDRd1TuKqc1pyadckwpwOa9+IiDRaJR4vCZhV16ISUi2ORkQkMCnREamCiNAQzm2XwLntjnUDezi/lB/25bB6VzafrNrBvkL4alMmX23KJDoshMt7pTK+X0vObZugnt1EpJLs/CLa2AoAiFSJjohIvVCiI1JDzaNdDOuWzIUdE+ji3kqn/hfyvw2ZfLx2PwdyS5i1ci+zVu6lZVw44/q24Mp+LemUHG112CLSCORlHQLAix1HhMbRERGpD0p0ROpIp+Qo/tgqnntHdmH5zmzmrN3P3PUH2Z9TzEsLd/DSwh30bBnD+L4t+VnfFiRFqxc3kWBVlJMBQJ4thni7qrmKiNQHe002evHFF2nbti1hYWEMGjSIFStWnHH9999/n65duxIWFkavXr2YO3dujYIVaQrsdhuDOyTyt2t6s/L+4bz483MY3i2JELuNDfvzePSzTZz31/nc/NoK5qzdT5G7zOqQRaSBleSanZrkO+KsDUREJIBVu0Tn3XffZcqUKcyYMYNBgwbxzDPPMGrUKLZs2UJSUtJJ63/33XfceOONTJ8+nSuuuIKZM2cyfvx41qxZQ8+ePevkTYg0VmFOB5f3TuXy3qlkFZTy2fqDzF67n7V7cli09TCLth4mNMROfISTcKeD8NAQwp12wkMdhDtDCA91EOF0EB7qIMzpICLUQbjTQdhxy2PCnMRFVEyhRIY6sNnUJkikMSvLM6uuFTvjLY5ERCRwVTvReeqpp7j99tv5xS9+AcCMGTP47LPPeO211/jTn/500vrPPvssl112Gffeey8AjzzyCOnp6bzwwgvMmDGjluGLNB2JUS5uHtyWmwe3ZeeRQuas3c+cdfvZnVVEZl5pnR3H6bARGx5KfHnyc/zzuIhQ87F8WVioA7vNhg3MR5vZbbbdZvPP223muETHr2MvT6QMA3yGgdcwMAwDX/m8z1f+eNwy/+s+8zHEYcNhtxFir3i0++dDHJXnHY7K63m8PkrLfJSWeSn1HPe8zFc+X/68zEep57jnZV5s2HCF2HE57YSFOCo9ukIchJU/ukLshDkrP6pTCakrRuFhAEpdap8jIlJfqpXouN1uVq9ezdSpU/3L7HY7w4cPZ+nSpafcZunSpUyZMqXSslGjRjFnzpzTHqe0tJTS0mNf/PLyzC44PR4PHo+nOiH7tzv+UaQu1eT8ahUbyuSL2zHporbsyS6moLSMYo/XnNxeij2+8kcvJf5lJy8vcnvJK/GQW1zG0SI3Hq+Bx2twpKCUIwV1lzyJyWG3YfcnghWJ3/HPT3zt2LytfB7AxvHP4VgB3OmWQ36+g+e2Lw
ZsGAaAmTAaBhgY5Y9AeUJpAIZhlC+Dr/9wAU5H9Wsr63OzftiLjgDgDW9mcSQiIoGrWonOkSNH8Hq9JCdXHsU5OTmZzZs3n3KbjIyMU66fkZFx2uNMnz6dadOmnbR83rx5REREVCfkStLT02u8rcjZ1OX55SyfYk73wikYBrh9UFQGhWVQVGY77jkUeWzHnpeZzz2+ii/E5Y/HPz9hGYDvuOeGwbESIMqfc9x8xfOK5SesY5b0gNc49txngJfjXztzCYoNgxA7OG2Yj/bjHm3gtBuV5kPKv+eX+cz37vFBmWE79txn/g3LfOAxzGW+42Lw+gy85rs/Y1z1wwbFRTXe+vPPv/C//+ooKqr5MeX0+jcrgyOQ1irN6lBERAJWo+x1berUqZVKgfLy8khLS2PkyJHExJz01e+sPB4P6enpjBgxAqdTI9dL3dL5Vb98PoMyn4G3/LHM5yPEbscVYsfpsNV7e6Qyrw+310eJx3w0DPzV8PxV9nwV1fM4obre8VX7zP35S1k4VhpT8ZzTLPeUlbFmzRoG9O+PMyTEn2BWlAyZyaPtWDJ5/PPydbqlRNeo6l1FibrUrYhL7+U7TxsGDrjG6lBERAJWtRKdZs2a4XA4yMzMrLQ8MzOTlJRTD3iWkpJSrfUBXC4XLpfrpOVOp7NWXyRru73Imej8qj8nfxo0HKcTwoFYC2PweDwU7jC4oHNSg59jOqfrSXxbDsf0hIT2VkciIhKwqlWRITQ0lP79+zN//nz/Mp/Px/z58xk8ePAptxk8eHCl9cGs4nO69UVERERERGqr2lXXpkyZwi233MKAAQM499xzeeaZZygsLPT3wnbzzTfTsmVLpk+fDsDvfvc7LrroIp588kkuv/xyZs2axapVq3jllVfq9p2IiIiIiIiUq3aic/3113P48GEefPBBMjIy6Nu3L1988YW/w4E9e/Zgtx8rKBoyZAgzZ87k/vvv589//jOdOnVizpw5GkNHRERERETqTY06I5g8eTKTJ08+5WsLFy48adm1117LtddeW5NDiYiIiIiIVFsNOhsVERERERFp3JToiIiIiIhIwFGiIyIiIiIiAUeJjoiIiIiIBBwlOiIiIiIiEnCU6IiIiIiISMBRoiMiIiIiIgFHiY6IiIiIiAQcJToiIiIiIhJwlOiIiIiIiEjACbE6gKowDAOAvLy8Gm3v8XgoKioiLy8Pp9NZl6GJ6PySemflOVbxuVvxOSwm3ZeksdM5JvXJ6vOrqvemJpHo5OfnA5CWlmZxJCIiwSk/P5/Y2Firw2g0dF8SEbHe2e5NNqMJ/Ezn8/k4cOAA0dHR2Gy2am+fl5dHWloae/fuJSYmph4ilGCm80vqm5XnmGEY5Ofn06JFC+x21XauoPuSNHY6x6Q+WX1+VfXe1CRKdOx2O61atar1fmJiYnSxS73R+SX1zapzTCU5J9N9SZoKnWNSn6w8v6pyb9LPcyIiIiIiEnCU6IiIiIiISMAJikTH5XLx0EMP4XK5rA5FApDOL6lvOscCj/6nUt90jkl9airnV5PojEBERERERKQ6gqJER0REREREgosSHRERERERCThBlejYbDbmzJljdRgSoHR+SUPatWsXNpuNdevWWR2K1JI+O6Q+6fyShtTY7k0Bl+i8+OKLtG3blrCwMAYNGsSKFSusDkkCxF/+8hdsNlulqWvXrlaHJU3UokWLGDt2LC1atDjlFxHDMHjwwQdJTU0lPDyc4cOHs23bNmuClVrTvUnqi+5NUpcC7d4UUInOu+++y5QpU3jooYdYs2YNffr0YdSoURw6dMjq0CRA9OjRg4MHD/qnxYsXWx2SNFGFhYX06dOHF1988ZSvP/HEEzz33HPMmDGD5cuXExkZyahRoygpKWngSKW2dG+S+qZ7k9SVQLs3BVSi89RTT3H77bfzi1/8gu7duzNjxgwiIiJ47bXXTrn+Qw89RGpqKj/88EMDRypNVUhICCkpKf6pWbNmp11X55ecyejRo3
n00Ue58sorT3rNMAyeeeYZ7r//fsaNG0fv3r156623OHDgwGmroHi9Xn75y1/StWtX9uzZU8/RS3Xo3iT1TfcmqSuBdm8KmETH7XazevVqhg8f7l9mt9sZPnw4S5curbSuYRjcddddvPXWW3z77bf07t27ocOVJmrbtm20aNGC9u3bM2HChFNetDq/pLZ27txJRkZGpc+z2NhYBg0adNLnGUBpaSnXXnst69at49tvv6V169YNGa6cge5N0hB0b5KG0BTvTSENfsR6cuTIEbxeL8nJyZWWJycns3nzZv98WVkZN910E2vXrmXx4sW0bNmyoUOVJmrQoEG88cYbdOnShYMHDzJt2jQuuOACNmzYQHR0NKDzS+pGRkYGwCk/zypeq1BQUMDll19OaWkpX3/9NbGxsQ0Wp5yd7k1S33RvkobSFO9NAZPoVNXvf/97XC4Xy5YtO2PRrsiJRo8e7X/eu3dvBg0aRJs2bXjvvfe49dZbAZ1f0vBuvPFGWrVqxYIFCwgPD7c6HKkhfXZITeneJI1RY7k3BUzVtWbNmuFwOMjMzKy0PDMzk5SUFP/8iBEj2L9/P19++WVDhygBJi4ujs6dO7N9+3b/Mp1fUhcqPrPO9nkGMGbMGH744YdTVhsQ6+neJA1N9yapL03x3hQwiU5oaCj9+/dn/vz5/mU+n4/58+czePBg/7Kf/exnzJw5k9tuu41Zs2ZZEaoEiIKCAnbs2EFqaqp/mc4vqQvt2rUjJSWl0udZXl4ey5cvr/R5BvDrX/+axx9/nJ/97Gd88803DR2qnIXuTdLQdG+S+tIk701GAJk1a5bhcrmMN954w/jxxx+NO+64w4iLizMyMjIMwzAMwJg9e7ZhGIbx/vvvG2FhYcb7779vYcTSlPzhD38wFi5caOzcudNYsmSJMXz4cKNZs2bGoUOHDMPQ+SXVk5+fb6xdu9ZYu3atARhPPfWUsXbtWmP37t2GYRjG448/bsTFxRkff/yx8cMPPxjjxo0z2rVrZxQXFxuGYRg7d+40AGPt2rWGYRjG008/bURFRRnffvutVW9JTkP3JqlPujdJXQq0e1NAJTqGYRjPP/+80bp1ayM0NNQ499xzjWXLlvlfO/5iNwzDePfdd42wsDDjww8/tCBSaWquv/56IzU11QgNDTVatmxpXH/99cb27dv9r+v8kur4+uuvDeCk6ZZbbjEMwzB8Pp/xwAMPGMnJyYbL5TKGDRtmbNmyxb/9iTcTwzCMJ5980oiOjjaWLFnSwO9Gzkb3JqkvujdJXQq0e5PNMAyj4cqPRERERERE6l/AtNERERERERGpoERHREREREQCjhIdEREREREJOEp0REREREQk4CjRERERERGRgKNER0REREREAo4SHRERERERCThKdEREREREJOAo0RERERERkYCjREekjkycOJHx48dbHYaIiAig+5KIEh0REREREQk4SnREqumDDz6gV69ehIeHk5iYyPDhw7n33nt58803+fjjj7HZbNhsNhYuXAjA3r17ue6664iLiyMhIYFx48axa9cu//4qfnGbNm0azZs3JyYmhjvvvBO3223NGxQRkSZF9yWRUwuxOgCRpuTgwYPceOONPPHEE1x55ZXk5+fz7bffcvPNN7Nnzx7y8vJ4/fXXAUhISMDj8TBq1CgGDx7Mt9/+/3btJxS2MIzj+G8uncx0TPIvJR0bC2lSzEZTUqfESk1ZyIZsZScLhfzbKWVJbFjYYGVBcpopmbKZWShFhLKgiFOMv3endM2tWch17vezO52nt/etU0+/87xx5ebmanx8XC0tLUqlUjIMQ5K0tbWlvLw8OY6jk5MTdXd3q6ioSBMTE995XADAP46+BGRG0AGycHFxoefnZ0WjUVmWJUkKhUKSJL/fr3Q6rbKysvf6xcVFvb6+am5uTj6fT5K0sLCggoICOY6j5uZmSZJhGJqfn1cgEFBNTY1GR0fV39+vsbEx/frF4BUA8Dn6EpAZXyqQhd
raWtm2rVAopPb2ds3Ozur6+jpjfTKZ1OHhofLz82WapkzTVGFhoR4eHnR0dPRh3UAg8P7c0NAg13V1dnb2pecBAPxs9CUgMyY6QBZycnK0ubmpnZ0dbWxsaGZmRoODg0okEp/Wu66r+vp6LS0t/fGupKTkq7cLAPA4+hKQGUEHyJLP51MkElEkEtHQ0JAsy9Lq6qoMw9DLy8uH2rq6Oi0vL6u0tFTBYDDjmslkUvf39/L7/ZKk3d1dmaapioqKLz0LAODnoy8Bn+PqGpCFRCKhyclJ7e3t6fT0VCsrK7q8vFR1dbUqKyuVSqV0cHCgq6srPT09qbOzU8XFxWpra1M8Htfx8bEcx1FfX5/Oz8/f1318fFRPT4/29/e1vr6u4eFh9fb2cg8aAPBX9CUgMyY6QBaCwaBisZimp6d1e3sry7I0NTWl1tZWhcNhOY6jcDgs13W1vb2tpqYmxWIxDQwMKBqN6u7uTuXl5bJt+8OfNNu2VVVVpcbGRqXTaXV0dGhkZOT7DgoA+BHoS0Bmvre3t7fv3gTwP+vq6tLNzY3W1ta+eysAANCX4BnMHwEAAAB4DkEHAAAAgOdwdQ0AAACA5zDRAQAAAOA5BB0AAAAAnkPQAQAAAOA5BB0AAAAAnkPQAQAAAOA5BB0AAAAAnkPQAQAAAOA5BB0AAAAAnkPQAQAAAOA5vwHlwqiQDxlTdQAAAABJRU5ErkJggg==",
            "text/plain": [
              "<Figure size 1000x500 with 2 Axes>"
            ]
          },
          "metadata": {},
          "output_type": "display_data"
        }
      ],
      "source": [
        "plot_learning_curves(history, sample_step=500)  # x-axis is training steps"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 30,
      "metadata": {
        "id": "e0JxP6FE9L8Y"
      },
      "outputs": [],
      "source": [
        "# Use a context manager so the archive handle is closed even if extraction fails\n",
        "with py7zr.SevenZipFile(r'./test.7z', 'r') as archive:\n",
        "    archive.extractall(path=r'./competitions/cifar-10/')"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 33,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "KSDxPyov9i-y",
        "outputId": "e7941401-cdbf-49cb-d88f-e8e33c45e61b"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "300000\n"
          ]
        }
      ],
      "source": [
        "!ls competitions/cifar-10/test|wc -l"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 34,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "H39dl-23-cil",
        "outputId": "6c61e8c5-e344-4074-8609-d508f496f584"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "competitions   sample_data\t     trainLabels.csv\n",
            "kaggle.json    sampleSubmission.csv  wangdao_deeplearning_train.py\n",
            "model_weights  test.7z\n",
            "__pycache__    train.7z\n"
          ]
        }
      ],
      "source": [
        "!ls"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 38,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.818553Z",
          "start_time": "2025-06-26T01:45:37.816716Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "Yvx48aMb4pNw",
        "outputId": "1d98cf1d-e767-4981-c780-625760d02dc4"
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "/usr/local/lib/python3.11/dist-packages/torch/utils/data/dataloader.py:624: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n",
            "  warnings.warn(\n"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "正在预测测试集...\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r预测进度:   0%|          | 0/2344 [00:00<?, ?it/s]/usr/local/lib/python3.11/dist-packages/torch/utils/data/dataloader.py:624: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n",
            "  warnings.warn(\n",
            "预测进度: 100%|██████████| 2344/2344 [01:59<00:00, 19.64it/s]\n"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "id列是否有重复值: False\n",
            "预测完成，结果已保存至 cifar10_submission.csv\n"
          ]
        }
      ],
      "source": [
        "# Imports needed for test-set inference\n",
        "import os\n",
        "import pandas as pd\n",
        "from PIL import Image\n",
        "import torch\n",
        "from torch.utils.data import Dataset, DataLoader\n",
        "from torchvision import transforms\n",
        "import tqdm\n",
        "\n",
        "# Dataset over the directory of CIFAR-10 test PNGs\n",
        "class CIFAR10TestDataset(Dataset):\n",
        "    \"\"\"Each item is (image_tensor, image_id); the id is the integer\n",
        "    filename stem, e.g. '123.png' -> 123.\"\"\"\n",
        "\n",
        "    def __init__(self, img_dir, transform=None):\n",
        "        \"\"\"\n",
        "        Args:\n",
        "            img_dir: directory containing the test images (*.png)\n",
        "            transform: optional torchvision transform applied per image\n",
        "        \"\"\"\n",
        "        self.img_dir = img_dir\n",
        "        self.transform = transform\n",
        "        self.img_files = [f for f in os.listdir(img_dir) if f.endswith('.png')]\n",
        "\n",
        "    def __len__(self):\n",
        "        return len(self.img_files)\n",
        "\n",
        "    def __getitem__(self, idx):\n",
        "        img_path = os.path.join(self.img_dir, self.img_files[idx])\n",
        "        image = Image.open(img_path).convert('RGB')\n",
        "\n",
        "        if self.transform:\n",
        "            image = self.transform(image)\n",
        "\n",
        "        # Image id = filename without extension ('42.png' -> 42)\n",
        "        img_id = int(os.path.splitext(self.img_files[idx])[0])\n",
        "\n",
        "        return image, img_id\n",
        "\n",
        "def predict_test_set(model, img_dir, labels_file, device, batch_size=64):\n",
        "    \"\"\"Predict the test set and write cifar10_submission.csv.\n",
        "\n",
        "    Args:\n",
        "        model: trained classification model (already moved to device)\n",
        "        img_dir: directory of test images\n",
        "        labels_file: sample-submission CSV, used to sanity-check the row count\n",
        "        device: torch device used for inference\n",
        "        batch_size: DataLoader batch size\n",
        "    \"\"\"\n",
        "    # Same normalization statistics as used for training\n",
        "    transform = transforms.Compose([\n",
        "        transforms.ToTensor(),\n",
        "        transforms.Normalize((0.4917, 0.4823, 0.4467), (0.2024, 0.1995, 0.2010))\n",
        "    ])\n",
        "\n",
        "    test_dataset = CIFAR10TestDataset(img_dir, transform=transform)\n",
        "    test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False, num_workers=4)\n",
        "\n",
        "    # Inference mode: disables dropout, uses running BN statistics\n",
        "    model.eval()\n",
        "\n",
        "    predictions = {}\n",
        "\n",
        "    print(\"正在预测测试集...\")\n",
        "    with torch.no_grad():\n",
        "        for images, img_ids in tqdm.tqdm(test_loader, desc=\"预测进度\"):\n",
        "            images = images.to(device)\n",
        "            outputs = model(images)\n",
        "            _, predicted = torch.max(outputs, 1)  # argmax over class logits\n",
        "\n",
        "            # A batch holds many images; record each prediction by its id\n",
        "            for i, img_id in enumerate(img_ids):\n",
        "                predictions[img_id.item()] = predicted[i].item()\n",
        "\n",
        "    # CIFAR-10 class names, indexed by numeric label\n",
        "    class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']\n",
        "\n",
        "    # Map numeric predictions to class-name strings\n",
        "    labeled_predictions = {img_id: class_names[pred] for img_id, pred in predictions.items()}\n",
        "\n",
        "    submission_df = pd.DataFrame({\n",
        "        'id': list(labeled_predictions.keys()),\n",
        "        'label': list(labeled_predictions.values())\n",
        "    })\n",
        "    # Kaggle expects rows ordered by id\n",
        "    submission_df = submission_df.sort_values(by='id')\n",
        "\n",
        "    # Sanity check: ids must be unique\n",
        "    has_duplicates = submission_df['id'].duplicated().any()\n",
        "    print(f\"id列是否有重复值: {has_duplicates}\")\n",
        "    # Previously the template was read and immediately overwritten (dead code);\n",
        "    # use it instead to verify the prediction count matches the expected size.\n",
        "    template_df = pd.read_csv(labels_file)\n",
        "    if len(submission_df) != len(template_df):\n",
        "        print(f\"Warning: {len(submission_df)} predictions but template has {len(template_df)} rows\")\n",
        "    # Save predictions\n",
        "    output_file = 'cifar10_submission.csv'\n",
        "    submission_df.to_csv(output_file, index=False)\n",
        "    print(f\"预测完成，结果已保存至 {output_file}\")\n",
        "\n",
        "# Run inference on the test set\n",
        "img_dir = r\"competitions/cifar-10/test\"\n",
        "labels_file = r\"./sampleSubmission.csv\"\n",
        "predict_test_set(model, img_dir, labels_file, device, batch_size=128)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 39,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "f90sLQwP_o3I",
        "outputId": "bc0d41e4-2fe6-4211-d9ab-6de98d54831d"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "id,label\n",
            "1,deer\n",
            "2,airplane\n",
            "3,automobile\n",
            "4,ship\n",
            "5,bird\n",
            "6,cat\n",
            "7,airplane\n",
            "8,deer\n",
            "9,bird\n"
          ]
        }
      ],
      "source": [
        "!head -10 cifar10_submission.csv"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 40,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "R1w9Z1W-AOgY",
        "outputId": "1d1fbcba-704f-4949-82a9-3b1a028aa06c"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "300001 cifar10_submission.csv\n"
          ]
        }
      ],
      "source": [
        "!wc -l cifar10_submission.csv"
      ]
    }
  ],
  "metadata": {
    "accelerator": "GPU",
    "colab": {
      "gpuType": "T4",
      "provenance": []
    },
    "kernelspec": {
      "display_name": "Python 3",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.12.3"
    },
    "widgets": {
      "application/vnd.jupyter.widget-state+json": {
        "12492b59648c43e4bdbe100ddb3a3702": {
          "model_module": "@jupyter-widgets/base",
          "model_module_version": "1.2.0",
          "model_name": "LayoutModel",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "1ee3d0c200c64a17a46ee92c07e0e1b7": {
          "model_module": "@jupyter-widgets/base",
          "model_module_version": "1.2.0",
          "model_name": "LayoutModel",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "32813af8951f4c2b8dd4171a5937c5a9": {
          "model_module": "@jupyter-widgets/controls",
          "model_module_version": "1.5.0",
          "model_name": "HTMLModel",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HTMLModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HTMLView",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_1ee3d0c200c64a17a46ee92c07e0e1b7",
            "placeholder": "​",
            "style": "IPY_MODEL_7ab573b1c9fd48a4bb67890514744bb0",
            "value": " 30%"
          }
        },
        "6e4a4ef865da440d92b72bcf10f8b877": {
          "model_module": "@jupyter-widgets/base",
          "model_module_version": "1.2.0",
          "model_name": "LayoutModel",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "7ab573b1c9fd48a4bb67890514744bb0": {
          "model_module": "@jupyter-widgets/controls",
          "model_module_version": "1.5.0",
          "model_name": "DescriptionStyleModel",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "DescriptionStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "description_width": ""
          }
        },
        "8a284f1425f343c7b02cf5e060519df4": {
          "model_module": "@jupyter-widgets/base",
          "model_module_version": "1.2.0",
          "model_name": "LayoutModel",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "9cb464da4e09479292889a917a7f436a": {
          "model_module": "@jupyter-widgets/controls",
          "model_module_version": "1.5.0",
          "model_name": "FloatProgressModel",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "FloatProgressModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "ProgressView",
            "bar_style": "danger",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_8a284f1425f343c7b02cf5e060519df4",
            "max": 35200,
            "min": 0,
            "orientation": "horizontal",
            "style": "IPY_MODEL_c97b171f55714254a3bd3d0d73882031",
            "value": 10500
          }
        },
        "be06c2b352c14f5c9aee8a16d5b11e24": {
          "model_module": "@jupyter-widgets/controls",
          "model_module_version": "1.5.0",
          "model_name": "HBoxModel",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HBoxModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HBoxView",
            "box_style": "",
            "children": [
              "IPY_MODEL_32813af8951f4c2b8dd4171a5937c5a9",
              "IPY_MODEL_9cb464da4e09479292889a917a7f436a",
              "IPY_MODEL_d36666c817b841f99576af80feb4a7ee"
            ],
            "layout": "IPY_MODEL_12492b59648c43e4bdbe100ddb3a3702"
          }
        },
        "c97b171f55714254a3bd3d0d73882031": {
          "model_module": "@jupyter-widgets/controls",
          "model_module_version": "1.5.0",
          "model_name": "ProgressStyleModel",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "ProgressStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "bar_color": null,
            "description_width": ""
          }
        },
        "d36666c817b841f99576af80feb4a7ee": {
          "model_module": "@jupyter-widgets/controls",
          "model_module_version": "1.5.0",
          "model_name": "HTMLModel",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HTMLModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HTMLView",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_6e4a4ef865da440d92b72bcf10f8b877",
            "placeholder": "​",
            "style": "IPY_MODEL_e82290e7a17643cdb2eaf7df030265f1",
            "value": " 10500/35200 [12:04&lt;24:59, 16.48it/s, epoch=14, loss=0.0018, acc=100.00%]"
          }
        },
        "e82290e7a17643cdb2eaf7df030265f1": {
          "model_module": "@jupyter-widgets/controls",
          "model_module_version": "1.5.0",
          "model_name": "DescriptionStyleModel",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "DescriptionStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "description_width": ""
          }
        }
      }
    }
  },
  "nbformat": 4,
  "nbformat_minor": 0
}
