{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "a8ff8fe1a55aaeb4",
   "metadata": {
    "collapsed": false
   },
   "source": [
    "# Description 描述"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "2d8f6f0232555f5b",
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
     "# Import commonly used packages\n",
     "import sys\n",
     "import numpy as np\n",
     "import pandas as pd\n",
     "import sklearn as sk\n",
     "import matplotlib.pyplot as plt\n",
     "import time\n",
     "# Make modules one level above the notebook importable\n",
     "sys.path.append('..')\n",
     "# from utils.matplot import histogram\n",
     "# import importlib\n",
     "# importlib.reload(utils.matplot)\n",
     "# Auto-reload edited local modules without restarting the kernel\n",
     "%load_ext autoreload\n",
     "%autoreload 2\n",
     "\n",
     "# Seed NumPy's global RNG for reproducible results\n",
     "np.random.seed(42)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "ab4c536e",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Set up MLflow experiment tracking\n",
     "import mlflow\n",
     "# mlflow.autolog() # with autolog enabled, a 16s SVC training run took 3min!!!!\n",
     "# NOTE(review): hardcoded absolute local path -- works only on this machine; consider an env var or config\n",
     "mlflow.set_tracking_uri(\"file:///Users/tianzhipeng/data/mlflows/mlruns\")\n",
     "mlflow.set_experiment(\"pytorch_resnet_classifier\")\n",
     "\n",
     "# NOTE(review): hardcoded absolute path to shared utils; prefer a relative path or installing utils as a package\n",
     "sys.path.append(\"/Users/tianzhipeng/Documents/private/cnm/Trial-7-Lan/PythonTest/ML/utils\")\n",
     "from my_config import CustomConfigObject\n",
     "# Containers for the run's parameters and metrics (logged to MLflow later)\n",
     "params = CustomConfigObject()\n",
     "metrics = CustomConfigObject()"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "7178d38257f77196",
   "metadata": {
    "collapsed": false
   },
   "source": [
    "# Ingest 数据摄入"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "4fae4413",
   "metadata": {},
   "source": [
    "## load预训练模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "initial_id",
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Using cache found in /Users/tianzhipeng/.cache/torch/hub/pytorch_vision_v0.10.0\n",
      "/Users/tianzhipeng/Documents/env/miniconda3/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
      "  from .autonotebook import tqdm as notebook_tqdm\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "ResNet(\n",
       "  (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)\n",
       "  (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "  (relu): ReLU(inplace=True)\n",
       "  (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)\n",
       "  (layer1): Sequential(\n",
       "    (0): Bottleneck(\n",
       "      (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "      (downsample): Sequential(\n",
       "        (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      )\n",
       "    )\n",
       "    (1): Bottleneck(\n",
       "      (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (2): Bottleneck(\n",
       "      (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "  )\n",
       "  (layer2): Sequential(\n",
       "    (0): Bottleneck(\n",
       "      (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "      (downsample): Sequential(\n",
       "        (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "        (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      )\n",
       "    )\n",
       "    (1): Bottleneck(\n",
       "      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (2): Bottleneck(\n",
       "      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (3): Bottleneck(\n",
       "      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (4): Bottleneck(\n",
       "      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (5): Bottleneck(\n",
       "      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (6): Bottleneck(\n",
       "      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (7): Bottleneck(\n",
       "      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "  )\n",
       "  (layer3): Sequential(\n",
       "    (0): Bottleneck(\n",
       "      (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "      (downsample): Sequential(\n",
       "        (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "        (1): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      )\n",
       "    )\n",
       "    (1): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (2): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (3): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (4): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (5): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (6): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (7): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (8): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (9): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (10): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (11): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (12): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (13): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (14): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (15): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (16): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (17): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (18): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (19): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (20): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (21): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (22): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (23): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (24): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (25): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (26): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (27): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (28): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (29): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (30): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (31): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (32): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (33): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (34): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (35): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "  )\n",
       "  (layer4): Sequential(\n",
       "    (0): Bottleneck(\n",
       "      (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "      (downsample): Sequential(\n",
       "        (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "        (1): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      )\n",
       "    )\n",
       "    (1): Bottleneck(\n",
       "      (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "    (2): Bottleneck(\n",
       "      (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "    )\n",
       "  )\n",
       "  (avgpool): AdaptiveAvgPool2d(output_size=(1, 1))\n",
       "  (fc): Linear(in_features=2048, out_features=1000, bias=True)\n",
       ")"
      ]
     },
     "execution_count": 1,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "import torch\n",
     "# model = torch.hub.load('pytorch/vision:v0.10.0', 'resnet18', pretrained=True)\n",
     "# or any of these variants\n",
     "# model = torch.hub.load('pytorch/vision:v0.10.0', 'resnet34', pretrained=True)\n",
     "# model = torch.hub.load('pytorch/vision:v0.10.0', 'resnet50', pretrained=True)\n",
     "# model = torch.hub.load('pytorch/vision:v0.10.0', 'resnet101', pretrained=True)\n",
     "model = torch.hub.load('pytorch/vision:v0.10.0', 'resnet152', pretrained=True)\n",
     "\n",
     "model  # NOTE(review): bare mid-cell expression has no effect; only the last expression is displayed\n",
     "model.eval()  # switch to inference mode (affects layers like BatchNorm/Dropout); results differ noticeably without it\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "75d4b380",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "ResNet(\n",
       "  (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)\n",
       "  (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "  (relu): ReLU(inplace=True)\n",
       "  (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)\n",
       "  (layer1): Sequential(\n",
       "    (0): BasicBlock(\n",
       "      (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "    (1): BasicBlock(\n",
       "      (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "  )\n",
       "  (layer2): Sequential(\n",
       "    (0): BasicBlock(\n",
       "      (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (downsample): Sequential(\n",
       "        (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      )\n",
       "    )\n",
       "    (1): BasicBlock(\n",
       "      (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "  )\n",
       "  (layer3): Sequential(\n",
       "    (0): BasicBlock(\n",
       "      (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (downsample): Sequential(\n",
       "        (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      )\n",
       "    )\n",
       "    (1): BasicBlock(\n",
       "      (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "  )\n",
       "  (layer4): Sequential(\n",
       "    (0): BasicBlock(\n",
       "      (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (downsample): Sequential(\n",
       "        (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "        (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      )\n",
       "    )\n",
       "    (1): BasicBlock(\n",
       "      (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace=True)\n",
       "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "  )\n",
       "  (avgpool): AdaptiveAvgPool2d(output_size=(1, 1))\n",
       "  (fc): Linear(in_features=512, out_features=1000, bias=True)\n",
       ")"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "import torchvision.models as models\n",
     "import torch.nn as nn\n",
     "\n",
     "# Load a pretrained ResNet-18. NOTE(review): this rebinds `model`, replacing\n",
     "# the resnet152 loaded in the cell above. `pretrained=True` is a legacy\n",
     "# torchvision argument; newer versions prefer `weights=...` -- confirm\n",
     "# against the installed torchvision version.\n",
     "model = models.resnet18(pretrained=True)\n",
     "model"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "b89ce994",
   "metadata": {},
   "source": [
    "## load自己的数据"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "69997b7b",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Dataset roots. TODO(review): hardcoded absolute local path -- consider a configurable DATA_DIR.\n",
     "base_path=\"/Users/tianzhipeng/data/pdfres/gcjjx\"\n",
     "train_dir = f\"{base_path}/classifier\"\n",
     "test_dir = f\"{base_path}/classifier_test\"    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "14a3b554",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 帮我实现一个python函数, 输入为一个目录\n",
    "# 1. 该目录下有多个子目录, 每个子目录代表一个类别, 子目录下是该类别的所有图片, 将所有子目录和图片读取\n",
    "# 2.使用PIL将图片转化为特征, 存储为训练用的X变量\n",
    "# 3.将子目录代表的标签, 存储为训练用的y变量\n",
    "# 函数返回X, y\n",
    "import os\n",
    "from PIL import Image\n",
    "import numpy as np\n",
    "\n",
    "def read_images_and_labels(directory, flatten=False, force_resize=None):\n",
    "    X = []  # 存储图像特征的列表\n",
    "    y = []  # 存储图像标签的列表\n",
    "    labels = {}  # 存储类别和对应标签的字典\n",
    "    label_index = 0  # 标签索引\n",
    "    img_shape = None\n",
    "\n",
    "    # 遍历目录下的子目录（代表类别）\n",
    "    for root, dirs, files in os.walk(directory):\n",
    "        for subdir in dirs:\n",
    "            labels[label_index] = subdir  # 将类别与索引关联\n",
    "            label_index += 1\n",
    "            subdir_path = os.path.join(root, subdir)\n",
    "\n",
    "            # 读取子目录中的图像文件\n",
    "            for file in os.listdir(subdir_path):\n",
    "                file_path = os.path.join(subdir_path, file)\n",
    "                if os.path.isfile(file_path):\n",
    "                    # 使用PIL库打开图像文件\n",
    "                    try:\n",
    "                        img = Image.open(file_path)\n",
    "                        if force_resize:\n",
    "                            img = img.resize(force_resize)\n",
    "                        else:\n",
    "                            if img_shape is None:\n",
    "                                img_shape = img.size\n",
    "                            if img.size != img_shape:\n",
    "                                raise RuntimeError(f\"图像大小不一致 {file_path} {img.size} {img_shape}\")\n",
    "                        # 将图像转换为特征向量并存储\n",
    "                        img_array = np.array(img)\n",
    "                        if flatten:\n",
    "                            feature_vector = img_array.flatten()\n",
    "                            X.append(feature_vector)\n",
    "                        else:\n",
    "                            X.append(img_array)\n",
    "                        # 存储图像对应的标签\n",
    "                        y.append(labels[label_index - 1])\n",
    "                    except Exception as e:\n",
    "                        print(f\"Error processing {file_path}: {e}\")\n",
    "    \n",
    "    # 将列表转换为NumPy数组\n",
    "    X = np.array(X)\n",
    "    y = np.array(y)\n",
    "    \n",
    "    return X, y"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "6a01dfff",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Record the run parameters (logged via the params config object), then load the training images.\n",
     "params.train_dir=train_dir\n",
     "params.flatten=False\n",
     "# force_resize is PIL (width, height) -> arrays come out as (2640, 1900, channels)\n",
     "params.force_resize=(1900, 2640)\n",
     "X, y = read_images_and_labels(params.train_dir, params.flatten, params.force_resize)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "5c5bd633",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(50, 2640, 1900, 3)"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "X.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "0d104bc3",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(50, 3, 2640, 1900)"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Rearrange X from (50, 2640, 1900, 3) to (50, 3, 2640, 1900), i.e. channels-first\n",
     "X = X.transpose(0, 3, 1, 2)\n",
     "X.shape\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "6d7acca1",
   "metadata": {},
   "source": [
    "## 使用ImageFolder加载数据"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "56c7d5dd",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "num_classes: 2\n",
      "train size: 50\n",
      "val size: 11\n"
     ]
    }
   ],
   "source": [
    "import torch\n",
    "from torchvision import datasets, transforms\n",
    "from torch.utils.data import DataLoader\n",
    "\n",
    "# 定义数据变换（与预训练模型的输入大小一致）\n",
    "data_transforms = {\n",
    "    'train': transforms.Compose([\n",
    "        transforms.Resize(256),\n",
    "        transforms.CenterCrop(224),\n",
    "        transforms.ToTensor(),\n",
    "        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n",
    "    ]),\n",
    "    'val': transforms.Compose([\n",
    "        transforms.Resize(256),\n",
    "        transforms.CenterCrop(224),\n",
    "        transforms.ToTensor(),\n",
    "        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n",
    "    ]),\n",
    "}\n",
    "\n",
    "# 加载数据\n",
    "data_dir = {\n",
    "    \"train\": \"/Users/tianzhipeng/data/pdfres/gcjjx/classifier\",\n",
    "    \"val\": \"/Users/tianzhipeng/data/pdfres/gcjjx/classifier_test\"\n",
    "}\n",
    "image_datasets = {x: datasets.ImageFolder(root=f\"{data_dir[x]}\", transform=data_transforms[x])\n",
    "                  for x in ['train', 'val']}\n",
    "dataloaders = {x: DataLoader(image_datasets[x], batch_size=32, shuffle=True, num_workers=4)\n",
    "               for x in ['train', 'val']}\n",
    "\n",
    "# 获取类别数\n",
    "class_names = image_datasets['train'].classes\n",
    "num_classes = len(class_names)\n",
    "\n",
    "print(f\"num_classes: {num_classes}\")\n",
    "print(f\"train size: {len(image_datasets['train'])}\")\n",
    "print(f\"val size: {len(image_datasets['val'])}\")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "dc4151f0bfcbf79d",
   "metadata": {
    "collapsed": false
   },
   "source": [
    "# 探索resnet基本使用"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "f79e6cd9173313a2",
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
     "# Sample image used for the resnet exploration below.\n",
     "# filename='./dog.jpg'\n",
     "filename='./111.jpg'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "2ee115fc",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "PIL.JpegImagePlugin.JpegImageFile"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "from PIL import Image\n",
     "from torchvision import transforms\n",
     "# Open the sample image with PIL; `type` shows the concrete PIL image class.\n",
     "input_image = Image.open(filename)\n",
     "type(input_image)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "4c2694e5",
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "preprocess = transforms.Compose([\n",
    "    transforms.Resize(256),\n",
    "    transforms.CenterCrop(224),\n",
    "    transforms.ToTensor(),\n",
    "    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n",
    "])\n",
    "input_tensor = preprocess(input_image)\n",
    "input_batch = input_tensor.unsqueeze(0) # create a mini-batch as expected by the model\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "228a6d2a",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([-3.9357e+00, -1.4323e+00, -2.9303e+00,  1.6183e-01, -1.1056e+00,\n",
      "        -2.2118e+00, -3.0682e+00, -1.7581e+00, -1.9676e+00,  7.7288e-01,\n",
      "        -1.3753e-01, -1.2872e+00,  2.6682e+00, -7.8641e-01, -2.0916e+00,\n",
      "         1.7134e+00, -2.6782e+00, -1.3659e+00, -9.1160e-01, -2.4041e+00,\n",
      "        -1.3005e+00, -2.4969e+00, -6.3978e-01, -7.0144e-01,  5.8015e-01,\n",
      "        -2.3081e+00, -1.8911e+00, -3.2185e+00, -2.2025e+00, -1.4667e-01,\n",
      "        -1.5135e-01, -1.8322e+00, -4.0025e+00, -2.8789e+00, -1.2353e+00,\n",
      "        -3.5485e+00, -1.4559e-01, -7.2956e-02,  6.3001e-01,  2.3757e+00,\n",
      "        -1.6164e+00,  1.5494e+00, -1.5016e+00, -2.0600e+00,  6.7119e-01,\n",
      "        -2.3020e-01,  1.4372e+00, -2.7225e+00,  1.4275e-01, -1.3444e+00,\n",
      "        -3.8187e-01,  1.1639e+00, -6.9664e-01, -1.8376e+00, -9.5910e-01,\n",
      "        -1.9121e+00, -2.3566e+00, -2.8124e+00, -1.1275e+00, -1.3908e+00,\n",
      "        -3.5659e-01, -1.0545e+00,  1.9678e-01, -2.1577e+00, -1.7985e+00,\n",
      "        -1.2973e+00,  9.7178e-01,  1.4864e-01, -3.7580e-01,  6.0305e-01,\n",
      "        -3.3939e+00,  5.2303e-01, -2.0414e+00, -2.2242e+00, -1.2714e+00,\n",
      "        -3.4415e+00, -2.9116e-01, -2.9374e-01, -1.4113e+00,  9.6924e-01,\n",
      "        -1.6323e+00, -3.1075e+00, -9.0677e-03, -5.4968e-01,  3.3171e-01,\n",
      "        -5.2928e-01, -1.1454e+00,  9.3677e-01, -1.5674e+00, -2.4344e+00,\n",
      "        -1.5791e+00, -4.7647e+00, -4.2178e+00, -1.9038e+00, -1.4510e+00,\n",
      "        -4.3485e+00, -1.7285e+00, -2.6186e+00, -3.0337e+00, -1.0891e+00,\n",
      "        -1.8881e+00, -2.9008e+00, -1.5667e+00, -1.2966e+00, -1.5641e+00,\n",
      "        -1.4446e+00,  3.8357e-01, -3.8959e+00, -2.5042e+00, -1.8272e+00,\n",
      "        -3.1420e+00, -3.0331e+00,  2.7865e+00, -1.0517e+00,  1.5886e+00,\n",
      "        -4.2890e+00, -4.7195e+00,  1.3663e+00,  2.1847e-01, -1.1453e+00,\n",
      "        -2.6526e+00, -1.7786e+00, -6.2947e-01, -2.1169e+00,  3.9833e-01,\n",
      "         1.2386e+00, -1.8372e+00, -4.6375e+00, -4.8769e+00, -5.7027e+00,\n",
      "        -4.2903e+00, -5.2383e+00, -1.8018e+00, -9.3425e-01, -3.2093e+00,\n",
      "        -5.5096e+00, -3.2295e+00, -2.6686e+00, -1.2322e+00, -1.9873e+00,\n",
      "        -2.0249e+00, -3.4904e+00, -4.3788e+00, -3.2362e+00, -3.2174e+00,\n",
      "        -1.5999e+00, -2.2940e+00, -9.7042e-01,  3.8270e-02, -4.3665e+00,\n",
      "         4.5151e+00,  3.4805e-01, -2.7932e+00, -1.3693e+00, -1.6983e+00,\n",
      "        -7.8636e-01, -2.2013e+00, -2.8754e+00, -1.1022e+00, -4.0397e-01,\n",
      "        -2.4599e+00, -6.3489e-01, -1.3726e+00, -1.8757e-01,  8.2323e-01,\n",
      "        -7.9736e-01, -2.5832e+00, -3.2260e+00,  1.2743e-02, -2.2419e+00,\n",
      "         5.2587e-02,  1.3055e+00,  3.1590e+00, -7.2371e-01,  2.5170e-01,\n",
      "        -1.8660e+00, -1.0493e+00, -7.5385e-01,  1.6437e+00,  5.5651e-01,\n",
      "         7.6319e-01, -2.4684e+00, -7.9934e-01, -5.4263e-01, -1.4123e+00,\n",
      "         1.3592e+00,  3.0345e+00,  2.0131e+00, -2.5812e+00,  1.5988e-01,\n",
      "        -3.0408e+00,  7.1023e-01, -2.3283e-01,  2.1221e+00, -2.2537e+00,\n",
      "         3.0262e+00,  5.2261e-01,  1.2109e+00, -1.0276e-01, -8.4290e-01,\n",
      "        -8.3903e-01,  1.5221e+00,  1.6361e-01, -2.8405e-01,  7.5854e-01,\n",
      "        -7.2628e-01, -8.1730e-01, -1.6541e+00, -3.7658e-01, -2.0209e+00,\n",
      "        -1.7012e+00,  3.1746e-01, -1.3246e+00, -1.7865e+00, -3.7019e-01,\n",
      "        -1.8466e+00, -3.2539e+00, -6.5254e-01, -3.4019e+00, -3.9869e-01,\n",
      "        -2.6328e+00, -2.6777e+00, -2.3675e+00, -4.9260e-01, -9.6042e-01,\n",
      "        -3.8699e+00, -3.5360e-01, -9.4387e-01, -9.7488e-01, -1.0232e+00,\n",
      "        -2.0324e+00, -2.3989e-01, -5.3317e-01,  2.0633e-01,  9.2274e-01,\n",
      "         8.5274e-02,  1.1647e+00, -1.0404e-01, -1.2675e+00, -3.3471e-01,\n",
      "        -1.1341e+00, -1.9750e+00,  6.3583e-02,  1.4343e+00, -1.6406e+00,\n",
      "         7.7936e-01,  3.0695e+00, -2.7773e+00,  4.7542e-01,  2.3762e-02,\n",
      "        -6.5709e-01,  1.7595e+00, -2.3493e+00,  1.8495e+00,  6.4057e-01,\n",
      "        -3.0011e+00, -8.7926e-01, -3.2218e+00, -1.7894e+00, -1.2805e+00,\n",
      "        -3.4392e+00, -2.5829e+00, -3.6072e+00, -6.9696e-01,  1.1586e+00,\n",
      "         5.0419e-02, -2.6173e+00, -1.7472e+00,  2.8549e-01, -9.6203e-01,\n",
      "        -2.8158e+00, -2.9249e+00,  1.5821e+00, -2.1104e+00, -3.9845e-01,\n",
      "        -4.4787e-01,  2.9660e+00, -1.0907e+00,  2.7604e-01, -1.5539e+00,\n",
      "         1.1443e+00,  1.5674e+01,  1.4497e+01,  5.4188e+00,  4.2268e+00,\n",
      "         1.4960e+01,  3.2656e+00,  7.9106e+00,  4.6882e+00,  5.8592e+00,\n",
      "         3.9593e+00,  2.7609e+00,  6.9643e+00,  1.0458e+00, -1.2170e+00,\n",
      "        -5.6540e-01,  9.6739e-01, -3.4101e+00,  3.6814e-01,  6.4644e-01,\n",
      "        -3.8023e+00, -1.6618e+00, -3.9457e+00, -1.4962e+00, -4.1992e+00,\n",
      "        -3.3128e+00, -2.6135e-01, -1.5511e+00, -9.3056e-01, -9.6821e-01,\n",
      "        -2.7029e+00,  1.3738e+00,  1.3157e+00, -2.5517e+00, -4.8457e-01,\n",
      "         5.6176e-01,  3.0323e-01, -6.8587e-01, -2.1993e+00, -2.2486e+00,\n",
      "        -4.3884e+00, -4.1928e+00, -4.8741e+00, -3.8262e+00, -2.7844e+00,\n",
      "        -3.3950e+00, -2.8083e+00, -1.5467e+00, -2.5575e+00, -1.9397e+00,\n",
      "        -3.5967e-01, -9.2784e-01, -1.2897e+00,  2.8891e-01, -3.2699e+00,\n",
      "         8.2574e-01, -2.0740e+00, -1.1811e+00, -4.3435e-01, -4.1097e+00,\n",
      "         3.2303e+00, -1.4050e+00, -8.5844e-01, -2.6490e+00, -1.0605e+00,\n",
      "        -3.4439e+00, -2.8376e+00, -3.5210e+00, -1.9812e+00, -2.9480e+00,\n",
      "        -1.2523e+00, -2.6713e+00, -2.4935e+00, -2.1480e+00, -4.1274e+00,\n",
      "        -3.3128e+00,  1.1227e+00,  9.3799e-01, -3.1159e-01,  1.2217e+00,\n",
      "         1.7851e+00, -1.3165e+00,  4.5975e-01, -1.0018e+00, -3.4427e+00,\n",
      "        -3.6592e+00, -4.6847e+00, -2.0481e+00, -4.0525e+00, -2.0863e+00,\n",
      "        -1.1781e+00, -2.9665e+00, -1.9607e+00, -1.5295e+00, -3.0179e+00,\n",
      "        -3.5827e+00, -4.1789e+00, -1.7931e-01, -1.7502e+00, -3.7587e+00,\n",
      "        -1.2807e+00, -4.5606e+00, -1.9867e+00, -2.7853e-01, -4.3522e+00,\n",
      "        -2.2593e+00, -1.6995e+00, -1.5136e+00, -2.3914e+00, -1.5356e+00,\n",
      "         2.6047e-02,  1.5369e+00, -2.6655e+00, -4.3173e+00, -2.2462e+00,\n",
      "         1.9816e+00, -1.2646e+00, -2.1911e+00,  7.8204e-02, -7.9205e-01,\n",
      "        -1.8704e+00,  5.4386e-01,  1.0785e+00, -3.2956e+00, -2.0120e+00,\n",
      "        -1.6596e+00,  2.4751e+00, -1.9674e+00, -4.4976e-01,  3.9758e-01,\n",
      "        -1.1361e+00,  2.1829e+00,  4.9259e+00, -3.5872e-01,  1.9280e+00,\n",
      "         2.1071e-01,  2.2787e+00, -3.7958e-01,  2.0951e+00,  2.0113e+00,\n",
      "         1.2005e+00,  3.2275e+00,  7.0507e-01, -1.7252e-01, -1.9765e-01,\n",
      "        -2.2946e+00,  2.2034e+00,  3.6419e+00,  2.5725e+00, -7.2163e-01,\n",
      "        -9.5862e-01,  5.0825e+00, -6.1524e-01,  9.5381e-01,  5.1058e+00,\n",
      "         9.5467e+00, -2.0445e+00, -4.5902e-01,  1.3729e+00, -2.0382e+00,\n",
      "         1.3576e+00,  1.0700e+00, -1.2105e+00,  1.7423e+00, -3.8253e+00,\n",
      "        -6.7021e-01,  2.3898e+00,  3.6298e-01, -5.9649e-01, -2.5998e+00,\n",
      "        -2.1495e-01, -2.3286e+00,  1.5177e-01,  5.3731e+00,  1.7787e+00,\n",
      "         1.5056e+00, -9.1045e-01,  3.1430e+00,  3.5304e-01,  3.3533e+00,\n",
      "         1.3372e+00, -1.6153e+00,  1.5831e+00,  7.1801e+00,  1.7845e+00,\n",
      "        -6.7280e-02, -8.0860e-01, -1.2098e-01,  1.8707e-01,  2.5137e+00,\n",
      "         3.7128e+00,  1.8561e+00, -1.1457e+00,  1.3668e+00, -3.5652e-01,\n",
      "        -6.7326e-01, -2.0654e+00,  2.8363e-01,  6.6584e+00,  1.3727e-01,\n",
      "        -7.4831e-01,  1.3277e-02, -2.3415e+00, -5.6485e-01, -1.0925e+00,\n",
      "         4.8959e-01, -4.4711e-01,  8.2400e-01, -7.0765e-01,  1.2502e+00,\n",
      "        -1.6583e+00, -1.7027e+00,  1.0574e+00,  1.1333e+00,  8.6753e-01,\n",
      "         1.3359e+00,  1.9207e+00, -1.0734e+00, -6.8040e-01,  2.7911e+00,\n",
      "        -2.6175e+00,  5.0682e-01, -7.6960e-01,  1.4664e+00,  4.2955e+00,\n",
      "         2.8057e+00,  2.6005e+00, -1.3026e+00,  4.3373e+00, -5.0575e-01,\n",
      "        -1.1271e+00, -3.1079e+00,  9.3573e-01, -9.8121e-01,  1.7581e+00,\n",
      "         1.3621e+00,  4.0598e+00,  1.3286e+00,  1.2275e+00,  4.8152e+00,\n",
      "         5.0669e+00,  2.8414e+00, -1.9622e+00,  1.3276e+00, -2.2811e+00,\n",
      "         5.8769e-01,  3.6588e+00,  1.2058e+00,  2.1209e-01,  2.1387e+00,\n",
      "         2.1257e+00,  8.9950e-01,  3.3180e+00,  1.0917e+00,  3.8638e+00,\n",
      "        -1.8745e+00,  2.5690e-02, -2.1338e+00, -5.7261e-01,  4.5150e+00,\n",
      "        -2.8378e+00,  1.3193e+00,  4.0873e+00,  3.2974e+00, -6.8836e-01,\n",
      "         1.5569e+00,  9.4011e-02, -2.0631e+00,  3.9744e-01,  1.5981e+00,\n",
      "         1.0795e+00, -1.7796e-01,  3.1484e+00,  4.4026e+00, -1.8774e+00,\n",
      "        -1.7971e+00,  2.1414e+00,  1.1000e+00,  1.0971e+00,  2.5680e+00,\n",
      "        -1.2698e+00,  1.3818e+00,  4.0498e+00,  2.0455e+00,  7.3273e-01,\n",
      "         1.8346e+00,  1.0562e+00,  3.8973e+00,  1.4472e+00, -1.7248e+00,\n",
      "         1.2540e+00, -2.3463e+00,  2.4890e+00, -2.7217e+00, -9.0522e-01,\n",
      "        -1.7967e+00, -3.5980e+00,  6.6864e-01,  3.6787e-01,  3.7409e+00,\n",
      "        -1.3318e+00, -7.0077e-01,  6.7098e-01, -4.2569e-01, -1.6239e+00,\n",
      "         2.9923e+00,  2.4053e+00,  4.5333e+00,  5.8002e+00,  5.0519e-01,\n",
      "        -1.2158e+00,  2.7090e+00, -2.0942e-01,  2.0246e+00, -6.6144e-01,\n",
      "        -1.7530e+00,  8.4043e-01, -1.6730e+00,  9.6650e-02, -2.5657e-02,\n",
      "         2.9271e+00, -1.0672e-01, -2.2774e-01, -1.9989e+00,  1.0330e+00,\n",
      "         1.8646e+00,  3.4168e+00,  2.8978e-02,  2.6929e+00, -4.0523e+00,\n",
      "         1.7393e+00,  2.6300e+00, -2.2859e+00, -9.1328e-01, -5.9728e-01,\n",
      "        -9.1900e-02, -3.0692e-02,  2.2358e+00,  2.9258e+00,  4.3611e+00,\n",
      "         4.4839e+00, -6.4981e-01,  5.6722e+00,  1.3451e+00,  3.3063e+00,\n",
      "        -2.0721e+00,  2.0060e+00, -2.8048e+00, -4.0683e-01,  4.9330e-01,\n",
      "        -1.0390e+00,  2.1527e+00,  6.5862e-01,  1.5785e+00, -2.5881e+00,\n",
      "        -1.5670e+00,  2.6594e+00,  1.7169e+00,  2.3784e+00,  3.6402e-01,\n",
      "        -2.7042e+00,  1.8804e-01,  1.9227e-01,  5.7606e-01,  4.2191e-01,\n",
      "        -5.2913e+00,  9.8548e-01,  2.8177e+00,  2.9180e+00, -1.7438e+00,\n",
      "         5.1713e-01,  3.6619e+00,  1.6420e+00,  6.6166e-01, -1.5613e+00,\n",
      "         1.5390e+00, -1.4516e+00,  8.5375e-01, -1.4878e+00,  5.9583e+00,\n",
      "         1.6930e-01, -1.0409e+00,  1.5944e-02, -1.7312e+00,  3.2182e+00,\n",
      "        -1.1124e+00,  2.3345e+00, -6.9353e-01, -3.1061e+00,  2.4125e+00,\n",
      "        -6.0539e-01, -1.3078e+00, -3.7500e+00,  4.7132e+00,  1.6851e+00,\n",
      "         6.2504e-04, -4.1733e-01,  7.1192e-01, -3.0770e-01,  3.4399e-01,\n",
      "         1.2412e+00,  3.7133e+00,  5.0957e-01, -6.2496e-01, -1.2143e+00,\n",
      "        -2.7733e+00,  9.2649e-01, -4.8178e-01,  8.8499e-01, -1.1535e+00,\n",
      "        -4.9614e+00,  1.1923e-01,  1.7654e+00, -1.2717e+00, -1.9011e+00,\n",
      "        -1.1826e+00,  4.1639e+00,  1.0912e+00, -8.9893e-01, -4.0606e-01,\n",
      "         7.9410e+00, -1.2096e+00,  1.0139e-01,  9.6001e-01,  1.2294e-01,\n",
      "        -1.6323e+00,  8.9243e-01, -1.2255e-01,  5.3976e+00,  2.9769e+00,\n",
      "         2.2475e+00,  1.0230e+00, -2.0515e+00,  2.8988e+00, -7.0542e-01,\n",
      "        -1.5311e+00,  3.8594e-01, -1.0470e+00,  5.6791e-01, -2.3175e-01,\n",
      "         1.3275e+00,  5.0656e+00,  5.9607e+00,  5.0995e-01, -1.0738e+00,\n",
      "         5.8207e+00, -2.8621e+00,  3.9203e-02,  6.5361e+00,  1.4702e+00,\n",
      "        -2.4048e+00,  2.7814e+00, -1.3005e+00,  1.3313e+00, -2.4489e+00,\n",
      "        -6.8632e-01,  5.8250e-01,  2.0892e+00,  3.4194e+00,  1.2671e+00,\n",
      "         2.2465e-02,  1.7409e+00,  4.9363e+00, -2.2572e-01,  1.6350e+00,\n",
      "         1.2353e+00, -4.6740e-01,  5.2502e+00,  2.5511e+00,  2.8067e-01,\n",
      "         6.9734e+00, -9.8350e-01,  1.4829e+00,  9.0402e+00,  2.4448e+00,\n",
      "        -2.2379e+00,  3.2775e+00, -1.0268e+00,  1.0856e+00,  8.9652e-01,\n",
      "         5.5606e+00,  5.2024e+00, -1.8081e-01, -1.1450e+00,  4.6596e-01,\n",
      "         1.7177e+00, -1.5529e-01,  7.7771e-01,  7.2160e-01,  5.1845e+00,\n",
      "         2.6784e+00, -1.5386e+00,  5.2479e-01,  1.0675e+00, -5.3548e-01,\n",
      "         5.5140e-01, -5.1774e-01,  7.1089e-01,  3.9319e+00,  1.0481e+00,\n",
      "        -3.8243e-01, -2.3111e+00,  4.8189e+00,  4.4660e-01,  1.3302e+00,\n",
      "        -7.1415e-01,  2.3968e+00, -5.9076e-01,  4.0232e-02,  3.1299e+00,\n",
      "         2.9054e+00,  1.2541e-01,  1.6851e+00,  2.1959e+00,  1.0854e+01,\n",
      "        -6.7991e-02, -8.5211e-02,  3.3423e+00,  2.8022e-01,  3.5842e+00,\n",
      "        -3.8841e+00, -1.9838e+00, -2.5650e+00,  4.4400e-01,  4.4182e+00,\n",
      "         4.9894e-01,  3.8057e+00, -1.7944e+00,  1.4523e-01,  4.2725e+00,\n",
      "         7.5476e-01,  5.3879e+00, -1.9623e+00,  3.1375e+00, -2.5566e+00,\n",
      "        -1.9959e-01,  5.3076e-01, -6.8656e-01,  2.5633e+00,  4.2162e-01,\n",
      "        -5.0310e+00, -3.1546e+00,  1.8263e-02,  2.6624e+00, -2.2798e-01,\n",
      "        -2.5311e-01, -3.8277e-01,  3.6133e+00,  2.0640e+00, -2.1092e-01,\n",
      "         1.9230e-01,  5.4943e+00, -2.1592e-01, -1.4645e+00,  5.5838e-02,\n",
      "        -8.5555e-01,  4.6563e-01,  2.8701e+00,  1.8036e+00, -1.2090e+00,\n",
      "         4.3961e+00,  3.2949e+00,  1.3692e+00, -1.7977e+00,  1.6370e+00,\n",
      "         3.7516e+00,  4.1684e+00, -6.8316e-01, -1.0912e+00,  3.3223e+00,\n",
      "         1.4758e+00,  3.9265e+00,  4.0251e+00, -3.5880e+00,  1.1573e+00,\n",
      "         2.6187e+00, -4.2605e+00,  1.6889e-01,  3.2597e+00,  3.0534e+00,\n",
      "         3.5610e-01,  4.0352e+00,  6.1907e-02, -3.1708e-01, -2.5284e+00,\n",
      "        -9.7154e-01, -1.2210e+00,  9.0570e-01,  2.9912e+00,  1.3937e-02,\n",
      "        -2.8208e-01, -6.8247e-01,  7.2609e-01,  3.4775e-01, -2.6970e+00,\n",
      "        -1.6822e+00,  9.9865e+00, -1.8610e+00,  1.2002e+00,  3.3565e+00,\n",
      "         1.7644e-01,  3.5918e+00,  3.0724e+00,  3.7401e+00,  2.9225e-01,\n",
      "         3.2521e+00, -3.4397e-01,  2.5526e-01, -1.0517e+00,  1.1949e+00,\n",
      "        -1.0659e+00, -4.9563e-01,  2.7649e+00,  2.0208e+00,  4.0554e+00,\n",
      "        -1.9616e+00,  8.9457e+00,  4.0962e+00,  5.1096e+00,  4.7975e+00,\n",
      "         1.0991e+00, -1.4848e+00,  3.7619e-01,  8.4967e-01,  6.6485e+00,\n",
      "         6.7087e+00, -7.5583e-01,  1.8490e+00, -2.0231e+00,  3.2511e+00,\n",
      "         3.2237e+00,  1.5333e+00, -1.0413e+00, -2.4160e+00, -1.5483e+00,\n",
      "        -1.9229e+00,  1.6716e+00, -1.8393e-01,  5.5779e+00,  9.0218e-02,\n",
      "        -1.4938e+00,  3.2086e+00,  1.2042e+00,  8.0241e-01,  1.3178e+00,\n",
      "        -2.1023e+00, -1.3136e+00, -2.0347e+00, -9.5990e-01,  1.6768e+00,\n",
      "        -1.8564e+00,  4.6697e-01, -1.1502e+00, -7.3563e-01,  5.5788e-01,\n",
      "        -7.7385e-01, -1.5222e-01,  1.5389e+00,  1.1758e-01,  2.9016e+00,\n",
      "        -9.8968e-01, -1.5382e-01,  2.6348e+00,  2.0002e+00, -2.0059e+00,\n",
      "        -1.9048e+00, -2.5647e+00, -5.0234e-01,  3.1447e-01, -2.4048e+00,\n",
      "         1.1019e+00,  8.5048e-01, -1.0753e+00,  8.8782e-01,  3.2973e+00,\n",
      "        -3.4709e+00, -7.8879e-01, -1.1975e+00,  4.1726e-01, -1.1919e+00,\n",
      "        -1.2162e+00,  2.2831e+00, -9.2997e-01, -6.4753e-01, -1.6479e+00,\n",
      "         1.0345e+00,  1.7413e+00,  1.0507e+00,  6.2043e+00,  4.0568e-01,\n",
      "        -1.8938e+00,  2.3396e+00, -1.9568e+00, -2.8761e+00, -1.6954e+00,\n",
      "        -1.1102e+00, -2.0637e+00,  3.2690e-01,  8.9426e-01, -3.0787e+00,\n",
      "        -2.3538e+00, -1.4251e+00,  3.3636e-01, -1.5661e+00, -1.5799e+00,\n",
      "        -2.8517e+00, -3.9043e+00,  8.4509e-01, -1.5557e+00, -1.7070e+00,\n",
      "        -1.1095e+00, -3.9197e+00, -4.5091e+00, -5.6697e+00, -4.6216e+00,\n",
      "        -5.3548e+00, -2.1236e+00, -4.0200e+00, -5.7269e-01,  6.1211e+00])\n",
      "tensor([1.6752e-09, 2.0478e-08, 4.5781e-09, 1.0083e-07, 2.8388e-08, 9.3919e-09,\n",
      "        3.9884e-09, 1.4784e-08, 1.1989e-08, 1.8576e-07, 7.4744e-08, 2.3673e-08,\n",
      "        1.2362e-06, 3.9063e-08, 1.0591e-08, 4.7581e-07, 5.8907e-09, 2.1883e-08,\n",
      "        3.4467e-08, 7.7483e-09, 2.3361e-08, 7.0615e-09, 4.5232e-08, 4.2528e-08,\n",
      "        1.5320e-07, 8.5296e-09, 1.2942e-08, 3.4318e-09, 9.4794e-09, 7.4064e-08,\n",
      "        7.3718e-08, 1.3727e-08, 1.5670e-09, 4.8198e-09, 2.4936e-08, 2.4672e-09,\n",
      "        7.4144e-08, 7.9730e-08, 1.6103e-07, 9.2266e-07, 1.7034e-08, 4.0384e-07,\n",
      "        1.9107e-08, 1.0931e-08, 1.6780e-07, 6.8129e-08, 3.6099e-07, 5.6357e-09,\n",
      "        9.8924e-08, 2.2359e-08, 5.8541e-08, 2.7466e-07, 4.2732e-08, 1.3653e-08,\n",
      "        3.2868e-08, 1.2673e-08, 8.1255e-09, 5.1513e-09, 2.7774e-08, 2.1344e-08,\n",
      "        6.0040e-08, 2.9878e-08, 1.0442e-07, 9.9140e-09, 1.4198e-08, 2.3437e-08,\n",
      "        2.2664e-07, 9.9508e-08, 5.8897e-08, 1.5675e-07, 2.8798e-09, 1.4470e-07,\n",
      "        1.1136e-08, 9.2756e-09, 2.4051e-08, 2.7460e-09, 6.4100e-08, 6.3934e-08,\n",
      "        2.0912e-08, 2.2607e-07, 1.6765e-08, 3.8345e-09, 8.4990e-08, 4.9497e-08,\n",
      "        1.1950e-07, 5.0517e-08, 2.7282e-08, 2.1885e-07, 1.7890e-08, 7.5171e-09,\n",
      "        1.7680e-08, 7.3115e-10, 1.2634e-09, 1.2779e-08, 2.0097e-08, 1.1086e-09,\n",
      "        1.5227e-08, 6.2524e-09, 4.1286e-09, 2.8862e-08, 1.2981e-08, 4.7150e-09,\n",
      "        1.7901e-08, 2.3453e-08, 1.7949e-08, 2.0226e-08, 1.2586e-07, 1.7431e-09,\n",
      "        7.0106e-09, 1.3796e-08, 3.7047e-09, 4.1308e-09, 1.3914e-06, 2.9962e-08,\n",
      "        4.1997e-07, 1.1765e-09, 7.6496e-10, 3.3627e-07, 1.0671e-07, 2.7284e-08,\n",
      "        6.0437e-09, 1.4483e-08, 4.5701e-08, 1.0326e-08, 1.2773e-07, 2.9595e-07,\n",
      "        1.3658e-08, 8.3037e-10, 6.5360e-10, 2.8618e-10, 1.1750e-09, 4.5533e-10,\n",
      "        1.4151e-08, 3.3695e-08, 3.4637e-09, 3.4714e-10, 3.3942e-09, 5.9477e-09,\n",
      "        2.5013e-08, 1.1756e-08, 1.1321e-08, 2.6149e-09, 1.0755e-09, 3.3716e-09,\n",
      "        3.4356e-09, 1.7317e-08, 8.6500e-09, 3.2498e-08, 8.9110e-08, 1.0888e-09,\n",
      "        7.8379e-06, 1.2147e-07, 5.2510e-09, 2.1808e-08, 1.5694e-08, 3.9065e-08,\n",
      "        9.4905e-09, 4.8367e-09, 2.8487e-08, 5.7261e-08, 7.3281e-09, 4.5454e-08,\n",
      "        2.1736e-08, 7.1096e-08, 1.9536e-07, 3.8638e-08, 6.4780e-09, 3.4062e-09,\n",
      "        8.6864e-08, 9.1130e-09, 9.0395e-08, 3.1643e-07, 2.0195e-06, 4.1591e-08,\n",
      "        1.1031e-07, 1.3271e-08, 3.0032e-08, 4.0356e-08, 4.4376e-07, 1.4962e-07,\n",
      "        1.8397e-07, 7.2656e-09, 3.8562e-08, 4.9848e-08, 2.0890e-08, 3.3388e-07,\n",
      "        1.7831e-06, 6.4205e-07, 6.4906e-09, 1.0063e-07, 4.0991e-09, 1.7448e-07,\n",
      "        6.7950e-08, 7.1601e-07, 9.0061e-09, 1.7684e-06, 1.4463e-07, 2.8788e-07,\n",
      "        7.7388e-08, 3.6918e-08, 3.7061e-08, 3.9294e-07, 1.0101e-07, 6.4557e-08,\n",
      "        1.8312e-07, 4.1485e-08, 3.7875e-08, 1.6403e-08, 5.8851e-08, 1.1367e-08,\n",
      "        1.5649e-08, 1.1781e-07, 2.2804e-08, 1.4369e-08, 5.9229e-08, 1.3531e-08,\n",
      "        3.3125e-09, 4.4659e-08, 2.8569e-09, 5.7564e-08, 6.1642e-09, 5.8940e-09,\n",
      "        8.0374e-09, 5.2405e-08, 3.2825e-08, 1.7891e-09, 6.0220e-08, 3.3372e-08,\n",
      "        3.2353e-08, 3.0826e-08, 1.1237e-08, 6.7472e-08, 5.0321e-08, 1.0542e-07,\n",
      "        2.1580e-07, 9.3398e-08, 2.7487e-07, 7.7289e-08, 2.4144e-08, 6.1368e-08,\n",
      "        2.7592e-08, 1.1901e-08, 9.1394e-08, 3.5994e-07, 1.6626e-08, 1.8697e-07,\n",
      "        1.8465e-06, 5.3348e-09, 1.3797e-07, 8.7826e-08, 4.4456e-08, 4.9824e-07,\n",
      "        8.1851e-09, 5.4517e-07, 1.6274e-07, 4.2651e-09, 3.5600e-08, 3.4205e-09,\n",
      "        1.4327e-08, 2.3833e-08, 2.7521e-09, 6.4799e-09, 2.3266e-09, 4.2719e-08,\n",
      "        2.7321e-07, 9.0199e-08, 6.2607e-09, 1.4945e-08, 1.1410e-07, 3.2772e-08,\n",
      "        5.1334e-09, 4.6029e-09, 4.1726e-07, 1.0394e-08, 5.7579e-08, 5.4802e-08,\n",
      "        1.6650e-06, 2.8815e-08, 1.1303e-07, 1.8133e-08, 2.6931e-07, 5.5014e-01,\n",
      "        1.6949e-01, 1.9348e-05, 5.8744e-06, 2.6945e-01, 2.2466e-06, 2.3380e-04,\n",
      "        9.3185e-06, 3.0056e-05, 4.4959e-06, 1.3563e-06, 9.0756e-05, 2.4406e-07,\n",
      "        2.5397e-08, 4.8725e-08, 2.2565e-07, 2.8334e-09, 1.2393e-07, 1.6370e-07,\n",
      "        1.9142e-09, 1.6277e-08, 1.6585e-09, 1.9210e-08, 1.2871e-09, 3.1231e-09,\n",
      "        6.6039e-08, 1.8183e-08, 3.3820e-08, 3.2570e-08, 5.7472e-09, 3.3879e-07,\n",
      "        3.1967e-07, 6.6853e-09, 5.2827e-08, 1.5041e-07, 1.1614e-07, 4.3195e-08,\n",
      "        9.5099e-09, 9.0521e-09, 1.0653e-09, 1.2953e-09, 6.5538e-10, 1.8691e-09,\n",
      "        5.2973e-09, 2.8764e-09, 5.1722e-09, 1.8263e-08, 6.6468e-09, 1.2328e-08,\n",
      "        5.9855e-08, 3.3911e-08, 2.3614e-08, 1.1449e-07, 3.2600e-09, 1.9585e-07,\n",
      "        1.0779e-08, 2.6324e-08, 5.5548e-08, 1.4076e-09, 2.1687e-06, 2.1044e-08,\n",
      "        3.6349e-08, 6.0652e-09, 2.9700e-08, 2.7393e-09, 5.0231e-09, 2.5359e-09,\n",
      "        1.1827e-08, 4.4977e-09, 2.4516e-08, 5.9315e-09, 7.0856e-09, 1.0010e-08,\n",
      "        1.3829e-09, 3.1231e-09, 2.6356e-07, 2.1911e-07, 6.2803e-08, 2.9099e-07,\n",
      "        5.1119e-07, 2.2990e-08, 1.3582e-07, 3.1493e-08, 2.7426e-09, 2.2086e-09,\n",
      "        7.9211e-10, 1.1062e-08, 1.4904e-09, 1.0648e-08, 2.6404e-08, 4.4154e-09,\n",
      "        1.2072e-08, 1.8580e-08, 4.1940e-09, 2.3843e-09, 1.3135e-09, 7.1686e-08,\n",
      "        1.4900e-08, 1.9995e-09, 2.3829e-08, 8.9671e-10, 1.1762e-08, 6.4914e-08,\n",
      "        1.1045e-09, 8.9562e-09, 1.5675e-08, 1.8878e-08, 7.8479e-09, 1.8468e-08,\n",
      "        8.8027e-08, 3.9880e-07, 5.9661e-09, 1.1437e-09, 9.0741e-09, 6.2214e-07,\n",
      "        2.4216e-08, 9.5881e-09, 9.2740e-08, 3.8844e-08, 1.3212e-08, 1.4774e-07,\n",
      "        2.5217e-07, 3.1773e-09, 1.1468e-08, 1.6314e-08, 1.0191e-06, 1.1992e-08,\n",
      "        5.4699e-08, 1.2764e-07, 2.7535e-08, 7.6087e-07, 1.1819e-05, 5.9912e-08,\n",
      "        5.8968e-07, 1.0588e-07, 8.3739e-07, 5.8675e-08, 6.9693e-07, 6.4090e-07,\n",
      "        2.8490e-07, 2.1628e-06, 1.7359e-07, 7.2174e-08, 7.0383e-08, 8.6448e-09,\n",
      "        7.7665e-07, 3.2730e-06, 1.1233e-06, 4.1678e-08, 3.2884e-08, 1.3824e-05,\n",
      "        4.6356e-08, 2.2261e-07, 1.4150e-05, 1.2005e-03, 1.1101e-08, 5.4194e-08,\n",
      "        3.3850e-07, 1.1172e-08, 3.3336e-07, 2.5003e-07, 2.5561e-08, 4.8976e-07,\n",
      "        1.8707e-09, 4.3877e-08, 9.3582e-07, 1.2329e-07, 4.7234e-08, 6.3713e-09,\n",
      "        6.9176e-08, 8.3562e-09, 9.9820e-08, 1.8484e-05, 5.0793e-07, 3.8651e-07,\n",
      "        3.4507e-08, 1.9873e-06, 1.2208e-07, 2.4526e-06, 3.2664e-07, 1.7052e-08,\n",
      "        4.1766e-07, 1.1262e-04, 5.1087e-07, 8.0183e-08, 3.8206e-08, 7.5991e-08,\n",
      "        1.0341e-07, 1.0592e-06, 3.5136e-06, 5.4880e-07, 2.7273e-08, 3.3642e-07,\n",
      "        6.0044e-08, 4.3743e-08, 1.0872e-08, 1.1389e-07, 6.6834e-05, 9.8383e-08,\n",
      "        4.0581e-08, 8.6910e-08, 8.2488e-09, 4.8752e-08, 2.8762e-08, 1.3994e-07,\n",
      "        5.4844e-08, 1.9551e-07, 4.2265e-08, 2.9942e-07, 1.6336e-08, 1.5626e-08,\n",
      "        2.4691e-07, 2.6638e-07, 2.0421e-07, 3.2620e-07, 5.8539e-07, 2.9317e-08,\n",
      "        4.3432e-08, 1.3978e-06, 6.2595e-09, 1.4237e-07, 3.9726e-08, 3.7167e-07,\n",
      "        6.2926e-06, 1.4184e-06, 1.1553e-06, 2.3314e-08, 6.5611e-06, 5.1720e-08,\n",
      "        2.7786e-08, 3.8333e-09, 2.1862e-07, 3.2149e-08, 4.9756e-07, 3.3485e-07,\n",
      "        4.9712e-06, 3.2381e-07, 2.9268e-07, 1.0580e-05, 1.3609e-05, 1.4700e-06,\n",
      "        1.2053e-08, 3.2351e-07, 8.7630e-09, 1.5436e-07, 3.3290e-06, 2.8641e-07,\n",
      "        1.0603e-07, 7.2800e-07, 7.1863e-07, 2.1084e-07, 2.3676e-06, 2.5551e-07,\n",
      "        4.0864e-06, 1.3159e-08, 8.7996e-08, 1.0153e-08, 4.8375e-08, 7.8372e-06,\n",
      "        5.0216e-09, 3.2083e-07, 5.1095e-06, 2.3193e-06, 4.3088e-08, 4.0687e-07,\n",
      "        9.4218e-08, 1.0897e-08, 1.2762e-07, 4.2401e-07, 2.5241e-07, 7.1782e-08,\n",
      "        1.9982e-06, 7.0038e-06, 1.3120e-08, 1.4218e-08, 7.2994e-07, 2.5766e-07,\n",
      "        2.5691e-07, 1.1184e-06, 2.4089e-08, 3.4153e-07, 4.9217e-06, 6.6325e-07,\n",
      "        1.7845e-07, 5.3709e-07, 2.4661e-07, 4.2257e-06, 3.6461e-07, 1.5284e-08,\n",
      "        3.0055e-07, 8.2094e-09, 1.0333e-06, 5.6400e-09, 3.4688e-08, 1.4223e-08,\n",
      "        2.3480e-09, 1.6738e-07, 1.2390e-07, 3.6139e-06, 2.2643e-08, 4.2556e-08,\n",
      "        1.6777e-07, 5.6031e-08, 1.6906e-08, 1.7094e-06, 9.5042e-07, 7.9819e-06,\n",
      "        2.8333e-05, 1.4214e-07, 2.5425e-08, 1.2877e-06, 6.9559e-08, 6.4952e-07,\n",
      "        4.4264e-08, 1.4859e-08, 1.9875e-07, 1.6096e-08, 9.4467e-08, 8.3591e-08,\n",
      "        1.6014e-06, 7.7082e-08, 6.8296e-08, 1.1619e-08, 2.4095e-07, 5.5348e-07,\n",
      "        2.6133e-06, 8.8286e-08, 1.2671e-06, 1.4908e-09, 4.8827e-07, 1.1898e-06,\n",
      "        8.7205e-09, 3.4409e-08, 4.7196e-08, 7.8233e-08, 8.3172e-08, 8.0220e-07,\n",
      "        1.5994e-06, 6.7187e-06, 7.5970e-06, 4.4781e-08, 2.4929e-05, 3.2922e-07,\n",
      "        2.3400e-06, 1.0800e-08, 6.3750e-07, 5.1902e-09, 5.7098e-08, 1.4046e-07,\n",
      "        3.0345e-08, 7.3827e-07, 1.6571e-07, 4.1577e-07, 6.4462e-09, 1.7896e-08,\n",
      "        1.2254e-06, 4.7749e-07, 9.2521e-07, 1.2342e-07, 5.7394e-09, 1.0351e-07,\n",
      "        1.0395e-07, 1.5258e-07, 1.3078e-07, 4.3185e-10, 2.2977e-07, 1.4355e-06,\n",
      "        1.5870e-06, 1.4997e-08, 1.4384e-07, 3.3393e-06, 4.4302e-07, 1.6621e-07,\n",
      "        1.7999e-08, 3.9964e-07, 2.0086e-08, 2.0141e-07, 1.9372e-08, 3.3186e-05,\n",
      "        1.0159e-07, 3.0287e-08, 8.7142e-08, 1.5186e-08, 2.1427e-06, 2.8197e-08,\n",
      "        8.8545e-07, 4.2865e-08, 3.8402e-09, 9.5732e-07, 4.6815e-08, 2.3193e-08,\n",
      "        2.0169e-09, 9.5547e-06, 4.6251e-07, 8.5817e-08, 5.6502e-08, 1.7478e-07,\n",
      "        6.3048e-08, 1.2098e-07, 2.9672e-07, 3.5155e-06, 1.4276e-07, 4.5908e-08,\n",
      "        2.5464e-08, 5.3565e-09, 2.1661e-07, 5.2975e-08, 2.0780e-07, 2.7062e-08,\n",
      "        6.0063e-10, 9.6624e-08, 5.0120e-07, 2.4046e-08, 1.2814e-08, 2.6284e-08,\n",
      "        5.5164e-06, 2.5539e-07, 3.4906e-08, 5.7142e-08, 2.4101e-04, 2.5584e-08,\n",
      "        9.4916e-08, 2.2399e-07, 9.6983e-08, 1.6766e-08, 2.0935e-07, 7.5872e-08,\n",
      "        1.8944e-05, 1.6832e-06, 8.1171e-07, 2.3856e-07, 1.1024e-08, 1.5568e-06,\n",
      "        4.2359e-08, 1.8550e-08, 1.2616e-07, 3.0101e-08, 1.5134e-07, 6.8023e-08,\n",
      "        3.2347e-07, 1.3591e-05, 3.3268e-05, 1.4282e-07, 2.9305e-08, 2.8919e-05,\n",
      "        4.9013e-09, 8.9193e-08, 5.9140e-05, 3.7307e-07, 7.7427e-09, 1.3844e-06,\n",
      "        2.3362e-08, 3.2469e-07, 7.4088e-09, 4.3176e-08, 1.5356e-07, 6.9286e-07,\n",
      "        2.6203e-06, 3.0452e-07, 8.7712e-08, 4.8908e-07, 1.1943e-05, 6.8434e-08,\n",
      "        4.3994e-07, 2.9498e-07, 5.3742e-08, 1.6347e-05, 1.0996e-06, 1.1355e-07,\n",
      "        9.1583e-05, 3.2076e-08, 3.7785e-07, 7.2346e-04, 9.8873e-07, 9.1495e-09,\n",
      "        2.2734e-06, 3.0715e-08, 2.5396e-07, 2.1021e-07, 2.2297e-05, 1.5583e-05,\n",
      "        7.1578e-08, 2.7291e-08, 1.3667e-07, 4.7787e-07, 7.3428e-08, 1.8666e-07,\n",
      "        1.7648e-07, 1.5308e-05, 1.2489e-06, 1.8411e-08, 1.4495e-07, 2.4941e-07,\n",
      "        5.0205e-08, 1.4886e-07, 5.1104e-08, 1.7460e-07, 4.3741e-06, 2.4461e-07,\n",
      "        5.8508e-08, 8.5034e-09, 1.0620e-05, 1.3405e-07, 3.2433e-07, 4.1990e-08,\n",
      "        9.4237e-07, 4.7505e-08, 8.9285e-08, 1.9615e-06, 1.5671e-06, 9.7223e-08,\n",
      "        4.6251e-07, 7.7087e-07, 4.4373e-03, 8.0126e-08, 7.8759e-08, 2.4257e-06,\n",
      "        1.1350e-07, 3.0897e-06, 1.7638e-09, 1.1797e-08, 6.5970e-09, 1.3370e-07,\n",
      "        7.1136e-06, 1.4125e-07, 3.8556e-06, 1.4256e-08, 9.9169e-08, 6.1493e-06,\n",
      "        1.8243e-07, 1.8760e-05, 1.2053e-08, 1.9766e-06, 6.6523e-09, 7.0246e-08,\n",
      "        1.4582e-07, 4.3165e-08, 1.1131e-06, 1.3074e-07, 5.6024e-10, 3.6582e-09,\n",
      "        8.7345e-08, 1.2290e-06, 6.8280e-08, 6.6585e-08, 5.8488e-08, 3.1808e-06,\n",
      "        6.7562e-07, 6.9455e-08, 1.0395e-07, 2.0867e-05, 6.9109e-08, 1.9827e-08,\n",
      "        9.0689e-08, 3.6454e-08, 1.3662e-07, 1.5128e-06, 5.2072e-07, 2.5599e-08,\n",
      "        6.9582e-06, 2.3135e-06, 3.3725e-07, 1.4210e-08, 4.4079e-07, 3.6525e-06,\n",
      "        5.5412e-06, 4.3312e-08, 2.8801e-08, 2.3778e-06, 3.7518e-07, 4.3507e-06,\n",
      "        4.8014e-06, 2.3717e-09, 2.7285e-07, 1.1765e-06, 1.2106e-09, 1.0154e-07,\n",
      "        2.2335e-06, 1.8170e-06, 1.2245e-07, 4.8502e-06, 9.1241e-08, 6.2459e-08,\n",
      "        6.8430e-09, 3.2462e-08, 2.5294e-08, 2.1215e-07, 1.7076e-06, 8.6968e-08,\n",
      "        6.4684e-08, 4.3342e-08, 1.7727e-07, 1.2143e-07, 5.7814e-09, 1.5949e-08,\n",
      "        1.8637e-03, 1.3337e-08, 2.8481e-07, 2.4605e-06, 1.0231e-07, 3.1130e-06,\n",
      "        1.8519e-06, 3.6108e-06, 1.1488e-07, 2.2165e-06, 6.0802e-08, 1.1070e-07,\n",
      "        2.9961e-08, 2.8330e-07, 2.9538e-08, 5.2246e-08, 1.3618e-06, 6.4702e-07,\n",
      "        4.9490e-06, 1.2062e-08, 6.5820e-04, 5.1553e-06, 1.4202e-05, 1.0395e-05,\n",
      "        2.5740e-07, 1.9431e-08, 1.2493e-07, 2.0059e-07, 6.6179e-05, 7.0284e-05,\n",
      "        4.0276e-08, 5.4491e-07, 1.1342e-08, 2.2142e-06, 2.1545e-06, 3.9737e-07,\n",
      "        3.0274e-08, 7.6570e-09, 1.8233e-08, 1.2537e-08, 4.5634e-07, 7.1355e-08,\n",
      "        2.2685e-05, 9.3861e-08, 1.9256e-08, 2.1222e-06, 2.8596e-07, 1.9133e-07,\n",
      "        3.2035e-07, 1.0478e-08, 2.3058e-08, 1.1211e-08, 3.2842e-08, 4.5868e-07,\n",
      "        1.3399e-08, 1.3681e-07, 2.7151e-08, 4.1098e-08, 1.4983e-07, 3.9557e-08,\n",
      "        7.3654e-08, 3.9962e-07, 9.6465e-08, 1.5612e-06, 3.1878e-08, 7.3536e-08,\n",
      "        1.1955e-06, 6.3381e-07, 1.1538e-08, 1.2766e-08, 6.5986e-09, 5.1897e-08,\n",
      "        1.1746e-07, 7.7433e-09, 2.5814e-07, 2.0075e-07, 2.9262e-08, 2.0839e-07,\n",
      "        2.3190e-06, 2.6663e-09, 3.8971e-08, 2.5895e-08, 1.3017e-07, 2.6042e-08,\n",
      "        2.5416e-08, 8.4110e-07, 3.3840e-08, 4.4883e-08, 1.6506e-08, 2.4130e-07,\n",
      "        4.8925e-07, 2.4525e-07, 4.2440e-05, 1.2867e-07, 1.2908e-08, 8.9000e-07,\n",
      "        1.2119e-08, 4.8330e-09, 1.5740e-08, 2.8259e-08, 1.0891e-08, 1.1893e-07,\n",
      "        2.0974e-07, 3.9470e-09, 8.1481e-09, 2.0624e-08, 1.2006e-07, 1.7913e-08,\n",
      "        1.7667e-08, 4.9527e-09, 1.7286e-09, 1.9967e-07, 1.8099e-08, 1.5559e-08,\n",
      "        2.8280e-08, 1.7022e-09, 9.4416e-10, 2.9580e-10, 8.4368e-10, 4.0529e-10,\n",
      "        1.0257e-08, 1.5397e-09, 4.8371e-08, 3.9052e-05])\n"
     ]
    }
   ],
   "source": [
    "# move the input and model to GPU for speed if available\n",
    "if torch.cuda.is_available():\n",
    "    input_batch = input_batch.to('cuda')\n",
    "    model.to('cuda')\n",
    "\n",
    "with torch.no_grad():\n",
    "    output = model(input_batch)\n",
    "# Tensor of shape 1000, with confidence scores over ImageNet's 1000 classes\n",
    "print(output[0])\n",
    "# The output has unnormalized scores. To get probabilities, you can run a softmax on it.\n",
    "probabilities = torch.nn.functional.softmax(output[0], dim=0)\n",
    "print(probabilities)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "d3489af4",
   "metadata": {},
   "outputs": [],
   "source": [
    "# !wget https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "81f5a02a",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tabby 0.5501436591148376\n",
      "Egyptian cat 0.2694525718688965\n",
      "tiger cat 0.1694866418838501\n",
      "shower curtain 0.004437304567545652\n",
      "tub 0.001863670302554965\n"
     ]
    }
   ],
   "source": [
    "with open(\"imagenet_classes.txt\", \"r\") as f:\n",
    "    categories = [s.strip() for s in f.readlines()]\n",
    "# Show top categories per image\n",
    "top5_prob, top5_catid = torch.topk(probabilities, 5)\n",
    "for i in range(top5_prob.size(0)):\n",
    "    print(categories[top5_catid[i]], top5_prob[i].item())"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "e6d0599f9f20a40",
   "metadata": {
    "collapsed": false
   },
   "source": [
    "# Modeling 建模"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "a76aece0",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Users/tianzhipeng/Documents/env/miniconda3/lib/python3.11/site-packages/torchvision/models/_utils.py:208: UserWarning: The parameter 'pretrained' is deprecated since 0.13 and may be removed in the future, please use 'weights' instead.\n",
      "  warnings.warn(\n",
      "/Users/tianzhipeng/Documents/env/miniconda3/lib/python3.11/site-packages/torchvision/models/_utils.py:223: UserWarning: Arguments other than a weight enum or `None` for 'weights' are deprecated since 0.13 and may be removed in the future. The current behavior is equivalent to passing `weights=ResNet152_Weights.IMAGENET1K_V1`. You can also use `weights=ResNet152_Weights.DEFAULT` to get the most up-to-date weights.\n",
      "  warnings.warn(msg)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "num fc in_features: 2048\n"
     ]
    }
   ],
   "source": [
    "import torchvision.models as models\n",
    "import torch.nn as nn\n",
    "\n",
    "# 加载预训练的 ResNet 模型\n",
    "model = models.resnet152(pretrained=True)\n",
    "\n",
    "# 替换最后一层，全连接层的输入大小是模型的fc层输入特征数量\n",
    "num_ftrs = model.fc.in_features\n",
    "print(f\"num fc in_features: {num_ftrs}\")\n",
    "\n",
    "# 将输出层修改为2分类\n",
    "model.fc = nn.Linear(num_ftrs, num_classes)  # num_classes = 2 for binary classification\n",
    "\n",
    "# 如果你不想微调所有层，只训练最后一层：\n",
    "for param in model.parameters():\n",
    "    param.requires_grad = False  # 冻结所有参数\n",
    "\n",
    "# 解冻最后一层的参数\n",
    "for param in model.fc.parameters():\n",
    "    param.requires_grad = True\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "daf56c0a64b48c24",
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 0/9\n",
      "----------\n",
      "train Loss: 0.1709 Acc: 0.9600\n",
      "val Loss: 0.2706 Acc: 0.8182\n",
      "Epoch 1/9\n",
      "----------\n",
      "train Loss: 0.1519 Acc: 1.0000\n",
      "val Loss: 0.2638 Acc: 0.8182\n",
      "Epoch 2/9\n",
      "----------\n",
      "train Loss: 0.1475 Acc: 0.9800\n",
      "val Loss: 0.2901 Acc: 0.8182\n",
      "Epoch 3/9\n",
      "----------\n",
      "train Loss: 0.1220 Acc: 1.0000\n",
      "val Loss: 0.2311 Acc: 0.8182\n",
      "Epoch 4/9\n",
      "----------\n",
      "train Loss: 0.1024 Acc: 1.0000\n",
      "val Loss: 0.2101 Acc: 0.9091\n",
      "Epoch 5/9\n",
      "----------\n",
      "train Loss: 0.0881 Acc: 1.0000\n",
      "val Loss: 0.2194 Acc: 0.8182\n",
      "Epoch 6/9\n",
      "----------\n",
      "train Loss: 0.0962 Acc: 0.9800\n",
      "val Loss: 0.2629 Acc: 0.8182\n",
      "Epoch 7/9\n",
      "----------\n",
      "train Loss: 0.0904 Acc: 1.0000\n",
      "val Loss: 0.2421 Acc: 0.8182\n",
      "Epoch 8/9\n",
      "----------\n",
      "train Loss: 0.0736 Acc: 1.0000\n",
      "val Loss: 0.1919 Acc: 0.8182\n",
      "Epoch 9/9\n",
      "----------\n",
      "train Loss: 0.0721 Acc: 1.0000\n",
      "val Loss: 0.1789 Acc: 0.9091\n",
      "训练完成\n"
     ]
    }
   ],
   "source": [
    "import torch.optim as optim\n",
    "\n",
    "# 定义损失函数\n",
    "criterion = nn.CrossEntropyLoss()\n",
    "\n",
    "# 优化器（只更新最后的全连接层参数）\n",
    "optimizer = optim.Adam(model.fc.parameters(), lr=0.001)\n",
    "\n",
    "# device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
    "device = torch.device(\"mps\")\n",
    "model = model.to(device)\n",
    "\n",
    "# 用with\n",
    "with mlflow.start_run(run_name=None):\n",
    "    num_epochs = 10\n",
    "    for epoch in range(num_epochs):\n",
    "        print(f'Epoch {epoch}/{num_epochs-1}')\n",
    "        print('-' * 10)\n",
    "        \n",
    "        # 每个epoch有训练和验证阶段\n",
    "        for phase in ['train', 'val']:\n",
    "            if phase == 'train':\n",
    "                model.train()  # 训练模式\n",
    "            else:\n",
    "                model.eval()   # 验证模式\n",
    "\n",
    "            running_loss = 0.0\n",
    "            running_corrects = 0\n",
    "\n",
    "            # 遍历数据\n",
    "            for inputs, labels in dataloaders[phase]:\n",
    "                inputs = inputs.to(device)\n",
    "                labels = labels.to(device)\n",
    "\n",
    "                # 梯度归零\n",
    "                optimizer.zero_grad()\n",
    "\n",
    "                # 前向传播\n",
    "                with torch.set_grad_enabled(phase == 'train'):\n",
    "                    outputs = model(inputs)\n",
    "                    _, preds = torch.max(outputs, 1)\n",
    "                    loss = criterion(outputs, labels)\n",
    "\n",
    "                    # 训练时反向传播和优化\n",
    "                    if phase == 'train':\n",
    "                        loss.backward()\n",
    "                        optimizer.step()\n",
    "\n",
    "                # 统计损失和准确率\n",
    "                running_loss += loss.item() * inputs.size(0)\n",
    "                running_corrects += torch.sum(preds == labels.data)\n",
    "\n",
    "            epoch_loss = running_loss / len(image_datasets[phase])\n",
    "            epoch_acc = running_corrects.float() / len(image_datasets[phase])\n",
    "\n",
    "            print(f'{phase} Loss: {epoch_loss:.4f} Acc: {epoch_acc:.4f}')\n",
    "\n",
    "print('训练完成')"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "c064f0a2",
   "metadata": {},
   "source": [
    "### resnet18\n",
    "cpu训练 3m41s\n",
    "train Loss: 0.3164 Acc: 0.8800\n",
    "val Loss: 0.2843 Acc: 0.9091\n",
    "\n",
    "gpu训练 3m28s\n",
    "(跟cpu/gpu应该没关系, 我没重新加载模型直接跑的两遍)\n",
    "\n",
    "中间有epoch的准确率达到1的"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "73c1fc3439698ff5",
   "metadata": {
    "collapsed": false
   },
   "source": [
    "# Conclusion 结论结果"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "55fe62d6c3e193fb",
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.8"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
