{
 "cells": [
  {
   "cell_type": "code",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-27T06:13:54.541608Z",
     "start_time": "2025-02-27T06:13:51.126549Z"
    }
   },
   "source": [
    "import matplotlib as mpl\n",
    "import matplotlib.pyplot as plt\n",
    "%matplotlib inline\n",
    "import numpy as np\n",
    "import sklearn\n",
    "import pandas as pd\n",
    "import os\n",
    "import sys\n",
    "import time\n",
    "from tqdm.auto import tqdm\n",
    "import torch\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "\n",
    "print(sys.version_info)\n",
    "for module in mpl, np, pd, sklearn, torch:\n",
    "    print(module.__name__, module.__version__)\n",
    "\n",
    "device = torch.device(\"cuda:0\") if torch.cuda.is_available() else torch.device(\"cpu\")\n",
    "print(device)\n",
    "\n",
    "# Fix: `seed` was defined but never applied, so runs were not reproducible.\n",
    "# Seed every RNG this notebook uses (numpy and torch; torch.manual_seed also\n",
    "# seeds CUDA on all devices in torch >= 1.2).\n",
    "seed = 42\n",
    "np.random.seed(seed)\n",
    "torch.manual_seed(seed)\n"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "sys.version_info(major=3, minor=12, micro=3, releaselevel='final', serial=0)\n",
      "matplotlib 3.10.0\n",
      "numpy 1.26.4\n",
      "pandas 2.2.3\n",
      "sklearn 1.6.1\n",
      "torch 2.6.0+cu118\n",
      "cuda:0\n"
     ]
    }
   ],
   "execution_count": 1
  },
  {
   "attachments": {},
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 数据准备（Data Preparation）\n",
    "\n",
    "```shell\n",
    "$ tree -L 2 archive \n",
    "archive\n",
    "├── monkey_labels.txt\n",
    "├── training\n",
    "│   ├── n0\n",
    "│   ├── n1\n",
    "│   ├── n2\n",
    "│   ├── n3\n",
    "│   ├── n4\n",
    "│   ├── n5\n",
    "│   ├── n6\n",
    "│   ├── n7\n",
    "│   ├── n8\n",
    "│   └── n9\n",
    "└── validation\n",
    "    ├── n0\n",
    "    ├── n1\n",
    "    ├── n2\n",
    "    ├── n3\n",
    "    ├── n4\n",
    "    ├── n5\n",
    "    ├── n6\n",
    "    ├── n7\n",
    "    ├── n8\n",
    "    └── n9\n",
    "\n",
    "22 directories, 1 file\n",
    "```"
   ]
  },
  {
   "cell_type": "code",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-27T06:52:01.593505Z",
     "start_time": "2025-02-27T06:52:01.560131Z"
    }
   },
   "source": [
    "from torchvision import datasets\n",
    "from torchvision.transforms import ToTensor, Resize, Compose, ConvertImageDtype, Normalize\n",
    "\n",
    "from pathlib import Path\n",
    "\n",
    "DATA_DIR = Path(\"./archive/\")\n",
    "\n",
    "class MonkeyDataset(datasets.ImageFolder):\n",
    "    \"\"\"ImageFolder over the 10-monkey-species archive.\n",
    "\n",
    "    Args:\n",
    "        mode: \"train\" (archive/training) or \"val\" (archive/validation).\n",
    "        transform: optional per-image transform forwarded to ImageFolder.\n",
    "\n",
    "    Raises:\n",
    "        ValueError: if `mode` is neither \"train\" nor \"val\".\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, mode, transform=None):\n",
    "        if mode == \"train\":\n",
    "            root = DATA_DIR / \"training\"\n",
    "        elif mode == \"val\":\n",
    "            root = DATA_DIR / \"validation\"\n",
    "        else:\n",
    "            raise ValueError(\"mode should be one of the following: train, val, but got {}\".format(mode))\n",
    "        # Pass transform by keyword: ImageFolder's 2nd positional parameter\n",
    "        # happens to be `transform`, but being explicit is safer and clearer.\n",
    "        super().__init__(root, transform=transform)\n",
    "        self.imgs = self.samples\n",
    "        self.targets = [s[1] for s in self.samples]\n",
    "\n",
    "# 224x224 input is what the pretrained resnet expects, see\n",
    "# https://pytorch.org/vision/stable/models/generated/torchvision.models.resnet50.html\n",
    "img_h, img_w = 224, 224\n",
    "transform = Compose([\n",
    "    Resize((img_h, img_w)),\n",
    "    ToTensor(),  # PIL image -> float32 tensor scaled to [0, 1]\n",
    "    # Fix: dtype conversion must come *before* Normalize. The old order\n",
    "    # (Normalize then ConvertImageDtype) only worked because ToTensor already\n",
    "    # yields float32, making the conversion an accidental no-op.\n",
    "    ConvertImageDtype(torch.float),\n",
    "    Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),  # ImageNet statistics\n",
    "])\n",
    "\n",
    "train_ds = MonkeyDataset(\"train\", transform=transform)\n",
    "val_ds = MonkeyDataset(\"val\", transform=transform)\n",
    "\n",
    "print(\"load {} images from training dataset\".format(len(train_ds)))\n",
    "print(\"load {} images from validation dataset\".format(len(val_ds)))\n",
    "train_ds.imgs[:5]  # Fix: show a small sample instead of dumping all ~1100 entries\n"
   ],
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "load 1097 images from training dataset\n",
      "load 272 images from validation dataset\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "[('archive\\\\training\\\\n0\\\\n0018.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0019.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0020.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0021.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0022.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0023.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0024.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0025.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0026.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0027.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0028.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0029.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0030.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0031.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0032.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0033.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0034.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0035.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0036.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0037.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0038.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0039.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0040.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0041.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0042.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0043.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0044.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0045.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0046.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0047.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0048.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0049.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0050.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0051.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0052.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0053.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0110.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0111.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0112.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0113.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0114.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0115.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0116.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0117.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0118.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0119.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0120.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0121.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0122.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0123.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0124.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0125.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0126.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0127.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0128.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0129.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0130.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0131.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0132.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0133.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0134.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0135.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0136.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0137.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0138.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0139.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0140.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0141.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0142.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0143.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0144.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0145.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0146.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0147.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0148.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0149.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0150.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0151.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0152.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0153.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0154.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0155.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0156.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0157.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0158.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0159.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0160.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0161.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0162.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0163.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0164.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0165.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0166.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0167.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0168.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0169.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0170.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0171.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0172.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0173.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0174.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0175.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n0176.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n018.jpg', 0),\n",
       " ('archive\\\\training\\\\n0\\\\n019.jpg', 0),\n",
       " ('archive\\\\training\\\\n1\\\\n1017.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1018.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1019.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1021.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1022.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1023.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1024.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1025.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1026.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1027.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1028.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1029.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1030.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1031.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1032.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1033.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1034.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1035.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1036.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1037.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1038.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1039.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1040.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1041.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1042.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1043.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1044.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1045.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1046.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1047.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1048.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1049.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1050.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1051.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1052.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1053.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1054.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1055.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1056.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1057.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1058.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1059.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1060.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1061.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1062.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1063.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1064.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1065.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1066.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1067.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1068.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1069.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1070.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1071.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1072.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1110.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1111.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1112.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1113.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1114.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1115.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1116.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1117.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1118.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1119.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1120.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1121.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1122.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1123.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1124.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1125.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1126.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1127.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1128.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1129.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1130.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1131.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1132.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1133.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1134.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1135.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1136.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1137.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1138.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1139.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1140.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1141.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1142.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1143.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1144.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1145.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1146.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1147.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1148.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1149.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1150.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1151.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1152.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1153.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1154.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1155.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1156.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1157.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1158.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1159.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1160.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1161.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1162.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1163.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1164.jpg', 1),\n",
       " ('archive\\\\training\\\\n1\\\\n1165.jpg', 1),\n",
       " ('archive\\\\training\\\\n2\\\\n2017.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2018.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2019.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2020.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2021.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2022.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2023.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2024.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2025.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2026.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2027.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2028.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2029.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2030.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2031.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2032.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2033.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2034.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2035.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2036.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2037.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2038.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2039.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2040.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2041.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2042.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2043.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2044.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2045.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2046.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2047.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2048.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2049.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2050.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2051.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2052.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2053.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2054.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2055.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2056.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2057.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2058.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2059.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2060.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2061.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2062.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2063.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2064.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2065.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2066.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2067.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2068.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2069.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2110.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2111.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2112.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2113.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2114.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2115.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2116.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2117.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2118.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2119.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2120.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2121.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2122.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2123.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2124.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2125.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2126.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2127.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2128.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2129.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2130.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2131.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2132.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2133.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2134.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2135.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2136.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2137.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2138.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2139.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2140.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2141.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2142.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2143.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2144.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2145.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2146.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2147.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2148.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2149.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2150.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2151.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2152.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2153.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2154.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2155.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2156.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2157.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2158.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2159.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2160.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2161.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2162.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2163.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2164.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2165.jpg', 2),\n",
       " ('archive\\\\training\\\\n2\\\\n2166.jpg', 2),\n",
       " ('archive\\\\training\\\\n3\\\\n3020.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3021.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3022.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3023.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3024.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3025.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3026.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3027.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3028.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3029.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3030.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3031.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3032.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3033.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3034.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3035.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3036.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3037.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3038.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3039.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3040.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3041.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3042.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3043.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3044.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3045.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3046.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3047.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3048.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3049.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3050.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3051.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3052.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3053.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3054.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3055.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3056.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3057.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3058.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3059.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3060.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3061.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3062.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3063.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3064.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3065.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3066.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3067.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3068.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3069.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3070.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3071.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3072.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3073.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3074.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3075.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3076.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3077.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3078.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3110.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3111.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3112.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3113.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3114.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3115.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3116.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3117.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3118.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3119.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3120.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3121.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3122.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3123.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3124.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3125.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3126.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3127.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3128.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3129.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3130.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3131.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3132.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3133.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3134.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3135.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3136.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3137.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3138.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3139.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3140.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3141.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3142.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3143.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3144.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3145.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3146.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3147.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3148.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3149.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3150.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3151.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3152.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3153.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3154.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3155.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3156.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3157.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3158.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3159.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3160.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3161.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3162.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3163.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3164.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3165.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3166.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3167.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3168.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3169.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3170.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3171.jpg', 3),\n",
       " ('archive\\\\training\\\\n3\\\\n3172.jpg', 3),\n",
       " ('archive\\\\training\\\\n4\\\\n4016.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4017.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4018.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4019.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4020.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4021.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4022.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4023.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4024.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4025.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4026.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4027.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4028.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4029.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4030.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4031.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4032.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4033.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4034.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4035.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4036.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4037.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4038.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4039.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4040.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4041.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4042.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4043.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4044.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4045.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4046.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4047.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4048.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4049.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4050.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4051.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4052.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4053.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4054.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4055.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4056.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4057.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4058.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4059.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4060.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4061.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4062.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4063.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4064.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4110.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4111.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4112.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4113.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4114.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4115.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4116.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4117.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4118.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4119.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4120.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4121.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4122.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4123.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4124.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4125.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4126.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4127.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4128.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4129.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4130.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4131.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4132.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4133.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4134.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4135.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4136.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4137.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4138.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4139.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4140.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4141.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4142.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4143.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4144.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4145.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4146.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4147.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4148.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4149.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4150.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4151.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4152.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4153.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4154.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n4155.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n41556.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n41557.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n41558.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n41559.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n41560.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n41561.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n41562.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n41563.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n41564.jpg', 4),\n",
       " ('archive\\\\training\\\\n4\\\\n41565.jpg', 4),\n",
       " ('archive\\\\training\\\\n5\\\\n5018.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5019.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5020.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5021.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5022.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5023.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5024.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5025.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5026.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5027.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5028.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5029.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5030.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5031.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5032.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5033.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5034.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5035.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5036.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5037.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5038.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5039.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5040.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5041.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5042.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5043.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5044.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5045.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5046.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5047.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5048.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5049.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5050.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5051.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5052.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5053.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5054.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5055.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5056.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5057.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5058.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5059.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5060.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5061.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5062.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5063.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5064.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5065.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5066.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5067.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5068.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5069.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5070.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5071.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5072.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5073.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5110.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5111.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5112.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5113.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5114.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5115.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5116.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5117.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5118.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5119.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5120.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5121.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5122.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5123.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5124.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5125.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5126.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5127.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5128.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5129.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5130.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5131.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5132.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5133.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5134.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5135.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5136.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5137.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5138.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5139.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5140.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5141.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5142.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5143.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5144.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5145.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5146.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5147.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5148.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5149.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5150.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5151.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5152.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5153.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5154.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5155.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5156.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5157.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5158.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5159.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5160.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5161.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5162.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5163.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5164.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5165.jpg', 5),\n",
       " ('archive\\\\training\\\\n5\\\\n5166.jpg', 5),\n",
       " ('archive\\\\training\\\\n6\\\\n6016.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6017.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6018.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6019.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6020.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6021.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6022.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6023.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6024.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6025.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6026.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6027.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6028.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6029.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6030.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6031.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6032.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6033.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6034.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6035.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6036.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6037.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6038.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6039.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6040.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6041.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6042.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6043.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6044.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6045.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6046.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6047.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6048.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6049.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6050.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6051.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6052.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6053.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6054.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6055.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6056.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6057.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6058.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6059.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6060.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6061.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6062.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6063.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6064.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6065.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6066.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6067.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6068.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6069.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6110.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6111.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6112.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6113.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6114.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6115.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6116.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6117.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6118.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6119.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6120.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6121.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6122.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6123.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6124.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6125.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6126.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6127.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6128.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6129.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6130.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6131.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6132.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6133.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6134.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6135.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6136.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6137.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6138.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6139.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6140.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6141.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6142.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6143.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6144.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6145.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6146.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6147.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6148.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6149.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6150.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6151.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6152.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6153.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6154.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6155.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6156.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6157.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6158.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6159.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6160.jpg', 6),\n",
       " ('archive\\\\training\\\\n6\\\\n6161.jpg', 6),\n",
       " ('archive\\\\training\\\\n7\\\\n7019.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n702.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7020.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7021.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7022.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7023.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7024.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7025.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7026.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7027.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7028.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7029.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7030.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7031.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7032.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7033.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7034.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7035.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7036.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7037.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7038.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7039.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7040.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7041.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7042.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7043.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7044.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7045.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7046.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7047.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7048.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7049.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7050.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7051.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7052.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7053.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7054.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7055.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7056.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7057.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7058.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7059.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7060.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7061.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7062.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7063.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7064.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7065.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7066.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7067.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7068.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7069.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7070.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7071.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7110.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7111.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7112.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7113.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7114.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7115.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7116.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7117.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7118.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7119.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7120.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7121.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7122.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7123.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7124.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7125.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7126.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7127.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7128.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7129.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7130.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7131.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7132.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7133.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7134.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7135.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7136.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7137.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7138.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7139.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7140.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7141.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7142.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7143.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7144.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7145.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7146.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7147.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7148.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7149.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7150.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7151.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7152.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7153.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7154.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7155.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7156.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7157.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7158.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7159.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7160.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7161.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7162.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7163.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7164.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7165.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7166.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7167.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7168.jpg', 7),\n",
       " ('archive\\\\training\\\\n7\\\\n7169.jpg', 7),\n",
       " ('archive\\\\training\\\\n8\\\\n8018.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8019.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8020.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8022.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8023.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8024.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8025.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8026.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8027.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8028.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8029.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8030.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8032.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8033.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8034.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8035.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8036.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8037.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8038.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8039.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8040.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8041.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8042.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8043.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8044.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8045.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8046.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8047.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8048.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8049.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8050.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8051.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8052.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8053.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8054.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8055.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8056.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8057.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8058.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8059.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8060.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8061.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8062.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8063.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8064.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8065.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8066.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8067.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8068.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8069.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8070.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8071.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8072.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8073.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8074.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8075.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8076.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8077.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8078.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8079.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n808.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8080.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8110.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8111.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8112.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8113.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8114.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8115.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8116.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8117.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8118.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8119.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8120.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8121.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8122.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8123.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8124.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8125.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8126.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8127.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8128.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8129.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8130.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8131.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8132.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8133.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8134.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8135.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8136.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8137.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8138.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8139.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8140.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8141.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8142.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8143.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8144.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8145.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8146.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8147.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8148.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8149.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8150.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n8151.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n817.jpg', 8),\n",
       " ('archive\\\\training\\\\n8\\\\n818.jpg', 8),\n",
       " ('archive\\\\training\\\\n9\\\\n9014.jpg', 9),\n",
       " ('archive\\\\training\\\\n9\\\\n9016.jpg', 9),\n",
       " ('archive\\\\training\\\\n9\\\\n9017.jpg', 9),\n",
       " ('archive\\\\training\\\\n9\\\\n9018.jpg', 9),\n",
       " ('archive\\\\training\\\\n9\\\\n9019.jpg', 9),\n",
       " ('archive\\\\training\\\\n9\\\\n9020.jpg', 9),\n",
       " ('archive\\\\training\\\\n9\\\\n9022.jpg', 9),\n",
       " ('archive\\\\training\\\\n9\\\\n9023.jpg', 9),\n",
       " ...]"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 20
  },
  {
   "cell_type": "code",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-27T06:13:55.872889Z",
     "start_time": "2025-02-27T06:13:55.866908Z"
    }
   },
   "source": [
    "# 数据类别\n",
    "train_ds.classes, train_ds.class_to_idx"
   ],
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(['n0', 'n1', 'n2', 'n3', 'n4', 'n5', 'n6', 'n7', 'n8', 'n9'],\n",
       " {'n0': 0,\n",
       "  'n1': 1,\n",
       "  'n2': 2,\n",
       "  'n3': 3,\n",
       "  'n4': 4,\n",
       "  'n5': 5,\n",
       "  'n6': 6,\n",
       "  'n7': 7,\n",
       "  'n8': 8,\n",
       "  'n9': 9})"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 3
  },
  {
   "cell_type": "code",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-27T06:20:28.027065Z",
     "start_time": "2025-02-27T06:13:56.192469Z"
    }
   },
   "source": [
    "from torchvision.models import resnet50\n",
    "\n",
    "\n",
    "# 加载resnet50模型以及预训练权重，冻结除最后一层外所有层的权重，去除最后一层并添加自定义分类层\n",
    "class ResNet50(nn.Module):\n",
    "    def __init__(self, num_classes=10, frozen=True):\n",
    "        super().__init__()\n",
    "        # 下载预训练权重\n",
    "        self.model = resnet50(weights=\"IMAGENET1K_V2\",) # 这里的weights参数可以选择\"IMAGENET1K_V2\"或None，None表示随机初始化\n",
    "        # 冻结前面的层\n",
    "        if frozen:\n",
    "            for param in self.model.parameters():\n",
    "                param.requires_grad = False # 冻结权重\n",
    "        # for param in self.model.layer4.parameters():\n",
    "        #     param.requires_grad = True  # 解冻 layer4\n",
    "        for name, param in self.model.named_parameters():\n",
    "            if name == \"layer4.2.conv3.weight\":\n",
    "                param.requires_grad = True  # 解冻该层\n",
    "        # 添加自定义分类层\n",
    "        # print(self.model)\n",
    "        print(self.model.fc.in_features) # 打印resnet50的最后一层的输入通道数\n",
    "        print(self.model.fc.out_features) # 打印resnet50的最后一层的输出通道数 1000\n",
    "        self.model.fc = nn.Linear(self.model.fc.in_features, num_classes)   # 自定义分类层,把resnet50的最后一层改为num_classes个输出\n",
    "        \n",
    "        \n",
    "    def forward(self, x):\n",
    "        return self.model(x)\n",
    "\n",
    "\n",
    "for idx, (key, value) in enumerate(ResNet50().named_parameters()):\n",
     "    print(f\"{key:^40}parameters num: {np.prod(value.shape)}\")\n"
   ],
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Downloading: \"https://download.pytorch.org/models/resnet50-11ad3fa6.pth\" to C:\\Users\\75713/.cache\\torch\\hub\\checkpoints\\resnet50-11ad3fa6.pth\n",
      "100%|██████████| 97.8M/97.8M [06:30<00:00, 262kB/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2048\n",
      "1000\n",
      "           model.conv1.weight           paramerters num: 9408\n",
      "            model.bn1.weight            paramerters num: 64\n",
      "             model.bn1.bias             paramerters num: 64\n",
      "      model.layer1.0.conv1.weight       paramerters num: 4096\n",
      "       model.layer1.0.bn1.weight        paramerters num: 64\n",
      "        model.layer1.0.bn1.bias         paramerters num: 64\n",
      "      model.layer1.0.conv2.weight       paramerters num: 36864\n",
      "       model.layer1.0.bn2.weight        paramerters num: 64\n",
      "        model.layer1.0.bn2.bias         paramerters num: 64\n",
      "      model.layer1.0.conv3.weight       paramerters num: 16384\n",
      "       model.layer1.0.bn3.weight        paramerters num: 256\n",
      "        model.layer1.0.bn3.bias         paramerters num: 256\n",
      "   model.layer1.0.downsample.0.weight   paramerters num: 16384\n",
      "   model.layer1.0.downsample.1.weight   paramerters num: 256\n",
      "    model.layer1.0.downsample.1.bias    paramerters num: 256\n",
      "      model.layer1.1.conv1.weight       paramerters num: 16384\n",
      "       model.layer1.1.bn1.weight        paramerters num: 64\n",
      "        model.layer1.1.bn1.bias         paramerters num: 64\n",
      "      model.layer1.1.conv2.weight       paramerters num: 36864\n",
      "       model.layer1.1.bn2.weight        paramerters num: 64\n",
      "        model.layer1.1.bn2.bias         paramerters num: 64\n",
      "      model.layer1.1.conv3.weight       paramerters num: 16384\n",
      "       model.layer1.1.bn3.weight        paramerters num: 256\n",
      "        model.layer1.1.bn3.bias         paramerters num: 256\n",
      "      model.layer1.2.conv1.weight       paramerters num: 16384\n",
      "       model.layer1.2.bn1.weight        paramerters num: 64\n",
      "        model.layer1.2.bn1.bias         paramerters num: 64\n",
      "      model.layer1.2.conv2.weight       paramerters num: 36864\n",
      "       model.layer1.2.bn2.weight        paramerters num: 64\n",
      "        model.layer1.2.bn2.bias         paramerters num: 64\n",
      "      model.layer1.2.conv3.weight       paramerters num: 16384\n",
      "       model.layer1.2.bn3.weight        paramerters num: 256\n",
      "        model.layer1.2.bn3.bias         paramerters num: 256\n",
      "      model.layer2.0.conv1.weight       paramerters num: 32768\n",
      "       model.layer2.0.bn1.weight        paramerters num: 128\n",
      "        model.layer2.0.bn1.bias         paramerters num: 128\n",
      "      model.layer2.0.conv2.weight       paramerters num: 147456\n",
      "       model.layer2.0.bn2.weight        paramerters num: 128\n",
      "        model.layer2.0.bn2.bias         paramerters num: 128\n",
      "      model.layer2.0.conv3.weight       paramerters num: 65536\n",
      "       model.layer2.0.bn3.weight        paramerters num: 512\n",
      "        model.layer2.0.bn3.bias         paramerters num: 512\n",
      "   model.layer2.0.downsample.0.weight   paramerters num: 131072\n",
      "   model.layer2.0.downsample.1.weight   paramerters num: 512\n",
      "    model.layer2.0.downsample.1.bias    paramerters num: 512\n",
      "      model.layer2.1.conv1.weight       paramerters num: 65536\n",
      "       model.layer2.1.bn1.weight        paramerters num: 128\n",
      "        model.layer2.1.bn1.bias         paramerters num: 128\n",
      "      model.layer2.1.conv2.weight       paramerters num: 147456\n",
      "       model.layer2.1.bn2.weight        paramerters num: 128\n",
      "        model.layer2.1.bn2.bias         paramerters num: 128\n",
      "      model.layer2.1.conv3.weight       paramerters num: 65536\n",
      "       model.layer2.1.bn3.weight        paramerters num: 512\n",
      "        model.layer2.1.bn3.bias         paramerters num: 512\n",
      "      model.layer2.2.conv1.weight       paramerters num: 65536\n",
      "       model.layer2.2.bn1.weight        paramerters num: 128\n",
      "        model.layer2.2.bn1.bias         paramerters num: 128\n",
      "      model.layer2.2.conv2.weight       paramerters num: 147456\n",
      "       model.layer2.2.bn2.weight        paramerters num: 128\n",
      "        model.layer2.2.bn2.bias         paramerters num: 128\n",
      "      model.layer2.2.conv3.weight       paramerters num: 65536\n",
      "       model.layer2.2.bn3.weight        paramerters num: 512\n",
      "        model.layer2.2.bn3.bias         paramerters num: 512\n",
      "      model.layer2.3.conv1.weight       paramerters num: 65536\n",
      "       model.layer2.3.bn1.weight        paramerters num: 128\n",
      "        model.layer2.3.bn1.bias         paramerters num: 128\n",
      "      model.layer2.3.conv2.weight       paramerters num: 147456\n",
      "       model.layer2.3.bn2.weight        paramerters num: 128\n",
      "        model.layer2.3.bn2.bias         paramerters num: 128\n",
      "      model.layer2.3.conv3.weight       paramerters num: 65536\n",
      "       model.layer2.3.bn3.weight        paramerters num: 512\n",
      "        model.layer2.3.bn3.bias         paramerters num: 512\n",
      "      model.layer3.0.conv1.weight       paramerters num: 131072\n",
      "       model.layer3.0.bn1.weight        paramerters num: 256\n",
      "        model.layer3.0.bn1.bias         paramerters num: 256\n",
      "      model.layer3.0.conv2.weight       paramerters num: 589824\n",
      "       model.layer3.0.bn2.weight        paramerters num: 256\n",
      "        model.layer3.0.bn2.bias         paramerters num: 256\n",
      "      model.layer3.0.conv3.weight       paramerters num: 262144\n",
      "       model.layer3.0.bn3.weight        paramerters num: 1024\n",
      "        model.layer3.0.bn3.bias         paramerters num: 1024\n",
      "   model.layer3.0.downsample.0.weight   paramerters num: 524288\n",
      "   model.layer3.0.downsample.1.weight   paramerters num: 1024\n",
      "    model.layer3.0.downsample.1.bias    paramerters num: 1024\n",
      "      model.layer3.1.conv1.weight       paramerters num: 262144\n",
      "       model.layer3.1.bn1.weight        paramerters num: 256\n",
      "        model.layer3.1.bn1.bias         paramerters num: 256\n",
      "      model.layer3.1.conv2.weight       paramerters num: 589824\n",
      "       model.layer3.1.bn2.weight        paramerters num: 256\n",
      "        model.layer3.1.bn2.bias         paramerters num: 256\n",
      "      model.layer3.1.conv3.weight       paramerters num: 262144\n",
      "       model.layer3.1.bn3.weight        paramerters num: 1024\n",
      "        model.layer3.1.bn3.bias         paramerters num: 1024\n",
      "      model.layer3.2.conv1.weight       paramerters num: 262144\n",
      "       model.layer3.2.bn1.weight        paramerters num: 256\n",
      "        model.layer3.2.bn1.bias         paramerters num: 256\n",
      "      model.layer3.2.conv2.weight       paramerters num: 589824\n",
      "       model.layer3.2.bn2.weight        paramerters num: 256\n",
      "        model.layer3.2.bn2.bias         paramerters num: 256\n",
      "      model.layer3.2.conv3.weight       paramerters num: 262144\n",
      "       model.layer3.2.bn3.weight        paramerters num: 1024\n",
      "        model.layer3.2.bn3.bias         paramerters num: 1024\n",
      "      model.layer3.3.conv1.weight       paramerters num: 262144\n",
      "       model.layer3.3.bn1.weight        paramerters num: 256\n",
      "        model.layer3.3.bn1.bias         paramerters num: 256\n",
      "      model.layer3.3.conv2.weight       paramerters num: 589824\n",
      "       model.layer3.3.bn2.weight        paramerters num: 256\n",
      "        model.layer3.3.bn2.bias         paramerters num: 256\n",
      "      model.layer3.3.conv3.weight       paramerters num: 262144\n",
      "       model.layer3.3.bn3.weight        paramerters num: 1024\n",
      "        model.layer3.3.bn3.bias         paramerters num: 1024\n",
      "      model.layer3.4.conv1.weight       paramerters num: 262144\n",
      "       model.layer3.4.bn1.weight        paramerters num: 256\n",
      "        model.layer3.4.bn1.bias         paramerters num: 256\n",
      "      model.layer3.4.conv2.weight       paramerters num: 589824\n",
      "       model.layer3.4.bn2.weight        paramerters num: 256\n",
      "        model.layer3.4.bn2.bias         paramerters num: 256\n",
      "      model.layer3.4.conv3.weight       paramerters num: 262144\n",
      "       model.layer3.4.bn3.weight        paramerters num: 1024\n",
      "        model.layer3.4.bn3.bias         paramerters num: 1024\n",
      "      model.layer3.5.conv1.weight       paramerters num: 262144\n",
      "       model.layer3.5.bn1.weight        paramerters num: 256\n",
      "        model.layer3.5.bn1.bias         paramerters num: 256\n",
      "      model.layer3.5.conv2.weight       paramerters num: 589824\n",
      "       model.layer3.5.bn2.weight        paramerters num: 256\n",
      "        model.layer3.5.bn2.bias         paramerters num: 256\n",
      "      model.layer3.5.conv3.weight       paramerters num: 262144\n",
      "       model.layer3.5.bn3.weight        paramerters num: 1024\n",
      "        model.layer3.5.bn3.bias         paramerters num: 1024\n",
      "      model.layer4.0.conv1.weight       paramerters num: 524288\n",
      "       model.layer4.0.bn1.weight        paramerters num: 512\n",
      "        model.layer4.0.bn1.bias         paramerters num: 512\n",
      "      model.layer4.0.conv2.weight       paramerters num: 2359296\n",
      "       model.layer4.0.bn2.weight        paramerters num: 512\n",
      "        model.layer4.0.bn2.bias         paramerters num: 512\n",
      "      model.layer4.0.conv3.weight       paramerters num: 1048576\n",
      "       model.layer4.0.bn3.weight        paramerters num: 2048\n",
      "        model.layer4.0.bn3.bias         paramerters num: 2048\n",
      "   model.layer4.0.downsample.0.weight   paramerters num: 2097152\n",
      "   model.layer4.0.downsample.1.weight   paramerters num: 2048\n",
      "    model.layer4.0.downsample.1.bias    paramerters num: 2048\n",
      "      model.layer4.1.conv1.weight       paramerters num: 1048576\n",
      "       model.layer4.1.bn1.weight        paramerters num: 512\n",
      "        model.layer4.1.bn1.bias         paramerters num: 512\n",
      "      model.layer4.1.conv2.weight       paramerters num: 2359296\n",
      "       model.layer4.1.bn2.weight        paramerters num: 512\n",
      "        model.layer4.1.bn2.bias         paramerters num: 512\n",
      "      model.layer4.1.conv3.weight       paramerters num: 1048576\n",
      "       model.layer4.1.bn3.weight        paramerters num: 2048\n",
      "        model.layer4.1.bn3.bias         paramerters num: 2048\n",
      "      model.layer4.2.conv1.weight       paramerters num: 1048576\n",
      "       model.layer4.2.bn1.weight        paramerters num: 512\n",
      "        model.layer4.2.bn1.bias         paramerters num: 512\n",
      "      model.layer4.2.conv2.weight       paramerters num: 2359296\n",
      "       model.layer4.2.bn2.weight        paramerters num: 512\n",
      "        model.layer4.2.bn2.bias         paramerters num: 512\n",
      "      model.layer4.2.conv3.weight       paramerters num: 1048576\n",
      "       model.layer4.2.bn3.weight        paramerters num: 2048\n",
      "        model.layer4.2.bn3.bias         paramerters num: 2048\n",
      "            model.fc.weight             paramerters num: 20480\n",
      "             model.fc.bias              paramerters num: 10\n"
     ]
    }
   ],
   "execution_count": 8
  },
  {
   "metadata": {},
   "cell_type": "code",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "archive\\training\\n0\\n0018.jpg 0\n",
      "torch.Size([3, 224, 224]) 0\n"
     ]
    }
   ],
   "execution_count": 4,
   "source": [
    "# 图片路径 及 标签\n",
    "for fpath, label in train_ds.imgs:\n",
    "    print(fpath, label)\n",
    "    break\n",
    "\n",
    "for img, label in train_ds:\n",
    "    # c, h, w  label\n",
    "    print(img.shape, label)\n",
    "    break"
   ]
  },
  {
   "metadata": {},
   "cell_type": "code",
   "outputs": [],
   "execution_count": 5,
   "source": [
    "# 遍历train_ds得到每张图片，计算每个通道的均值和方差\n",
    "def cal_mean_std(ds):\n",
    "    mean = 0.\n",
    "    std = 0.\n",
    "    for img, _ in ds:\n",
    "        mean += img.mean(dim=(1, 2))\n",
    "        std += img.std(dim=(1, 2))\n",
    "    mean /= len(ds)\n",
    "    std /= len(ds)\n",
    "    return mean, std\n",
    "\n",
    "# 经过 normalize 后 均值为0，方差为1\n",
    "# print(cal_mean_std(train_ds))"
   ]
  },
  {
   "metadata": {},
   "cell_type": "code",
   "outputs": [],
   "execution_count": 6,
   "source": [
    "import torch.nn as nn\n",
     "from torch.utils.data import DataLoader\n",
    "\n",
    "batch_size = 16\n",
    "# 从数据集到dataloader\n",
    "train_loader = DataLoader(train_ds, batch_size=batch_size, shuffle=True)\n",
    "val_loader = DataLoader(val_ds, batch_size=batch_size, shuffle=False)"
   ]
  },
  {
   "metadata": {},
   "cell_type": "code",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([16, 3, 224, 224])\n",
      "torch.Size([16])\n"
     ]
    }
   ],
   "execution_count": 7,
   "source": [
    "for imgs, labels in train_loader:\n",
    "    print(imgs.shape)\n",
    "    print(labels.shape)\n",
    "    break"
   ]
  },
  {
   "metadata": {},
   "cell_type": "markdown",
   "source": "## 定义模型"
  },
  {
   "cell_type": "code",
   "source": [
    "model = ResNet50(num_classes=10, frozen=True)\n",
    "def count_parameters(model): #计算模型总参数量\n",
    "    return sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
    "count_parameters(model)"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2025-02-27T06:20:28.339540Z",
     "start_time": "2025-02-27T06:20:28.028410Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2048\n",
      "1000\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "1069066"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 9
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-27T06:20:28.343079Z",
     "start_time": "2025-02-27T06:20:28.340546Z"
    }
   },
   "cell_type": "code",
   "source": "",
   "outputs": [],
   "execution_count": 9
  },
  {
   "cell_type": "code",
   "source": "model",
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2025-02-27T06:20:28.350514Z",
     "start_time": "2025-02-27T06:20:28.344463Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "ResNet50(\n",
       "  (model): ResNet(\n",
       "    (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)\n",
       "    (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    (relu): ReLU(inplace=True)\n",
       "    (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)\n",
       "    (layer1): Sequential(\n",
       "      (0): Bottleneck(\n",
       "        (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "        (downsample): Sequential(\n",
       "          (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "          (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        )\n",
       "      )\n",
       "      (1): Bottleneck(\n",
       "        (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "      )\n",
       "      (2): Bottleneck(\n",
       "        (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "      )\n",
       "    )\n",
       "    (layer2): Sequential(\n",
       "      (0): Bottleneck(\n",
       "        (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "        (downsample): Sequential(\n",
       "          (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "          (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        )\n",
       "      )\n",
       "      (1): Bottleneck(\n",
       "        (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "      )\n",
       "      (2): Bottleneck(\n",
       "        (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "      )\n",
       "      (3): Bottleneck(\n",
       "        (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "      )\n",
       "    )\n",
       "    (layer3): Sequential(\n",
       "      (0): Bottleneck(\n",
       "        (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "        (downsample): Sequential(\n",
       "          (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "          (1): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        )\n",
       "      )\n",
       "      (1): Bottleneck(\n",
       "        (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "      )\n",
       "      (2): Bottleneck(\n",
       "        (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "      )\n",
       "      (3): Bottleneck(\n",
       "        (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "      )\n",
       "      (4): Bottleneck(\n",
       "        (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "      )\n",
       "      (5): Bottleneck(\n",
       "        (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "      )\n",
       "    )\n",
       "    (layer4): Sequential(\n",
       "      (0): Bottleneck(\n",
       "        (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "        (downsample): Sequential(\n",
       "          (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "          (1): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        )\n",
       "      )\n",
       "      (1): Bottleneck(\n",
       "        (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "      )\n",
       "      (2): Bottleneck(\n",
       "        (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "        (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n",
       "        (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "        (relu): ReLU(inplace=True)\n",
       "      )\n",
       "    )\n",
       "    (avgpool): AdaptiveAvgPool2d(output_size=(1, 1))\n",
       "    (fc): Linear(in_features=2048, out_features=10, bias=True)\n",
       "  )\n",
       ")"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 10
  },
  {
   "cell_type": "code",
   "source": [
     "total_params = sum(p.numel() for p in model.parameters())\n",
     "print(f\"Total parameters: {total_params}\")"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2025-02-27T06:20:28.357831Z",
     "start_time": "2025-02-27T06:20:28.351813Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Total trainable parameters: 23528522\n"
     ]
    }
   ],
   "execution_count": 11
  },
  {
   "cell_type": "code",
   "source": [
    "# Sanity check: AdaptiveAvgPool2d((1, 1)) collapses any spatial size to 1x1.\n",
    "# Locals renamed so the builtin `input` is not shadowed.\n",
    "pool = nn.AdaptiveAvgPool2d(output_size=(1, 1))\n",
    "feature_map = torch.randn(1, 2048, 9, 9)\n",
    "pooled = pool(feature_map)\n",
    "pooled.shape"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2025-02-27T06:20:28.369796Z",
     "start_time": "2025-02-27T06:20:28.359019Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([1, 2048, 1, 1])"
      ]
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 12
  },
  {
   "cell_type": "code",
   "source": [
    "# Parameter count of one Bottleneck conv2: in_ch * kH * kW * out_ch (bias=False)\n",
    "512*3*3*512"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2025-02-27T06:20:28.377303Z",
     "start_time": "2025-02-27T06:20:28.372801Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "2359296"
      ]
     },
     "execution_count": 13,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 13
  },
  {
   "cell_type": "code",
   "source": [
    "# Parameter count of one Bottleneck conv3: in_ch * 1 * 1 * out_ch (bias=False)\n",
    "512*1*1*2048"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2025-02-27T06:20:28.382786Z",
     "start_time": "2025-02-27T06:20:28.378819Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "1048576"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 14
  },
  {
   "cell_type": "code",
   "source": [
    "# NOTE(review): presumably a quick channel-reduction ratio check -- confirm intent\n",
    "512/16"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2025-02-27T06:20:28.389616Z",
     "start_time": "2025-02-27T06:20:28.383789Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "32.0"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 15
  },
  {
   "attachments": {},
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 训练\n",
    "\n",
    "pytorch的训练需要自行实现，包括\n",
    "1. 定义损失函数\n",
    "2. 定义优化器\n",
    "3. 定义训练步\n",
    "4. 训练"
   ]
  },
  {
   "cell_type": "code",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-27T06:20:28.476649Z",
     "start_time": "2025-02-27T06:20:28.390621Z"
    }
   },
   "source": [
    "from sklearn.metrics import accuracy_score\n",
    "\n",
    "@torch.no_grad()\n",
    "def evaluating(model, dataloader, loss_fct):\n",
    "    \"\"\"Run one evaluation pass over `dataloader`.\n",
    "\n",
    "    Args:\n",
    "        model: network to evaluate (caller is responsible for model.eval()).\n",
    "        dataloader: yields (datas, labels) batches.\n",
    "        loss_fct: criterion mapping (logits, labels) -> scalar loss.\n",
    "\n",
    "    Returns:\n",
    "        (mean_loss, accuracy): per-batch mean loss and overall accuracy.\n",
    "    \"\"\"\n",
    "    loss_list = []\n",
    "    pred_list = []\n",
    "    label_list = []\n",
    "    for datas, labels in dataloader:\n",
    "        datas = datas.to(device)\n",
    "        labels = labels.to(device)\n",
    "        # forward pass\n",
    "        logits = model(datas)\n",
    "        loss = loss_fct(logits, labels)         # validation loss for this batch\n",
    "        loss_list.append(loss.item())\n",
    "        \n",
    "        preds = logits.argmax(axis=-1)    # predicted class per sample\n",
    "        pred_list.extend(preds.cpu().numpy().tolist())\n",
    "        label_list.extend(labels.cpu().numpy().tolist())\n",
    "        \n",
    "    acc = accuracy_score(label_list, pred_list)\n",
    "    # NOTE(review): np.mean weights batches equally, so a smaller last batch\n",
    "    # is slightly over-weighted in the mean loss.\n",
    "    return np.mean(loss_list), acc\n"
   ],
   "outputs": [],
   "execution_count": 16
  },
  {
   "attachments": {},
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### TensorBoard 可视化\n",
    "\n",
    "\n",
    "训练过程中可以使用如下命令启动tensorboard服务。\n",
    "\n",
    "```shell\n",
    "# --logdir: log directory, --host: bind IP, --port: port\n",
    "tensorboard --logdir=runs --host 0.0.0.0 --port 8848\n",
    "```"
   ]
  },
  {
   "cell_type": "code",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-02-27T06:20:28.654614Z",
     "start_time": "2025-02-27T06:20:28.478157Z"
    }
   },
   "source": [
    "from torch.utils.tensorboard import SummaryWriter\n",
    "\n",
    "\n",
    "class TensorBoardCallback:\n",
    "    \"\"\"Thin wrapper around SummaryWriter that logs loss/accuracy/lr scalars.\"\"\"\n",
    "\n",
    "    def __init__(self, log_dir, flush_secs=10):\n",
    "        \"\"\"\n",
    "        Args:\n",
    "            log_dir (str): dir to write log.\n",
    "            flush_secs (int, optional): write to disk each flush_secs seconds. Defaults to 10.\n",
    "        \"\"\"\n",
    "        self.writer = SummaryWriter(log_dir=log_dir, flush_secs=flush_secs)\n",
    "\n",
    "    def draw_model(self, model, input_shape):\n",
    "        # Trace the model once with a random tensor so the graph renders in TensorBoard.\n",
    "        self.writer.add_graph(model, input_to_model=torch.randn(input_shape))\n",
    "        \n",
    "    def add_loss_scalars(self, step, loss, val_loss):\n",
    "        # Plot train/val loss on the same chart under \"training/loss\".\n",
    "        self.writer.add_scalars(\n",
    "            main_tag=\"training/loss\", \n",
    "            tag_scalar_dict={\"loss\": loss, \"val_loss\": val_loss},\n",
    "            global_step=step,\n",
    "            )\n",
    "        \n",
    "    def add_acc_scalars(self, step, acc, val_acc):\n",
    "        # Plot train/val accuracy on the same chart under \"training/accuracy\".\n",
    "        self.writer.add_scalars(\n",
    "            main_tag=\"training/accuracy\",\n",
    "            tag_scalar_dict={\"accuracy\": acc, \"val_accuracy\": val_acc},\n",
    "            global_step=step,\n",
    "        )\n",
    "        \n",
    "    def add_lr_scalars(self, step, learning_rate):\n",
    "        # Track the (possibly scheduled) learning rate over time.\n",
    "        self.writer.add_scalars(\n",
    "            main_tag=\"training/learning_rate\",\n",
    "            tag_scalar_dict={\"learning_rate\": learning_rate},\n",
    "            global_step=step,\n",
    "            \n",
    "        )\n",
    "    \n",
    "    def __call__(self, step, **kwargs):\n",
    "        \"\"\"Log whichever of loss/val_loss, acc/val_acc, lr are present in kwargs.\"\"\"\n",
    "        # add loss\n",
    "        loss = kwargs.pop(\"loss\", None)\n",
    "        val_loss = kwargs.pop(\"val_loss\", None)\n",
    "        if loss is not None and val_loss is not None:\n",
    "            self.add_loss_scalars(step, loss, val_loss)\n",
    "        # add acc\n",
    "        acc = kwargs.pop(\"acc\", None)\n",
    "        val_acc = kwargs.pop(\"val_acc\", None)\n",
    "        if acc is not None and val_acc is not None:\n",
    "            self.add_acc_scalars(step, acc, val_acc)\n",
    "        # add lr\n",
    "        learning_rate = kwargs.pop(\"lr\", None)\n",
    "        if learning_rate is not None:\n",
    "            self.add_lr_scalars(step, learning_rate)\n"
   ],
   "outputs": [
    {
     "ename": "ModuleNotFoundError",
     "evalue": "No module named 'tensorboard'",
     "output_type": "error",
     "traceback": [
      "\u001B[1;31m---------------------------------------------------------------------------\u001B[0m",
      "\u001B[1;31mModuleNotFoundError\u001B[0m                       Traceback (most recent call last)",
      "Cell \u001B[1;32mIn[17], line 1\u001B[0m\n\u001B[1;32m----> 1\u001B[0m \u001B[38;5;28;01mfrom\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[38;5;21;01mtorch\u001B[39;00m\u001B[38;5;21;01m.\u001B[39;00m\u001B[38;5;21;01mutils\u001B[39;00m\u001B[38;5;21;01m.\u001B[39;00m\u001B[38;5;21;01mtensorboard\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[38;5;28;01mimport\u001B[39;00m SummaryWriter\n\u001B[0;32m      4\u001B[0m \u001B[38;5;28;01mclass\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[38;5;21;01mTensorBoardCallback\u001B[39;00m:\n\u001B[0;32m      5\u001B[0m     \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[38;5;21m__init__\u001B[39m(\u001B[38;5;28mself\u001B[39m, log_dir, flush_secs\u001B[38;5;241m=\u001B[39m\u001B[38;5;241m10\u001B[39m):\n",
      "File \u001B[1;32m~\\venv\\Lib\\site-packages\\torch\\utils\\tensorboard\\__init__.py:1\u001B[0m\n\u001B[1;32m----> 1\u001B[0m \u001B[38;5;28;01mimport\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[38;5;21;01mtensorboard\u001B[39;00m\n\u001B[0;32m      2\u001B[0m \u001B[38;5;28;01mfrom\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[38;5;21;01mtorch\u001B[39;00m\u001B[38;5;21;01m.\u001B[39;00m\u001B[38;5;21;01m_vendor\u001B[39;00m\u001B[38;5;21;01m.\u001B[39;00m\u001B[38;5;21;01mpackaging\u001B[39;00m\u001B[38;5;21;01m.\u001B[39;00m\u001B[38;5;21;01mversion\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[38;5;28;01mimport\u001B[39;00m Version\n\u001B[0;32m      4\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28mhasattr\u001B[39m(tensorboard, \u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m__version__\u001B[39m\u001B[38;5;124m\"\u001B[39m) \u001B[38;5;129;01mor\u001B[39;00m Version(\n\u001B[0;32m      5\u001B[0m     tensorboard\u001B[38;5;241m.\u001B[39m__version__\n\u001B[0;32m      6\u001B[0m ) \u001B[38;5;241m<\u001B[39m Version(\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m1.15\u001B[39m\u001B[38;5;124m\"\u001B[39m):\n",
      "\u001B[1;31mModuleNotFoundError\u001B[0m: No module named 'tensorboard'"
     ]
    }
   ],
   "execution_count": 17
  },
  {
   "attachments": {},
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Save Best\n"
   ]
  },
  {
   "cell_type": "code",
   "metadata": {},
   "source": [
    "class SaveCheckpointsCallback:\n",
    "    def __init__(self, save_dir, save_step=5000, save_best_only=True):\n",
    "        \"\"\"\n",
    "        Save a checkpoint every `save_step` steps.\n",
    "\n",
    "        Args:\n",
    "            save_dir (str): dir to save checkpoint\n",
    "            save_step (int, optional): step frequency at which to save. Defaults to 5000.\n",
    "            save_best_only (bool, optional): If True, only keep the best model; otherwise save a file at every save point.\n",
    "        \"\"\"\n",
    "        self.save_dir = save_dir\n",
    "        self.save_step = save_step\n",
    "        self.save_best_only = save_best_only\n",
    "        self.best_metrics = -1\n",
    "        \n",
    "        # makedirs (not mkdir) so missing parent directories are created too;\n",
    "        # exist_ok avoids a crash if the directory already exists.\n",
    "        os.makedirs(self.save_dir, exist_ok=True)\n",
    "        \n",
    "    def __call__(self, step, state_dict, metric=None):\n",
    "        # Only act on steps that are multiples of save_step.\n",
    "        if step % self.save_step > 0:\n",
    "            return\n",
    "        \n",
    "        if self.save_best_only:\n",
    "            assert metric is not None\n",
    "            if metric >= self.best_metrics:\n",
    "                # save checkpoints\n",
    "                torch.save(state_dict, os.path.join(self.save_dir, \"best.ckpt\"))\n",
    "                # update best metrics\n",
    "                self.best_metrics = metric\n",
    "        else:\n",
    "            torch.save(state_dict, os.path.join(self.save_dir, f\"{step}.ckpt\"))\n",
    "\n"
   ],
   "outputs": [],
   "execution_count": null
  },
  {
   "attachments": {},
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Early Stop"
   ]
  },
  {
   "cell_type": "code",
   "metadata": {},
   "source": [
    "class EarlyStopCallback:\n",
    "    def __init__(self, patience=5, min_delta=0.01):\n",
    "        \"\"\"Signals when the monitored metric has stopped improving.\n",
    "\n",
    "        Args:\n",
    "            patience (int, optional): number of evaluations with no improvement after\n",
    "                which training should stop. Defaults to 5.\n",
    "            min_delta (float, optional): minimum increase of the metric that counts as\n",
    "                an improvement; anything smaller counts as no improvement. Defaults to 0.01.\n",
    "        \"\"\"\n",
    "        self.patience = patience\n",
    "        self.min_delta = min_delta\n",
    "        self.best_metric = -1\n",
    "        self.counter = 0\n",
    "        \n",
    "    def __call__(self, metric):\n",
    "        improved = metric >= self.best_metric + self.min_delta\n",
    "        if not improved:\n",
    "            self.counter += 1\n",
    "            return\n",
    "        # New best: remember it and restart the patience window.\n",
    "        self.best_metric = metric\n",
    "        self.counter = 0\n",
    "            \n",
    "    @property\n",
    "    def early_stop(self):\n",
    "        # True once `patience` consecutive non-improving evaluations were seen.\n",
    "        return self.counter >= self.patience\n"
   ],
   "outputs": [],
   "execution_count": null
  },
  {
   "cell_type": "code",
   "metadata": {},
   "source": [
    "# Training loop plus the script that wires model/loss/optimizer/callbacks together.\n",
    "def training(\n",
    "    model, \n",
    "    train_loader, \n",
    "    val_loader, \n",
    "    epoch, \n",
    "    loss_fct, \n",
    "    optimizer, \n",
    "    tensorboard_callback=None,\n",
    "    save_ckpt_callback=None,\n",
    "    early_stop_callback=None,\n",
    "    eval_step=500,\n",
    "    ):\n",
    "    \"\"\"Train `model` for `epoch` epochs, evaluating every `eval_step` steps.\n",
    "\n",
    "    Returns:\n",
    "        dict: {\"train\": [...], \"val\": [...]} per-step records of loss/acc/step.\n",
    "    \"\"\"\n",
    "    record_dict = {\n",
    "        \"train\": [],\n",
    "        \"val\": []\n",
    "    }\n",
    "    \n",
    "    global_step = 0\n",
    "    model.train()\n",
    "    with tqdm(total=epoch * len(train_loader)) as pbar:\n",
    "        for epoch_id in range(epoch):\n",
    "            # training\n",
    "            for datas, labels in train_loader:\n",
    "                datas = datas.to(device)\n",
    "                labels = labels.to(device)\n",
    "                # zero the gradients\n",
    "                optimizer.zero_grad()\n",
    "                # forward pass\n",
    "                logits = model(datas)\n",
    "                # compute loss\n",
    "                loss = loss_fct(logits, labels)\n",
    "                # backpropagate\n",
    "                loss.backward()\n",
    "                # optimizer step (also applies any learning-rate changes)\n",
    "                optimizer.step()\n",
    "                preds = logits.argmax(axis=-1)\n",
    "            \n",
    "                acc = accuracy_score(labels.cpu().numpy(), preds.cpu().numpy())    \n",
    "                loss = loss.cpu().item()\n",
    "                # record\n",
    "                \n",
    "                record_dict[\"train\"].append({\n",
    "                    \"loss\": loss, \"acc\": acc, \"step\": global_step\n",
    "                })\n",
    "                \n",
    "                # evaluating\n",
    "                if global_step % eval_step == 0:\n",
    "                    model.eval()\n",
    "                    val_loss, val_acc = evaluating(model, val_loader, loss_fct)\n",
    "                    record_dict[\"val\"].append({\n",
    "                        \"loss\": val_loss, \"acc\": val_acc, \"step\": global_step\n",
    "                    })\n",
    "                    model.train()\n",
    "                    \n",
    "                    # 1. TensorBoard visualization\n",
    "                    if tensorboard_callback is not None:\n",
    "                        tensorboard_callback(\n",
    "                            global_step, \n",
    "                            loss=loss, val_loss=val_loss,\n",
    "                            acc=acc, val_acc=val_acc,\n",
    "                            lr=optimizer.param_groups[0][\"lr\"],\n",
    "                            )\n",
    "                \n",
    "                    # 2. save model checkpoint\n",
    "                    if save_ckpt_callback is not None:\n",
    "                        save_ckpt_callback(global_step, model.state_dict(), metric=val_acc)\n",
    "\n",
    "                    # 3. Early Stop\n",
    "                    if early_stop_callback is not None:\n",
    "                        early_stop_callback(val_acc)\n",
    "                        if early_stop_callback.early_stop:\n",
    "                            print(f\"Early stop at epoch {epoch_id} / global_step {global_step}\")\n",
    "                            return record_dict\n",
    "                    \n",
    "                # update step\n",
    "                global_step += 1\n",
    "                pbar.update(1)\n",
    "                pbar.set_postfix({\"epoch\": epoch_id})\n",
    "        \n",
    "    return record_dict\n",
    "        \n",
    "\n",
    "epoch = 20\n",
    "\n",
    "model = ResNet50(num_classes=10)\n",
    "\n",
    "# 1. loss function: cross entropy\n",
    "loss_fct = nn.CrossEntropyLoss()\n",
    "# 2. optimizer: plain SGD\n",
    "# Optimizers specified in the torch.optim package\n",
    "optimizer = torch.optim.SGD(model.parameters(), lr=0.01, momentum=0.0)\n",
    "\n",
    "# 1. tensorboard visualization\n",
    "if not os.path.exists(\"runs\"):\n",
    "    os.mkdir(\"runs\")\n",
    "# tensorboard_callback = TensorBoardCallback(\"runs/monkeys-resnet50\")\n",
    "# tensorboard_callback.draw_model(model, [1, 3, img_h, img_w])\n",
    "# 2. save best\n",
    "if not os.path.exists(\"checkpoints\"):\n",
    "    os.makedirs(\"checkpoints\")\n",
    "save_ckpt_callback = SaveCheckpointsCallback(\"checkpoints/monkeys-resnet50\", save_step=len(train_loader), save_best_only=True)\n",
    "# 3. early stop\n",
    "early_stop_callback = EarlyStopCallback(patience=5)\n",
    "\n",
    "model = model.to(device)\n",
    "record = training(\n",
    "    model, \n",
    "    train_loader, \n",
    "    val_loader, \n",
    "    epoch, \n",
    "    loss_fct, \n",
    "    optimizer, \n",
    "    tensorboard_callback=None,\n",
    "    save_ckpt_callback=save_ckpt_callback,\n",
    "    early_stop_callback=early_stop_callback,\n",
    "    eval_step=len(train_loader)\n",
    "    )"
   ],
   "outputs": [],
   "execution_count": null
  },
  {
   "cell_type": "code",
   "source": [
    "# Draw the computation graph with torchviz.\n",
    "\n",
    "import torch\n",
    "from torchviz import make_dot\n",
    "\n",
    "# The dummy input must live on the same device as the model; the model was\n",
    "# moved to `device` above, so a CPU tensor would make the forward pass fail.\n",
    "dummy_input = torch.randn(1, 3, 224, 224).to(device)  # replace with your input shape\n",
    "\n",
    "# Forward pass to generate the computation graph\n",
    "output = model(dummy_input)\n",
    "\n",
    "# Visualize the model architecture\n",
    "dot = make_dot(output, params=dict(model.named_parameters()))\n",
    "dot.render(\"model_architecture\", format=\"png\")  # Save the visualization as an image\n"
   ],
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "execution_count": null
  },
  {
   "cell_type": "code",
   "metadata": {},
   "source": [
    "# Note when plotting: unlike accuracy, the loss is not bounded to [0, 1].\n",
    "def plot_learning_curves(record_dict, sample_step=500):\n",
    "    \"\"\"Plot train/val curves, one subplot per recorded metric.\n",
    "\n",
    "    Args:\n",
    "        record_dict (dict): {\"train\": [...], \"val\": [...]} lists of\n",
    "            {\"loss\", \"acc\", \"step\"} dicts as produced by `training`.\n",
    "        sample_step (int, optional): subsampling factor for the dense train records.\n",
    "    \"\"\"\n",
    "    # build DataFrame\n",
    "    train_df = pd.DataFrame(record_dict[\"train\"]).set_index(\"step\").iloc[::sample_step]\n",
    "    val_df = pd.DataFrame(record_dict[\"val\"]).set_index(\"step\")\n",
    "\n",
    "    # plot\n",
    "    fig_num = len(train_df.columns)\n",
    "    fig, axs = plt.subplots(1, fig_num, figsize=(5 * fig_num, 5))\n",
    "    # plt.subplots returns a bare Axes (not an array) when fig_num == 1,\n",
    "    # which would break axs[idx]; normalize to a 1-D array.\n",
    "    axs = np.atleast_1d(axs)\n",
    "    for idx, item in enumerate(train_df.columns):\n",
    "        axs[idx].plot(train_df.index, train_df[item], label=f\"train_{item}\")\n",
    "        axs[idx].plot(val_df.index, val_df[item], label=f\"val_{item}\")\n",
    "        axs[idx].grid()\n",
    "        axs[idx].legend()\n",
    "        axs[idx].set_xlabel(\"step\")\n",
    "    \n",
    "    plt.show()\n",
    "\n",
    "plot_learning_curves(record, sample_step=10)  # x-axis is steps"
   ],
   "outputs": [],
   "execution_count": null
  },
  {
   "attachments": {},
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 评估"
   ]
  },
  {
   "cell_type": "code",
   "metadata": {},
   "source": [
    "# dataload for evaluating\n",
    "\n",
    "# load checkpoint; weights_only=True restricts unpickling to tensors/primitives,\n",
    "# which is safe for a plain state_dict and avoids arbitrary-code pickle loads.\n",
    "model.load_state_dict(torch.load(\"checkpoints/monkeys-resnet50/best.ckpt\", map_location=\"cpu\", weights_only=True))\n",
    "\n",
    "model.eval()\n",
    "loss, acc = evaluating(model, val_loader, loss_fct)\n",
    "print(f\"loss:     {loss:.4f}\\naccuracy: {acc:.4f}\")"
   ],
   "outputs": [],
   "execution_count": null
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "pytorch",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.8"
  },
  "orig_nbformat": 4
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
