{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "from torchsummary import summary"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "论文提出的几种不同深度的网络从大的结构上讲都分为五组，每组之内的卷积层参数都是一样的，因此可以构造一个通用的类，减少很多重复代码。"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "class VGG(nn.Module):\n",
    "    \"\"\"\n",
    "    VGG builder\n",
    "    \"\"\"\n",
    "    def __init__(self,arch:object,num_classes=1000) -> object:\n",
    "        super(VGG,self).__init__()\n",
    "        self.in_channels=3\n",
    "        self.conv3_64=self.__make_layer(64,arch[0])\n",
    "        self.conv3_128=self.__make_layer(128,arch[1])\n",
    "        self.conv3_256=self.__make_layer(256,arch[2])\n",
    "        self.conv3_512a=self.__make_layer(512,arch[3])\n",
    "        self.conv3_512b=self.__make_layer(512,arch[4])\n",
    "        self.fc1=nn.Linear(7*7*512,4096)\n",
    "        self.bn1=nn.BatchNorm1d(4096)\n",
    "        self.bn2=nn.BatchNorm1d(4096)\n",
    "        self.fc2=nn.Linear(4096,4096)\n",
    "        self.fc3=nn.Linear(4096,num_classes)\n",
    "        \n",
    "    def __make_layer(self,channels,num):\n",
    "        layers=[]\n",
    "        for i in range(num):\n",
    "            layers.append(nn.Conv2d(self.in_channels,channels,3,stride=1,padding=1,\n",
    "bias=False))\n",
    "            layers.append(nn.BatchNorm2d(channels))\n",
    "            layers.append(nn.ReLU())\n",
    "            self.in_channels=channels\n",
    "        return nn.Sequential(*layers)\n",
    "    \n",
    "    def forward(self,x):\n",
    "        out=self.conv3_64(x)\n",
    "        out=F.max_pool2d(out,2)\n",
    "        out=self.conv3_128(out)\n",
    "        out=F.max_pool2d(out,2)\n",
    "        out=self.conv3_256(out)\n",
    "        out=F.max_pool2d(out,2)\n",
    "        out=self.conv3_512a(out)\n",
    "        out=F.max_pool2d(out,2)\n",
    "        out=self.conv3_512b(out)\n",
    "        out=F.max_pool2d(out,2)\n",
    "        out=out.view(out.size(0),-1)\n",
    "        out=self.fc1(out)\n",
    "        out=self.bn1(out)\n",
    "        out=F.relu(out)\n",
    "        out=self.fc2(out)\n",
    "        out=self.bn2(out)\n",
    "        out=F.relu(out)\n",
    "        return F.softmax(self.fc3(out))\n",
    "        "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "网络的深度通过一个数组控制，数组的元素是每个卷积层组内卷积层的数量"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [],
   "source": [
    "def VGG_11():\n",
    "    \"\"\"VGG-11 (configuration A): 8 conv + 3 FC layers.\"\"\"\n",
    "    return VGG([1,1,2,2,2],num_classes=1000)\n",
    "\n",
    "def VGG_13():\n",
    "    \"\"\"VGG-13 (configuration B): 10 conv + 3 FC layers.\n",
    "\n",
    "    Fixed: was [1,1,2,2,2], which is the VGG-11 configuration.\n",
    "    \"\"\"\n",
    "    return VGG([2,2,2,2,2],num_classes=1000)\n",
    "\n",
    "def VGG_16():\n",
    "    \"\"\"VGG-16 (configuration D): 13 conv + 3 FC layers.\"\"\"\n",
    "    return VGG([2,2,3,3,3],num_classes=1000)\n",
    "\n",
    "def VGG_19():\n",
    "    \"\"\"VGG-19 (configuration E): 16 conv + 3 FC layers.\"\"\"\n",
    "    return VGG([2,2,4,4,4],num_classes=1000)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "测试网络"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "----------------------------------------------------------------\n",
      "        Layer (type)               Output Shape         Param #\n",
      "================================================================\n",
      "            Conv2d-1         [-1, 64, 224, 224]           1,728\n",
      "       BatchNorm2d-2         [-1, 64, 224, 224]             128\n",
      "              ReLU-3         [-1, 64, 224, 224]               0\n",
      "            Conv2d-4         [-1, 64, 224, 224]          36,864\n",
      "       BatchNorm2d-5         [-1, 64, 224, 224]             128\n",
      "              ReLU-6         [-1, 64, 224, 224]               0\n",
      "            Conv2d-7        [-1, 128, 112, 112]          73,728\n",
      "       BatchNorm2d-8        [-1, 128, 112, 112]             256\n",
      "              ReLU-9        [-1, 128, 112, 112]               0\n",
      "           Conv2d-10        [-1, 128, 112, 112]         147,456\n",
      "      BatchNorm2d-11        [-1, 128, 112, 112]             256\n",
      "             ReLU-12        [-1, 128, 112, 112]               0\n",
      "           Conv2d-13          [-1, 256, 56, 56]         294,912\n",
      "      BatchNorm2d-14          [-1, 256, 56, 56]             512\n",
      "             ReLU-15          [-1, 256, 56, 56]               0\n",
      "           Conv2d-16          [-1, 256, 56, 56]         589,824\n",
      "      BatchNorm2d-17          [-1, 256, 56, 56]             512\n",
      "             ReLU-18          [-1, 256, 56, 56]               0\n",
      "           Conv2d-19          [-1, 256, 56, 56]         589,824\n",
      "      BatchNorm2d-20          [-1, 256, 56, 56]             512\n",
      "             ReLU-21          [-1, 256, 56, 56]               0\n",
      "           Conv2d-22          [-1, 256, 56, 56]         589,824\n",
      "      BatchNorm2d-23          [-1, 256, 56, 56]             512\n",
      "             ReLU-24          [-1, 256, 56, 56]               0\n",
      "           Conv2d-25          [-1, 512, 28, 28]       1,179,648\n",
      "      BatchNorm2d-26          [-1, 512, 28, 28]           1,024\n",
      "             ReLU-27          [-1, 512, 28, 28]               0\n",
      "           Conv2d-28          [-1, 512, 28, 28]       2,359,296\n",
      "      BatchNorm2d-29          [-1, 512, 28, 28]           1,024\n",
      "             ReLU-30          [-1, 512, 28, 28]               0\n",
      "           Conv2d-31          [-1, 512, 28, 28]       2,359,296\n",
      "      BatchNorm2d-32          [-1, 512, 28, 28]           1,024\n",
      "             ReLU-33          [-1, 512, 28, 28]               0\n",
      "           Conv2d-34          [-1, 512, 28, 28]       2,359,296\n",
      "      BatchNorm2d-35          [-1, 512, 28, 28]           1,024\n",
      "             ReLU-36          [-1, 512, 28, 28]               0\n",
      "           Conv2d-37          [-1, 512, 14, 14]       2,359,296\n",
      "      BatchNorm2d-38          [-1, 512, 14, 14]           1,024\n",
      "             ReLU-39          [-1, 512, 14, 14]               0\n",
      "           Conv2d-40          [-1, 512, 14, 14]       2,359,296\n",
      "      BatchNorm2d-41          [-1, 512, 14, 14]           1,024\n",
      "             ReLU-42          [-1, 512, 14, 14]               0\n",
      "           Conv2d-43          [-1, 512, 14, 14]       2,359,296\n",
      "      BatchNorm2d-44          [-1, 512, 14, 14]           1,024\n",
      "             ReLU-45          [-1, 512, 14, 14]               0\n",
      "           Conv2d-46          [-1, 512, 14, 14]       2,359,296\n",
      "      BatchNorm2d-47          [-1, 512, 14, 14]           1,024\n",
      "             ReLU-48          [-1, 512, 14, 14]               0\n",
      "           Linear-49                 [-1, 4096]     102,764,544\n",
      "      BatchNorm1d-50                 [-1, 4096]           8,192\n",
      "           Linear-51                 [-1, 4096]      16,781,312\n",
      "      BatchNorm1d-52                 [-1, 4096]           8,192\n",
      "           Linear-53                 [-1, 1000]       4,097,000\n",
      "================================================================\n",
      "Total params: 143,689,128\n",
      "Trainable params: 143,689,128\n",
      "Non-trainable params: 0\n",
      "----------------------------------------------------------------\n",
      "Input size (MB): 0.57\n",
      "Forward/backward pass size (MB): 340.07\n",
      "Params size (MB): 548.13\n",
      "Estimated Total Size (MB): 888.77\n",
      "----------------------------------------------------------------\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\Administrator\\AppData\\Local\\Temp\\ipykernel_11120\\765094004.py:47: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.\n",
      "  return F.softmax(self.fc3(out))\n"
     ]
    }
   ],
   "source": [
    "# Pick a variant to inspect: VGG_11(), VGG_13(), VGG_16(), or VGG_19().\n",
    "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
    "net = VGG_19().to(device)\n",
    "summary(net, (3, 224, 224))"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "base",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.13"
  },
  "orig_nbformat": 4
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
