{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "from torchvision import models\n",
    "# Backbone used by the FCN models below; no weights argument is passed,\n",
    "# so the ResNet-34 uses its default initialisation.\n",
    "model=models.resnet34()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### resnet34"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "ResNet(\n",
       "  (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)\n",
       "  (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "  (relu): ReLU(inplace)\n",
       "  (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)\n",
       "  (layer1): Sequential(\n",
       "    (0): BasicBlock(\n",
       "      (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "    (1): BasicBlock(\n",
       "      (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "    (2): BasicBlock(\n",
       "      (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "  )\n",
       "  (layer2): Sequential(\n",
       "    (0): BasicBlock(\n",
       "      (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (downsample): Sequential(\n",
       "        (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      )\n",
       "    )\n",
       "    (1): BasicBlock(\n",
       "      (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "    (2): BasicBlock(\n",
       "      (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "    (3): BasicBlock(\n",
       "      (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "  )\n",
       "  (layer3): Sequential(\n",
       "    (0): BasicBlock(\n",
       "      (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (downsample): Sequential(\n",
       "        (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      )\n",
       "    )\n",
       "    (1): BasicBlock(\n",
       "      (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "    (2): BasicBlock(\n",
       "      (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "    (3): BasicBlock(\n",
       "      (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "    (4): BasicBlock(\n",
       "      (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "    (5): BasicBlock(\n",
       "      (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "  )\n",
       "  (layer4): Sequential(\n",
       "    (0): BasicBlock(\n",
       "      (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (downsample): Sequential(\n",
       "        (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
       "        (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      )\n",
       "    )\n",
       "    (1): BasicBlock(\n",
       "      (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "    (2): BasicBlock(\n",
       "      (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "      (relu): ReLU(inplace)\n",
       "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
       "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "    )\n",
       "  )\n",
       "  (avgpool): AdaptiveAvgPool2d(output_size=(1, 1))\n",
       "  (fc): Linear(in_features=512, out_features=1000, bias=True)\n",
       ")"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Display the backbone architecture (rich repr of the ResNet-34 module).\n",
     "model"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### FCN32s"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "class FCN32s(nn.Module):\n",
    "    def __init__(self,pretrained_net, n_class=10):\n",
    "        super().__init__()\n",
    "        self.pretrained_net=pretrained_net\n",
    "        self.n_class = n_class\n",
    "        self.relu    = nn.ReLU(inplace=True)\n",
    "        self.deconv1 = nn.ConvTranspose2d(512, 512, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn1     = nn.BatchNorm2d(512)\n",
    "        self.deconv2 = nn.ConvTranspose2d(512, 256, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn2     = nn.BatchNorm2d(256)\n",
    "        self.deconv3 = nn.ConvTranspose2d(256, 128, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn3     = nn.BatchNorm2d(128)\n",
    "        self.deconv4 = nn.ConvTranspose2d(128, 64, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn4     = nn.BatchNorm2d(64)\n",
    "        self.deconv5 = nn.ConvTranspose2d(64, 32, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn5     = nn.BatchNorm2d(32)\n",
    "        self.classifier = nn.Conv2d(32, n_class, kernel_size=1)\n",
    "\n",
    "    def forward(self, x):\n",
    "        x32 = nn.Sequential(*list(self.pretrained_net.children())[:-2])(x)  # size=(N, 512, x.H/32, x.W/32)\n",
    "        score = self.bn1(self.relu(self.deconv1(x32)))     # size=(N, 512, x.H/16, x.W/16)\n",
    "        score = self.bn2(self.relu(self.deconv2(score)))  # size=(N, 256, x.H/8, x.W/8)\n",
    "        score = self.bn3(self.relu(self.deconv3(score)))  # size=(N, 128, x.H/4, x.W/4)\n",
    "        score = self.bn4(self.relu(self.deconv4(score)))  # size=(N, 64, x.H/2, x.W/2)\n",
    "        score = self.bn5(self.relu(self.deconv5(score)))  # size=(N, 32, x.H, x.W)\n",
    "        score = self.classifier(score)                    # size=(N, n_class, x.H/1, x.W/1)\n",
    "        return score                                      # size=(N, n_class, x.H/1, x.W/1)\n",
    "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "----------------------------------------------------------------\n",
      "        Layer (type)               Output Shape         Param #\n",
      "================================================================\n",
      "            Conv2d-1         [-1, 64, 112, 112]           9,408\n",
      "       BatchNorm2d-2         [-1, 64, 112, 112]             128\n",
      "              ReLU-3         [-1, 64, 112, 112]               0\n",
      "         MaxPool2d-4           [-1, 64, 56, 56]               0\n",
      "            Conv2d-5           [-1, 64, 56, 56]          36,864\n",
      "       BatchNorm2d-6           [-1, 64, 56, 56]             128\n",
      "              ReLU-7           [-1, 64, 56, 56]               0\n",
      "            Conv2d-8           [-1, 64, 56, 56]          36,864\n",
      "       BatchNorm2d-9           [-1, 64, 56, 56]             128\n",
      "             ReLU-10           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-11           [-1, 64, 56, 56]               0\n",
      "           Conv2d-12           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-13           [-1, 64, 56, 56]             128\n",
      "             ReLU-14           [-1, 64, 56, 56]               0\n",
      "           Conv2d-15           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-16           [-1, 64, 56, 56]             128\n",
      "             ReLU-17           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-18           [-1, 64, 56, 56]               0\n",
      "           Conv2d-19           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-20           [-1, 64, 56, 56]             128\n",
      "             ReLU-21           [-1, 64, 56, 56]               0\n",
      "           Conv2d-22           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-23           [-1, 64, 56, 56]             128\n",
      "             ReLU-24           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-25           [-1, 64, 56, 56]               0\n",
      "           Conv2d-26          [-1, 128, 28, 28]          73,728\n",
      "      BatchNorm2d-27          [-1, 128, 28, 28]             256\n",
      "             ReLU-28          [-1, 128, 28, 28]               0\n",
      "           Conv2d-29          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-30          [-1, 128, 28, 28]             256\n",
      "           Conv2d-31          [-1, 128, 28, 28]           8,192\n",
      "      BatchNorm2d-32          [-1, 128, 28, 28]             256\n",
      "             ReLU-33          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-34          [-1, 128, 28, 28]               0\n",
      "           Conv2d-35          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-36          [-1, 128, 28, 28]             256\n",
      "             ReLU-37          [-1, 128, 28, 28]               0\n",
      "           Conv2d-38          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-39          [-1, 128, 28, 28]             256\n",
      "             ReLU-40          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-41          [-1, 128, 28, 28]               0\n",
      "           Conv2d-42          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-43          [-1, 128, 28, 28]             256\n",
      "             ReLU-44          [-1, 128, 28, 28]               0\n",
      "           Conv2d-45          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-46          [-1, 128, 28, 28]             256\n",
      "             ReLU-47          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-48          [-1, 128, 28, 28]               0\n",
      "           Conv2d-49          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-50          [-1, 128, 28, 28]             256\n",
      "             ReLU-51          [-1, 128, 28, 28]               0\n",
      "           Conv2d-52          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-53          [-1, 128, 28, 28]             256\n",
      "             ReLU-54          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-55          [-1, 128, 28, 28]               0\n",
      "           Conv2d-56          [-1, 256, 14, 14]         294,912\n",
      "      BatchNorm2d-57          [-1, 256, 14, 14]             512\n",
      "             ReLU-58          [-1, 256, 14, 14]               0\n",
      "           Conv2d-59          [-1, 256, 14, 14]         589,824\n",
      "      BatchNorm2d-60          [-1, 256, 14, 14]             512\n",
      "           Conv2d-61          [-1, 256, 14, 14]          32,768\n",
      "      BatchNorm2d-62          [-1, 256, 14, 14]             512\n",
      "             ReLU-63          [-1, 256, 14, 14]               0\n",
      "       BasicBlock-64          [-1, 256, 14, 14]               0\n",
      "           Conv2d-65          [-1, 256, 14, 14]         589,824\n",
      "      BatchNorm2d-66          [-1, 256, 14, 14]             512\n",
      "             ReLU-67          [-1, 256, 14, 14]               0\n",
      "           Conv2d-68          [-1, 256, 14, 14]         589,824\n",
      "      BatchNorm2d-69          [-1, 256, 14, 14]             512\n",
      "             ReLU-70          [-1, 256, 14, 14]               0\n",
      "       BasicBlock-71          [-1, 256, 14, 14]               0\n",
      "           Conv2d-72          [-1, 256, 14, 14]         589,824\n",
      "      BatchNorm2d-73          [-1, 256, 14, 14]             512\n",
      "             ReLU-74          [-1, 256, 14, 14]               0\n",
      "           Conv2d-75          [-1, 256, 14, 14]         589,824\n",
      "      BatchNorm2d-76          [-1, 256, 14, 14]             512\n",
      "             ReLU-77          [-1, 256, 14, 14]               0\n",
      "       BasicBlock-78          [-1, 256, 14, 14]               0\n",
      "           Conv2d-79          [-1, 256, 14, 14]         589,824\n",
      "      BatchNorm2d-80          [-1, 256, 14, 14]             512\n",
      "             ReLU-81          [-1, 256, 14, 14]               0\n",
      "           Conv2d-82          [-1, 256, 14, 14]         589,824\n",
      "      BatchNorm2d-83          [-1, 256, 14, 14]             512\n",
      "             ReLU-84          [-1, 256, 14, 14]               0\n",
      "       BasicBlock-85          [-1, 256, 14, 14]               0\n",
      "           Conv2d-86          [-1, 256, 14, 14]         589,824\n",
      "      BatchNorm2d-87          [-1, 256, 14, 14]             512\n",
      "             ReLU-88          [-1, 256, 14, 14]               0\n",
      "           Conv2d-89          [-1, 256, 14, 14]         589,824\n",
      "      BatchNorm2d-90          [-1, 256, 14, 14]             512\n",
      "             ReLU-91          [-1, 256, 14, 14]               0\n",
      "       BasicBlock-92          [-1, 256, 14, 14]               0\n",
      "           Conv2d-93          [-1, 256, 14, 14]         589,824\n",
      "      BatchNorm2d-94          [-1, 256, 14, 14]             512\n",
      "             ReLU-95          [-1, 256, 14, 14]               0\n",
      "           Conv2d-96          [-1, 256, 14, 14]         589,824\n",
      "      BatchNorm2d-97          [-1, 256, 14, 14]             512\n",
      "             ReLU-98          [-1, 256, 14, 14]               0\n",
      "       BasicBlock-99          [-1, 256, 14, 14]               0\n",
      "          Conv2d-100            [-1, 512, 7, 7]       1,179,648\n",
      "     BatchNorm2d-101            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-102            [-1, 512, 7, 7]               0\n",
      "          Conv2d-103            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-104            [-1, 512, 7, 7]           1,024\n",
      "          Conv2d-105            [-1, 512, 7, 7]         131,072\n",
      "     BatchNorm2d-106            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-107            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-108            [-1, 512, 7, 7]               0\n",
      "          Conv2d-109            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-110            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-111            [-1, 512, 7, 7]               0\n",
      "          Conv2d-112            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-113            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-114            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-115            [-1, 512, 7, 7]               0\n",
      "          Conv2d-116            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-117            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-118            [-1, 512, 7, 7]               0\n",
      "          Conv2d-119            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-120            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-121            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-122            [-1, 512, 7, 7]               0\n",
      " ConvTranspose2d-123          [-1, 512, 14, 14]       2,359,808\n",
      "            ReLU-124          [-1, 512, 14, 14]               0\n",
      "     BatchNorm2d-125          [-1, 512, 14, 14]           1,024\n",
      " ConvTranspose2d-126          [-1, 256, 28, 28]       1,179,904\n",
      "            ReLU-127          [-1, 256, 28, 28]               0\n",
      "     BatchNorm2d-128          [-1, 256, 28, 28]             512\n",
      " ConvTranspose2d-129          [-1, 128, 56, 56]         295,040\n",
      "            ReLU-130          [-1, 128, 56, 56]               0\n",
      "     BatchNorm2d-131          [-1, 128, 56, 56]             256\n",
      " ConvTranspose2d-132         [-1, 64, 112, 112]          73,792\n",
      "            ReLU-133         [-1, 64, 112, 112]               0\n",
      "     BatchNorm2d-134         [-1, 64, 112, 112]             128\n",
      " ConvTranspose2d-135         [-1, 32, 224, 224]          18,464\n",
      "            ReLU-136         [-1, 32, 224, 224]               0\n",
      "     BatchNorm2d-137         [-1, 32, 224, 224]              64\n",
      "          Conv2d-138         [-1, 10, 224, 224]             330\n",
      "================================================================\n",
      "Total params: 25,213,994\n",
      "Trainable params: 25,213,994\n",
      "Non-trainable params: 0\n",
      "----------------------------------------------------------------\n",
      "Input size (MB): 0.57\n",
      "Forward/backward pass size (MB): 171.31\n",
      "Params size (MB): 96.18\n",
      "Estimated Total Size (MB): 268.07\n",
      "----------------------------------------------------------------\n"
     ]
    }
   ],
   "source": [
     "# Build the FCN-32s model around the ResNet-34 backbone and print a\n",
     "# per-layer summary for a 224x224 RGB input.\n",
     "fcn32=FCN32s(model)\n",
     "from torchsummary import summary\n",
     "summary(fcn32, (3,224,224))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "### FCN16s"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "class FCN16s(nn.Module):\n",
    "    def __init__(self, pretrained_net, n_class=10):\n",
    "        super().__init__()\n",
    "        \n",
    "        self.stage1 = nn.Sequential(*list(pretrained_net.children())[:-3]) # 第一段\n",
    "        self.stage2 = list(pretrained_net.children())[-3] # 第二段\n",
    "        \n",
    "        self.n_class = n_class\n",
    "        self.pretrained_net = pretrained_net\n",
    "        self.relu    = nn.ReLU(inplace=True)\n",
    "        \n",
    "        self.deconv1 = nn.ConvTranspose2d(512, 256, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn1     = nn.BatchNorm2d(256)\n",
    "        \n",
    "        self.deconv2 = nn.ConvTranspose2d(256, 256, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn2     = nn.BatchNorm2d(256)\n",
    "        \n",
    "        self.deconv3 = nn.ConvTranspose2d(256, 128, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn3     = nn.BatchNorm2d(128)\n",
    "        self.deconv4 = nn.ConvTranspose2d(128, 64, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn4     = nn.BatchNorm2d(64)\n",
    "        self.deconv5 = nn.ConvTranspose2d(64, 32, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn5     = nn.BatchNorm2d(32)\n",
    "        self.classifier = nn.Conv2d(32, n_class, kernel_size=1)\n",
    "\n",
    "    def forward(self, x):\n",
    "        x16 = self.stage1(x)   # size=(N, 256, x.H/16, x.W/16)\n",
    "        x32 = self.stage2(x16)   # size=(N, 512, x.H/32, x.W/32)\n",
    "\n",
    "        score = self.relu(self.deconv1(x32))               # size=(N, 256, x.H/16, x.W/16)\n",
    "        score = self.bn1(score + x16)                      # element-wise add, size=(N, 256, x.H/16, x.W/16)\n",
    "        \n",
    "        score = self.bn2(self.relu(self.deconv2(score)))  # size=(N, 256, x.H/8, x.W/8)\n",
    "        score = self.bn3(self.relu(self.deconv3(score)))  # size=(N, 128, x.H/4, x.W/4)\n",
    "        score = self.bn4(self.relu(self.deconv4(score)))  # size=(N, 64, x.H/2, x.W/2)\n",
    "        score = self.bn5(self.relu(self.deconv5(score)))  # size=(N, 32, x.H, x.W)\n",
    "        score = self.classifier(score)                    # size=(N, n_class, x.H/1, x.W/1)\n",
    "\n",
    "        return score"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "----------------------------------------------------------------\n",
      "        Layer (type)               Output Shape         Param #\n",
      "================================================================\n",
      "            Conv2d-1         [-1, 64, 112, 112]           9,408\n",
      "            Conv2d-2         [-1, 64, 112, 112]           9,408\n",
      "       BatchNorm2d-3         [-1, 64, 112, 112]             128\n",
      "       BatchNorm2d-4         [-1, 64, 112, 112]             128\n",
      "              ReLU-5         [-1, 64, 112, 112]               0\n",
      "              ReLU-6         [-1, 64, 112, 112]               0\n",
      "         MaxPool2d-7           [-1, 64, 56, 56]               0\n",
      "         MaxPool2d-8           [-1, 64, 56, 56]               0\n",
      "            Conv2d-9           [-1, 64, 56, 56]          36,864\n",
      "           Conv2d-10           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-11           [-1, 64, 56, 56]             128\n",
      "      BatchNorm2d-12           [-1, 64, 56, 56]             128\n",
      "             ReLU-13           [-1, 64, 56, 56]               0\n",
      "             ReLU-14           [-1, 64, 56, 56]               0\n",
      "           Conv2d-15           [-1, 64, 56, 56]          36,864\n",
      "           Conv2d-16           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-17           [-1, 64, 56, 56]             128\n",
      "      BatchNorm2d-18           [-1, 64, 56, 56]             128\n",
      "             ReLU-19           [-1, 64, 56, 56]               0\n",
      "             ReLU-20           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-21           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-22           [-1, 64, 56, 56]               0\n",
      "           Conv2d-23           [-1, 64, 56, 56]          36,864\n",
      "           Conv2d-24           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-25           [-1, 64, 56, 56]             128\n",
      "      BatchNorm2d-26           [-1, 64, 56, 56]             128\n",
      "             ReLU-27           [-1, 64, 56, 56]               0\n",
      "             ReLU-28           [-1, 64, 56, 56]               0\n",
      "           Conv2d-29           [-1, 64, 56, 56]          36,864\n",
      "           Conv2d-30           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-31           [-1, 64, 56, 56]             128\n",
      "      BatchNorm2d-32           [-1, 64, 56, 56]             128\n",
      "             ReLU-33           [-1, 64, 56, 56]               0\n",
      "             ReLU-34           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-35           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-36           [-1, 64, 56, 56]               0\n",
      "           Conv2d-37           [-1, 64, 56, 56]          36,864\n",
      "           Conv2d-38           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-39           [-1, 64, 56, 56]             128\n",
      "      BatchNorm2d-40           [-1, 64, 56, 56]             128\n",
      "             ReLU-41           [-1, 64, 56, 56]               0\n",
      "             ReLU-42           [-1, 64, 56, 56]               0\n",
      "           Conv2d-43           [-1, 64, 56, 56]          36,864\n",
      "           Conv2d-44           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-45           [-1, 64, 56, 56]             128\n",
      "      BatchNorm2d-46           [-1, 64, 56, 56]             128\n",
      "             ReLU-47           [-1, 64, 56, 56]               0\n",
      "             ReLU-48           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-49           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-50           [-1, 64, 56, 56]               0\n",
      "           Conv2d-51          [-1, 128, 28, 28]          73,728\n",
      "           Conv2d-52          [-1, 128, 28, 28]          73,728\n",
      "      BatchNorm2d-53          [-1, 128, 28, 28]             256\n",
      "      BatchNorm2d-54          [-1, 128, 28, 28]             256\n",
      "             ReLU-55          [-1, 128, 28, 28]               0\n",
      "             ReLU-56          [-1, 128, 28, 28]               0\n",
      "           Conv2d-57          [-1, 128, 28, 28]         147,456\n",
      "           Conv2d-58          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-59          [-1, 128, 28, 28]             256\n",
      "      BatchNorm2d-60          [-1, 128, 28, 28]             256\n",
      "           Conv2d-61          [-1, 128, 28, 28]           8,192\n",
      "           Conv2d-62          [-1, 128, 28, 28]           8,192\n",
      "      BatchNorm2d-63          [-1, 128, 28, 28]             256\n",
      "      BatchNorm2d-64          [-1, 128, 28, 28]             256\n",
      "             ReLU-65          [-1, 128, 28, 28]               0\n",
      "             ReLU-66          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-67          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-68          [-1, 128, 28, 28]               0\n",
      "           Conv2d-69          [-1, 128, 28, 28]         147,456\n",
      "           Conv2d-70          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-71          [-1, 128, 28, 28]             256\n",
      "      BatchNorm2d-72          [-1, 128, 28, 28]             256\n",
      "             ReLU-73          [-1, 128, 28, 28]               0\n",
      "             ReLU-74          [-1, 128, 28, 28]               0\n",
      "           Conv2d-75          [-1, 128, 28, 28]         147,456\n",
      "           Conv2d-76          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-77          [-1, 128, 28, 28]             256\n",
      "      BatchNorm2d-78          [-1, 128, 28, 28]             256\n",
      "             ReLU-79          [-1, 128, 28, 28]               0\n",
      "             ReLU-80          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-81          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-82          [-1, 128, 28, 28]               0\n",
      "           Conv2d-83          [-1, 128, 28, 28]         147,456\n",
      "           Conv2d-84          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-85          [-1, 128, 28, 28]             256\n",
      "      BatchNorm2d-86          [-1, 128, 28, 28]             256\n",
      "             ReLU-87          [-1, 128, 28, 28]               0\n",
      "             ReLU-88          [-1, 128, 28, 28]               0\n",
      "           Conv2d-89          [-1, 128, 28, 28]         147,456\n",
      "           Conv2d-90          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-91          [-1, 128, 28, 28]             256\n",
      "      BatchNorm2d-92          [-1, 128, 28, 28]             256\n",
      "             ReLU-93          [-1, 128, 28, 28]               0\n",
      "             ReLU-94          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-95          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-96          [-1, 128, 28, 28]               0\n",
      "           Conv2d-97          [-1, 128, 28, 28]         147,456\n",
      "           Conv2d-98          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-99          [-1, 128, 28, 28]             256\n",
      "     BatchNorm2d-100          [-1, 128, 28, 28]             256\n",
      "            ReLU-101          [-1, 128, 28, 28]               0\n",
      "            ReLU-102          [-1, 128, 28, 28]               0\n",
      "          Conv2d-103          [-1, 128, 28, 28]         147,456\n",
      "          Conv2d-104          [-1, 128, 28, 28]         147,456\n",
      "     BatchNorm2d-105          [-1, 128, 28, 28]             256\n",
      "     BatchNorm2d-106          [-1, 128, 28, 28]             256\n",
      "            ReLU-107          [-1, 128, 28, 28]               0\n",
      "            ReLU-108          [-1, 128, 28, 28]               0\n",
      "      BasicBlock-109          [-1, 128, 28, 28]               0\n",
      "      BasicBlock-110          [-1, 128, 28, 28]               0\n",
      "          Conv2d-111          [-1, 256, 14, 14]         294,912\n",
      "          Conv2d-112          [-1, 256, 14, 14]         294,912\n",
      "     BatchNorm2d-113          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-114          [-1, 256, 14, 14]             512\n",
      "            ReLU-115          [-1, 256, 14, 14]               0\n",
      "            ReLU-116          [-1, 256, 14, 14]               0\n",
      "          Conv2d-117          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-118          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-119          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-120          [-1, 256, 14, 14]             512\n",
      "          Conv2d-121          [-1, 256, 14, 14]          32,768\n",
      "          Conv2d-122          [-1, 256, 14, 14]          32,768\n",
      "     BatchNorm2d-123          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-124          [-1, 256, 14, 14]             512\n",
      "            ReLU-125          [-1, 256, 14, 14]               0\n",
      "            ReLU-126          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-127          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-128          [-1, 256, 14, 14]               0\n",
      "          Conv2d-129          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-130          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-131          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-132          [-1, 256, 14, 14]             512\n",
      "            ReLU-133          [-1, 256, 14, 14]               0\n",
      "            ReLU-134          [-1, 256, 14, 14]               0\n",
      "          Conv2d-135          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-136          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-137          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-138          [-1, 256, 14, 14]             512\n",
      "            ReLU-139          [-1, 256, 14, 14]               0\n",
      "            ReLU-140          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-141          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-142          [-1, 256, 14, 14]               0\n",
      "          Conv2d-143          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-144          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-145          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-146          [-1, 256, 14, 14]             512\n",
      "            ReLU-147          [-1, 256, 14, 14]               0\n",
      "            ReLU-148          [-1, 256, 14, 14]               0\n",
      "          Conv2d-149          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-150          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-151          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-152          [-1, 256, 14, 14]             512\n",
      "            ReLU-153          [-1, 256, 14, 14]               0\n",
      "            ReLU-154          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-155          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-156          [-1, 256, 14, 14]               0\n",
      "          Conv2d-157          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-158          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-159          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-160          [-1, 256, 14, 14]             512\n",
      "            ReLU-161          [-1, 256, 14, 14]               0\n",
      "            ReLU-162          [-1, 256, 14, 14]               0\n",
      "          Conv2d-163          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-164          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-165          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-166          [-1, 256, 14, 14]             512\n",
      "            ReLU-167          [-1, 256, 14, 14]               0\n",
      "            ReLU-168          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-169          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-170          [-1, 256, 14, 14]               0\n",
      "          Conv2d-171          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-172          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-173          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-174          [-1, 256, 14, 14]             512\n",
      "            ReLU-175          [-1, 256, 14, 14]               0\n",
      "            ReLU-176          [-1, 256, 14, 14]               0\n",
      "          Conv2d-177          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-178          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-179          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-180          [-1, 256, 14, 14]             512\n",
      "            ReLU-181          [-1, 256, 14, 14]               0\n",
      "            ReLU-182          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-183          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-184          [-1, 256, 14, 14]               0\n",
      "          Conv2d-185          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-186          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-187          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-188          [-1, 256, 14, 14]             512\n",
      "            ReLU-189          [-1, 256, 14, 14]               0\n",
      "            ReLU-190          [-1, 256, 14, 14]               0\n",
      "          Conv2d-191          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-192          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-193          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-194          [-1, 256, 14, 14]             512\n",
      "            ReLU-195          [-1, 256, 14, 14]               0\n",
      "            ReLU-196          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-197          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-198          [-1, 256, 14, 14]               0\n",
      "          Conv2d-199            [-1, 512, 7, 7]       1,179,648\n",
      "          Conv2d-200            [-1, 512, 7, 7]       1,179,648\n",
      "     BatchNorm2d-201            [-1, 512, 7, 7]           1,024\n",
      "     BatchNorm2d-202            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-203            [-1, 512, 7, 7]               0\n",
      "            ReLU-204            [-1, 512, 7, 7]               0\n",
      "          Conv2d-205            [-1, 512, 7, 7]       2,359,296\n",
      "          Conv2d-206            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-207            [-1, 512, 7, 7]           1,024\n",
      "     BatchNorm2d-208            [-1, 512, 7, 7]           1,024\n",
      "          Conv2d-209            [-1, 512, 7, 7]         131,072\n",
      "          Conv2d-210            [-1, 512, 7, 7]         131,072\n",
      "     BatchNorm2d-211            [-1, 512, 7, 7]           1,024\n",
      "     BatchNorm2d-212            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-213            [-1, 512, 7, 7]               0\n",
      "            ReLU-214            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-215            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-216            [-1, 512, 7, 7]               0\n",
      "          Conv2d-217            [-1, 512, 7, 7]       2,359,296\n",
      "          Conv2d-218            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-219            [-1, 512, 7, 7]           1,024\n",
      "     BatchNorm2d-220            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-221            [-1, 512, 7, 7]               0\n",
      "            ReLU-222            [-1, 512, 7, 7]               0\n",
      "          Conv2d-223            [-1, 512, 7, 7]       2,359,296\n",
      "          Conv2d-224            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-225            [-1, 512, 7, 7]           1,024\n",
      "     BatchNorm2d-226            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-227            [-1, 512, 7, 7]               0\n",
      "            ReLU-228            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-229            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-230            [-1, 512, 7, 7]               0\n",
      "          Conv2d-231            [-1, 512, 7, 7]       2,359,296\n",
      "          Conv2d-232            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-233            [-1, 512, 7, 7]           1,024\n",
      "     BatchNorm2d-234            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-235            [-1, 512, 7, 7]               0\n",
      "            ReLU-236            [-1, 512, 7, 7]               0\n",
      "          Conv2d-237            [-1, 512, 7, 7]       2,359,296\n",
      "          Conv2d-238            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-239            [-1, 512, 7, 7]           1,024\n",
      "     BatchNorm2d-240            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-241            [-1, 512, 7, 7]               0\n",
      "            ReLU-242            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-243            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-244            [-1, 512, 7, 7]               0\n",
      " ConvTranspose2d-245          [-1, 256, 14, 14]       1,179,904\n",
      "            ReLU-246          [-1, 256, 14, 14]               0\n",
      "     BatchNorm2d-247          [-1, 256, 14, 14]             512\n",
      " ConvTranspose2d-248          [-1, 256, 28, 28]         590,080\n",
      "            ReLU-249          [-1, 256, 28, 28]               0\n",
      "     BatchNorm2d-250          [-1, 256, 28, 28]             512\n",
      " ConvTranspose2d-251          [-1, 128, 56, 56]         295,040\n",
      "            ReLU-252          [-1, 128, 56, 56]               0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "     BatchNorm2d-253          [-1, 128, 56, 56]             256\n",
      " ConvTranspose2d-254         [-1, 64, 112, 112]          73,792\n",
      "            ReLU-255         [-1, 64, 112, 112]               0\n",
      "     BatchNorm2d-256         [-1, 64, 112, 112]             128\n",
      " ConvTranspose2d-257         [-1, 32, 224, 224]          18,464\n",
      "            ReLU-258         [-1, 32, 224, 224]               0\n",
      "     BatchNorm2d-259         [-1, 32, 224, 224]              64\n",
      "          Conv2d-260         [-1, 10, 224, 224]             330\n",
      "================================================================\n",
      "Total params: 44,728,426\n",
      "Trainable params: 44,728,426\n",
      "Non-trainable params: 0\n",
      "----------------------------------------------------------------\n",
      "Input size (MB): 0.57\n",
      "Forward/backward pass size (MB): 266.44\n",
      "Params size (MB): 170.63\n",
      "Estimated Total Size (MB): 437.64\n",
      "----------------------------------------------------------------\n"
     ]
    }
   ],
   "source": [
    "fcn16=FCN16s(model)\n",
    "from torchsummary import summary\n",
    "summary(fcn16, (3,224,224))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### FCN8s"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "class FCN8s(nn.Module):\n",
    "    def __init__(self, pretrained_net, n_class=10):\n",
    "        super().__init__()\n",
    "        self.n_class = n_class\n",
    "        self.pretrained_net = pretrained_net\n",
    "        self.relu    = nn.ReLU(inplace=True)\n",
    "        \n",
    "        self.stage1 = nn.Sequential(*list(pretrained_net.children())[:-4]) # 512  channel\n",
    "        self.stage2 = list(model.children())[-4] # 第二段\n",
    "        self.stage3=list(model.children())[-3]\n",
    "        \n",
    "        self.deconv1 = nn.ConvTranspose2d(512, 256, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn1     = nn.BatchNorm2d(256)\n",
    "        self.deconv2 = nn.ConvTranspose2d(256, 128, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn2     = nn.BatchNorm2d(128)\n",
    "        self.deconv3 = nn.ConvTranspose2d(128, 128, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn3     = nn.BatchNorm2d(128)\n",
    "        self.deconv4 = nn.ConvTranspose2d(128, 64, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn4     = nn.BatchNorm2d(64)\n",
    "        self.deconv5 = nn.ConvTranspose2d(64, 32, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)\n",
    "        self.bn5     = nn.BatchNorm2d(32)\n",
    "        self.classifier = nn.Conv2d(32, n_class, kernel_size=1)\n",
    "\n",
    "    def forward(self, x):\n",
    "        x8=self.stage1(x)\n",
    "        x16=self.stage2(x8)\n",
    "        x32=self.stage3(x16)\n",
    "\n",
    "        score = self.relu(self.deconv1(x32))               # size=(N, 256, x.H/16, x.W/16)\n",
    "        score = self.bn1(score + x16)                      # element-wise add, size=(N, 256, x.H/16, x.W/16)\n",
    "        \n",
    "        score = self.relu(self.deconv2(score))            # size=(N, 128, x.H/8, x.W/8)\n",
    "        score = self.bn2(score + x8)                      # element-wise add, size=(N, 128, x.H/8, x.W/8)\n",
    "        \n",
    "        score = self.bn3(self.relu(self.deconv3(score)))  # size=(N, 128, x.H/4, x.W/4)\n",
    "        score = self.bn4(self.relu(self.deconv4(score)))  # size=(N, 64, x.H/2, x.W/2)\n",
    "        score = self.bn5(self.relu(self.deconv5(score)))  # size=(N, 32, x.H, x.W)\n",
    "        score = self.classifier(score)                    # size=(N, n_class, x.H/1, x.W/1)\n",
    "\n",
    "        return score  # size=(N, n_class, x.H/1, x.W/1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "----------------------------------------------------------------\n",
      "        Layer (type)               Output Shape         Param #\n",
      "================================================================\n",
      "            Conv2d-1         [-1, 64, 112, 112]           9,408\n",
      "            Conv2d-2         [-1, 64, 112, 112]           9,408\n",
      "       BatchNorm2d-3         [-1, 64, 112, 112]             128\n",
      "       BatchNorm2d-4         [-1, 64, 112, 112]             128\n",
      "              ReLU-5         [-1, 64, 112, 112]               0\n",
      "              ReLU-6         [-1, 64, 112, 112]               0\n",
      "         MaxPool2d-7           [-1, 64, 56, 56]               0\n",
      "         MaxPool2d-8           [-1, 64, 56, 56]               0\n",
      "            Conv2d-9           [-1, 64, 56, 56]          36,864\n",
      "           Conv2d-10           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-11           [-1, 64, 56, 56]             128\n",
      "      BatchNorm2d-12           [-1, 64, 56, 56]             128\n",
      "             ReLU-13           [-1, 64, 56, 56]               0\n",
      "             ReLU-14           [-1, 64, 56, 56]               0\n",
      "           Conv2d-15           [-1, 64, 56, 56]          36,864\n",
      "           Conv2d-16           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-17           [-1, 64, 56, 56]             128\n",
      "      BatchNorm2d-18           [-1, 64, 56, 56]             128\n",
      "             ReLU-19           [-1, 64, 56, 56]               0\n",
      "             ReLU-20           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-21           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-22           [-1, 64, 56, 56]               0\n",
      "           Conv2d-23           [-1, 64, 56, 56]          36,864\n",
      "           Conv2d-24           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-25           [-1, 64, 56, 56]             128\n",
      "      BatchNorm2d-26           [-1, 64, 56, 56]             128\n",
      "             ReLU-27           [-1, 64, 56, 56]               0\n",
      "             ReLU-28           [-1, 64, 56, 56]               0\n",
      "           Conv2d-29           [-1, 64, 56, 56]          36,864\n",
      "           Conv2d-30           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-31           [-1, 64, 56, 56]             128\n",
      "      BatchNorm2d-32           [-1, 64, 56, 56]             128\n",
      "             ReLU-33           [-1, 64, 56, 56]               0\n",
      "             ReLU-34           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-35           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-36           [-1, 64, 56, 56]               0\n",
      "           Conv2d-37           [-1, 64, 56, 56]          36,864\n",
      "           Conv2d-38           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-39           [-1, 64, 56, 56]             128\n",
      "      BatchNorm2d-40           [-1, 64, 56, 56]             128\n",
      "             ReLU-41           [-1, 64, 56, 56]               0\n",
      "             ReLU-42           [-1, 64, 56, 56]               0\n",
      "           Conv2d-43           [-1, 64, 56, 56]          36,864\n",
      "           Conv2d-44           [-1, 64, 56, 56]          36,864\n",
      "      BatchNorm2d-45           [-1, 64, 56, 56]             128\n",
      "      BatchNorm2d-46           [-1, 64, 56, 56]             128\n",
      "             ReLU-47           [-1, 64, 56, 56]               0\n",
      "             ReLU-48           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-49           [-1, 64, 56, 56]               0\n",
      "       BasicBlock-50           [-1, 64, 56, 56]               0\n",
      "           Conv2d-51          [-1, 128, 28, 28]          73,728\n",
      "           Conv2d-52          [-1, 128, 28, 28]          73,728\n",
      "      BatchNorm2d-53          [-1, 128, 28, 28]             256\n",
      "      BatchNorm2d-54          [-1, 128, 28, 28]             256\n",
      "             ReLU-55          [-1, 128, 28, 28]               0\n",
      "             ReLU-56          [-1, 128, 28, 28]               0\n",
      "           Conv2d-57          [-1, 128, 28, 28]         147,456\n",
      "           Conv2d-58          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-59          [-1, 128, 28, 28]             256\n",
      "      BatchNorm2d-60          [-1, 128, 28, 28]             256\n",
      "           Conv2d-61          [-1, 128, 28, 28]           8,192\n",
      "           Conv2d-62          [-1, 128, 28, 28]           8,192\n",
      "      BatchNorm2d-63          [-1, 128, 28, 28]             256\n",
      "      BatchNorm2d-64          [-1, 128, 28, 28]             256\n",
      "             ReLU-65          [-1, 128, 28, 28]               0\n",
      "             ReLU-66          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-67          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-68          [-1, 128, 28, 28]               0\n",
      "           Conv2d-69          [-1, 128, 28, 28]         147,456\n",
      "           Conv2d-70          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-71          [-1, 128, 28, 28]             256\n",
      "      BatchNorm2d-72          [-1, 128, 28, 28]             256\n",
      "             ReLU-73          [-1, 128, 28, 28]               0\n",
      "             ReLU-74          [-1, 128, 28, 28]               0\n",
      "           Conv2d-75          [-1, 128, 28, 28]         147,456\n",
      "           Conv2d-76          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-77          [-1, 128, 28, 28]             256\n",
      "      BatchNorm2d-78          [-1, 128, 28, 28]             256\n",
      "             ReLU-79          [-1, 128, 28, 28]               0\n",
      "             ReLU-80          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-81          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-82          [-1, 128, 28, 28]               0\n",
      "           Conv2d-83          [-1, 128, 28, 28]         147,456\n",
      "           Conv2d-84          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-85          [-1, 128, 28, 28]             256\n",
      "      BatchNorm2d-86          [-1, 128, 28, 28]             256\n",
      "             ReLU-87          [-1, 128, 28, 28]               0\n",
      "             ReLU-88          [-1, 128, 28, 28]               0\n",
      "           Conv2d-89          [-1, 128, 28, 28]         147,456\n",
      "           Conv2d-90          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-91          [-1, 128, 28, 28]             256\n",
      "      BatchNorm2d-92          [-1, 128, 28, 28]             256\n",
      "             ReLU-93          [-1, 128, 28, 28]               0\n",
      "             ReLU-94          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-95          [-1, 128, 28, 28]               0\n",
      "       BasicBlock-96          [-1, 128, 28, 28]               0\n",
      "           Conv2d-97          [-1, 128, 28, 28]         147,456\n",
      "           Conv2d-98          [-1, 128, 28, 28]         147,456\n",
      "      BatchNorm2d-99          [-1, 128, 28, 28]             256\n",
      "     BatchNorm2d-100          [-1, 128, 28, 28]             256\n",
      "            ReLU-101          [-1, 128, 28, 28]               0\n",
      "            ReLU-102          [-1, 128, 28, 28]               0\n",
      "          Conv2d-103          [-1, 128, 28, 28]         147,456\n",
      "          Conv2d-104          [-1, 128, 28, 28]         147,456\n",
      "     BatchNorm2d-105          [-1, 128, 28, 28]             256\n",
      "     BatchNorm2d-106          [-1, 128, 28, 28]             256\n",
      "            ReLU-107          [-1, 128, 28, 28]               0\n",
      "            ReLU-108          [-1, 128, 28, 28]               0\n",
      "      BasicBlock-109          [-1, 128, 28, 28]               0\n",
      "      BasicBlock-110          [-1, 128, 28, 28]               0\n",
      "          Conv2d-111          [-1, 256, 14, 14]         294,912\n",
      "          Conv2d-112          [-1, 256, 14, 14]         294,912\n",
      "     BatchNorm2d-113          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-114          [-1, 256, 14, 14]             512\n",
      "            ReLU-115          [-1, 256, 14, 14]               0\n",
      "            ReLU-116          [-1, 256, 14, 14]               0\n",
      "          Conv2d-117          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-118          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-119          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-120          [-1, 256, 14, 14]             512\n",
      "          Conv2d-121          [-1, 256, 14, 14]          32,768\n",
      "          Conv2d-122          [-1, 256, 14, 14]          32,768\n",
      "     BatchNorm2d-123          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-124          [-1, 256, 14, 14]             512\n",
      "            ReLU-125          [-1, 256, 14, 14]               0\n",
      "            ReLU-126          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-127          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-128          [-1, 256, 14, 14]               0\n",
      "          Conv2d-129          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-130          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-131          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-132          [-1, 256, 14, 14]             512\n",
      "            ReLU-133          [-1, 256, 14, 14]               0\n",
      "            ReLU-134          [-1, 256, 14, 14]               0\n",
      "          Conv2d-135          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-136          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-137          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-138          [-1, 256, 14, 14]             512\n",
      "            ReLU-139          [-1, 256, 14, 14]               0\n",
      "            ReLU-140          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-141          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-142          [-1, 256, 14, 14]               0\n",
      "          Conv2d-143          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-144          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-145          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-146          [-1, 256, 14, 14]             512\n",
      "            ReLU-147          [-1, 256, 14, 14]               0\n",
      "            ReLU-148          [-1, 256, 14, 14]               0\n",
      "          Conv2d-149          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-150          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-151          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-152          [-1, 256, 14, 14]             512\n",
      "            ReLU-153          [-1, 256, 14, 14]               0\n",
      "            ReLU-154          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-155          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-156          [-1, 256, 14, 14]               0\n",
      "          Conv2d-157          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-158          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-159          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-160          [-1, 256, 14, 14]             512\n",
      "            ReLU-161          [-1, 256, 14, 14]               0\n",
      "            ReLU-162          [-1, 256, 14, 14]               0\n",
      "          Conv2d-163          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-164          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-165          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-166          [-1, 256, 14, 14]             512\n",
      "            ReLU-167          [-1, 256, 14, 14]               0\n",
      "            ReLU-168          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-169          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-170          [-1, 256, 14, 14]               0\n",
      "          Conv2d-171          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-172          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-173          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-174          [-1, 256, 14, 14]             512\n",
      "            ReLU-175          [-1, 256, 14, 14]               0\n",
      "            ReLU-176          [-1, 256, 14, 14]               0\n",
      "          Conv2d-177          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-178          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-179          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-180          [-1, 256, 14, 14]             512\n",
      "            ReLU-181          [-1, 256, 14, 14]               0\n",
      "            ReLU-182          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-183          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-184          [-1, 256, 14, 14]               0\n",
      "          Conv2d-185          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-186          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-187          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-188          [-1, 256, 14, 14]             512\n",
      "            ReLU-189          [-1, 256, 14, 14]               0\n",
      "            ReLU-190          [-1, 256, 14, 14]               0\n",
      "          Conv2d-191          [-1, 256, 14, 14]         589,824\n",
      "          Conv2d-192          [-1, 256, 14, 14]         589,824\n",
      "     BatchNorm2d-193          [-1, 256, 14, 14]             512\n",
      "     BatchNorm2d-194          [-1, 256, 14, 14]             512\n",
      "            ReLU-195          [-1, 256, 14, 14]               0\n",
      "            ReLU-196          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-197          [-1, 256, 14, 14]               0\n",
      "      BasicBlock-198          [-1, 256, 14, 14]               0\n",
      "          Conv2d-199            [-1, 512, 7, 7]       1,179,648\n",
      "          Conv2d-200            [-1, 512, 7, 7]       1,179,648\n",
      "     BatchNorm2d-201            [-1, 512, 7, 7]           1,024\n",
      "     BatchNorm2d-202            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-203            [-1, 512, 7, 7]               0\n",
      "            ReLU-204            [-1, 512, 7, 7]               0\n",
      "          Conv2d-205            [-1, 512, 7, 7]       2,359,296\n",
      "          Conv2d-206            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-207            [-1, 512, 7, 7]           1,024\n",
      "     BatchNorm2d-208            [-1, 512, 7, 7]           1,024\n",
      "          Conv2d-209            [-1, 512, 7, 7]         131,072\n",
      "          Conv2d-210            [-1, 512, 7, 7]         131,072\n",
      "     BatchNorm2d-211            [-1, 512, 7, 7]           1,024\n",
      "     BatchNorm2d-212            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-213            [-1, 512, 7, 7]               0\n",
      "            ReLU-214            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-215            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-216            [-1, 512, 7, 7]               0\n",
      "          Conv2d-217            [-1, 512, 7, 7]       2,359,296\n",
      "          Conv2d-218            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-219            [-1, 512, 7, 7]           1,024\n",
      "     BatchNorm2d-220            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-221            [-1, 512, 7, 7]               0\n",
      "            ReLU-222            [-1, 512, 7, 7]               0\n",
      "          Conv2d-223            [-1, 512, 7, 7]       2,359,296\n",
      "          Conv2d-224            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-225            [-1, 512, 7, 7]           1,024\n",
      "     BatchNorm2d-226            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-227            [-1, 512, 7, 7]               0\n",
      "            ReLU-228            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-229            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-230            [-1, 512, 7, 7]               0\n",
      "          Conv2d-231            [-1, 512, 7, 7]       2,359,296\n",
      "          Conv2d-232            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-233            [-1, 512, 7, 7]           1,024\n",
      "     BatchNorm2d-234            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-235            [-1, 512, 7, 7]               0\n",
      "            ReLU-236            [-1, 512, 7, 7]               0\n",
      "          Conv2d-237            [-1, 512, 7, 7]       2,359,296\n",
      "          Conv2d-238            [-1, 512, 7, 7]       2,359,296\n",
      "     BatchNorm2d-239            [-1, 512, 7, 7]           1,024\n",
      "     BatchNorm2d-240            [-1, 512, 7, 7]           1,024\n",
      "            ReLU-241            [-1, 512, 7, 7]               0\n",
      "            ReLU-242            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-243            [-1, 512, 7, 7]               0\n",
      "      BasicBlock-244            [-1, 512, 7, 7]               0\n",
      " ConvTranspose2d-245          [-1, 256, 14, 14]       1,179,904\n",
      "            ReLU-246          [-1, 256, 14, 14]               0\n",
      "     BatchNorm2d-247          [-1, 256, 14, 14]             512\n",
      " ConvTranspose2d-248          [-1, 128, 28, 28]         295,040\n",
      "            ReLU-249          [-1, 128, 28, 28]               0\n",
      "     BatchNorm2d-250          [-1, 128, 28, 28]             256\n",
      " ConvTranspose2d-251          [-1, 128, 56, 56]         147,584\n",
      "            ReLU-252          [-1, 128, 56, 56]               0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "     BatchNorm2d-253          [-1, 128, 56, 56]             256\n",
      " ConvTranspose2d-254         [-1, 64, 112, 112]          73,792\n",
      "            ReLU-255         [-1, 64, 112, 112]               0\n",
      "     BatchNorm2d-256         [-1, 64, 112, 112]             128\n",
      " ConvTranspose2d-257         [-1, 32, 224, 224]          18,464\n",
      "            ReLU-258         [-1, 32, 224, 224]               0\n",
      "     BatchNorm2d-259         [-1, 32, 224, 224]              64\n",
      "          Conv2d-260         [-1, 10, 224, 224]             330\n",
      "================================================================\n",
      "Total params: 44,285,674\n",
      "Trainable params: 44,285,674\n",
      "Non-trainable params: 0\n",
      "----------------------------------------------------------------\n",
      "Input size (MB): 0.57\n",
      "Forward/backward pass size (MB): 264.14\n",
      "Params size (MB): 168.94\n",
      "Estimated Total Size (MB): 433.65\n",
      "----------------------------------------------------------------\n"
     ]
    }
   ],
   "source": [
    "fcn8=FCN8s(model)\n",
    "from torchsummary import summary\n",
    "summary(fcn8, (3,224,224))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
