{
 "cells": [
  {
   "cell_type": "code",
   "id": "initial_id",
   "metadata": {
    "collapsed": true,
    "ExecuteTime": {
     "end_time": "2024-12-02T15:27:33.038027Z",
     "start_time": "2024-12-02T15:27:27.863747Z"
    }
   },
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "from torchvision import datasets, transforms\n",
    "from torch.utils.data import DataLoader"
   ],
   "outputs": [],
   "execution_count": 1
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-12-02T15:27:55.590469Z",
     "start_time": "2024-12-02T15:27:55.569172Z"
    }
   },
   "cell_type": "code",
   "source": [
    "# NOTE(review): hardcoded absolute Windows path -- breaks on any other\n",
    "# machine; consider a configurable DATA_DIR (path left unchanged here).\n",
    "data_dir=\"C:/Users/Lenovo/Desktop/深度/实验三数据集/车辆分类数据集\"\n",
    "# Resize every image to 64x64 and map pixel values to [-1, 1]\n",
    "# (Normalize with mean=std=0.5 per channel).\n",
    "transform = transforms.Compose([\n",
    "    transforms.Resize((64, 64)),\n",
    "    transforms.ToTensor(),\n",
    "    transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))\n",
    "])\n",
    "\n",
    "# ImageFolder infers the class label of each image from its\n",
    "# sub-directory name.\n",
    "dataset = datasets.ImageFolder(data_dir, transform=transform)\n",
    "\n",
    "# NOTE(review): num_workers=4 inside a notebook on Windows can hang\n",
    "# (worker spawn without a __main__ guard) -- confirm this runs on the\n",
    "# target machine.\n",
    "dataloader = DataLoader(dataset, batch_size=32, shuffle=True,num_workers=4)\n"
   ],
   "id": "a9f6a60731a50ca9",
   "outputs": [],
   "execution_count": 2
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-12-02T15:27:56.230066Z",
     "start_time": "2024-12-02T15:27:56.218123Z"
    }
   },
   "cell_type": "code",
   "source": [
    "class ResidualBlock(nn.Module):\n",
    "    \"\"\"Shortcut branch used by Net's skip connections.\n",
    "\n",
    "    Identity case (Stride == 1 and in_channels == out_channels):\n",
    "    BatchNorm + ReLU applied to x.\n",
    "    Projection case: 1x1 strided conv + BatchNorm + ReLU, matching the\n",
    "    main path's channel count and spatial size.\n",
    "\n",
    "    Fix: the projection path previously normalized twice (BatchNorm2d\n",
    "    inside the Sequential, then self.bn again in forward); the redundant\n",
    "    second BatchNorm is removed.\n",
    "    \"\"\"\n",
    "    def __init__(self, in_channels, out_channels, Stride=1):\n",
    "        super(ResidualBlock, self).__init__()\n",
    "        if Stride != 1 or in_channels != out_channels:\n",
    "            # Projection shortcut: 1x1 conv adjusts channels/stride,\n",
    "            # followed by a single BatchNorm.\n",
    "            self.shortcut = nn.Sequential(\n",
    "                nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=Stride, bias=False),\n",
    "                nn.BatchNorm2d(out_channels)\n",
    "            )\n",
    "            self.bn = nn.Identity()  # already normalized inside shortcut\n",
    "        else:\n",
    "            self.shortcut = nn.Sequential()\n",
    "            self.bn = nn.BatchNorm2d(out_channels)\n",
    "\n",
    "    def forward(self, x):\n",
    "        # Normalize exactly once, then ReLU (keeps the original's\n",
    "        # activation on the shortcut branch).\n",
    "        return F.relu(self.bn(self.shortcut(x)))"
   ],
   "id": "ef54779e38267427",
   "outputs": [],
   "execution_count": 3
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-12-02T15:32:33.397203Z",
     "start_time": "2024-12-02T15:32:32.855344Z"
    }
   },
   "cell_type": "code",
   "source": [
    "class Net(nn.Module):\n",
    "    \"\"\"ResNet-18-style classifier: 17 conv layers + 1 FC, 3 output classes.\n",
    "\n",
    "    Four stages of widths 64/128/256/512; stages 2-4 downsample with\n",
    "    stride-2 convs. Skip connections come from the ResidualBlock shortcut\n",
    "    modules (jump*). forward() returns raw logits for CrossEntropyLoss.\n",
    "    \"\"\"\n",
    "    def __init__(self):\n",
    "        super(Net, self).__init__()\n",
    "        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1)\n",
    "        self.bn1 = nn.BatchNorm2d(64)\n",
    "        # NOTE(review): jump1 is reused at two different skip sites in\n",
    "        # forward(); its BatchNorm statistics are shared across both --\n",
    "        # confirm this sharing is intended.\n",
    "        self.jump1=ResidualBlock(64, 64)\n",
    "        self.conv2 = nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1)\n",
    "        self.bn2 = nn.BatchNorm2d(64)\n",
    "        self.conv3 = nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1)\n",
    "        self.bn3 = nn.BatchNorm2d(64)\n",
    "        self.conv4 = nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1)\n",
    "        self.bn4 = nn.BatchNorm2d(64)\n",
    "        self.conv5 = nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1)\n",
    "        self.bn5 = nn.BatchNorm2d(64)\n",
    "        self.jump2=ResidualBlock(64, 128,Stride=2)\n",
    "        self.jump3=ResidualBlock(128, 128)\n",
    "        self.conv6 = nn.Conv2d(64, 128, kernel_size=3, stride=2, padding=1)\n",
    "        self.bn6 = nn.BatchNorm2d(128)\n",
    "        self.conv7 = nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1)\n",
    "        self.bn7 = nn.BatchNorm2d(128)\n",
    "        self.conv8 = nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1)\n",
    "        self.bn8 = nn.BatchNorm2d(128)\n",
    "        self.conv9 = nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1)\n",
    "        self.bn9 = nn.BatchNorm2d(128)\n",
    "        self.jump4=ResidualBlock(128, 256,Stride=2)\n",
    "        self.jump7=ResidualBlock(256, 256)\n",
    "        self.conv10 = nn.Conv2d(128, 256, kernel_size=3, stride=2, padding=1)\n",
    "        self.bn10 = nn.BatchNorm2d(256)\n",
    "        self.conv11 = nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1)\n",
    "        self.bn11 = nn.BatchNorm2d(256)\n",
    "        self.conv12 = nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1)\n",
    "        self.bn12 = nn.BatchNorm2d(256)\n",
    "        self.conv13 = nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1)\n",
    "        self.bn13 = nn.BatchNorm2d(256)\n",
    "        self.jump5=ResidualBlock(256, 512,Stride=2)\n",
    "        self.jump6=ResidualBlock(512, 512)\n",
    "        self.conv14 = nn.Conv2d(256, 512, kernel_size=3, stride=2, padding=1)\n",
    "        self.bn14 = nn.BatchNorm2d(512)\n",
    "        self.conv15 = nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1)\n",
    "        self.bn15 = nn.BatchNorm2d(512)\n",
    "        self.conv16 = nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1)\n",
    "        self.bn16 = nn.BatchNorm2d(512)\n",
    "        self.conv17 = nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1)\n",
    "        self.bn17 = nn.BatchNorm2d(512)\n",
    "        self.fc = nn.Linear(512, 3)\n",
    "\n",
    "    def forward(self, x):\n",
    "        out = F.relu(self.bn1(self.conv1(x)))\n",
    "        mem = out\n",
    "        out = F.relu(self.bn2(self.conv2(out)))\n",
    "        out = F.relu(self.bn3(self.conv3(out)))+self.jump1(mem)\n",
    "        mem=out\n",
    "        out = F.relu(self.bn4(self.conv4(out)))\n",
    "        out = F.relu(self.bn5(self.conv5(out)))+self.jump1(mem)\n",
    "        mem=out\n",
    "        out = F.relu(self.bn6(self.conv6(out)))\n",
    "        out = F.relu(self.bn7(self.conv7(out)))+self.jump2(mem)\n",
    "        mem=out\n",
    "        out = F.relu(self.bn8(self.conv8(out)))\n",
    "        out = F.relu(self.bn9(self.conv9(out)))+self.jump3(mem)\n",
    "        mem=out\n",
    "        # NOTE(review): stages 3-4 below omit the ReLU after each conv+BN,\n",
    "        # unlike stages 1-2 above -- confirm this asymmetry is intended.\n",
    "        out = self.bn10(self.conv10(out))\n",
    "        out = self.bn11(self.conv11(out))+self.jump4(mem)\n",
    "        mem=out\n",
    "        out = self.bn12(self.conv12(out))\n",
    "        out = self.bn13(self.conv13(out))+self.jump7(mem)\n",
    "        mem=out\n",
    "        out = self.bn14(self.conv14(out))\n",
    "        out = self.bn15(self.conv15(out))+self.jump5(mem)\n",
    "        mem=out\n",
    "        out = self.bn16(self.conv16(out))\n",
    "        out = self.bn17(self.conv17(out))+self.jump6(mem)\n",
    "\n",
    "        # Global average pool down to 1x1 (equivalent to the previous\n",
    "        # avg_pool2d(out, 8) for 64x64 inputs, but works for any input size).\n",
    "        out = F.adaptive_avg_pool2d(out, 1)\n",
    "        out = out.view(out.size(0), -1)\n",
    "        # Fix: return raw logits. The previous F.relu on the FC output\n",
    "        # clamped negative logits to zero, which breaks CrossEntropyLoss\n",
    "        # training (the loss expects unnormalized logits).\n",
    "        out = self.fc(out)\n",
    "        return out"
   ],
   "id": "55be07cc7a132c1e",
   "outputs": [],
   "execution_count": 13
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-12-02T15:32:33.498615Z",
     "start_time": "2024-12-02T15:32:33.399218Z"
    }
   },
   "cell_type": "code",
   "source": [
    "net = Net()\n",
    "criterion= nn.CrossEntropyLoss()\n",
    "# NOTE(review): lr=0.01 is high for Adam (library default is 1e-3) --\n",
    "# confirm it was chosen deliberately.\n",
    "optimizer= torch.optim.Adam(net.parameters(),lr=0.01)"
   ],
   "id": "e29a79ff46ae7e7a",
   "outputs": [],
   "execution_count": 14
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-12-02T15:32:34.583393Z",
     "start_time": "2024-12-02T15:32:34.575815Z"
    }
   },
   "cell_type": "code",
   "source": [
    "def train_net(net,dataloader):\n",
    "    \"\"\"Train `net` for 3 epochs on `dataloader`.\n",
    "\n",
    "    Relies on the module-level `optimizer` and `criterion` defined in the\n",
    "    setup cell above. Prints the per-epoch average loss and accuracy and\n",
    "    returns the last epoch's (loss, acc).\n",
    "    \"\"\"\n",
    "    net.train()\n",
    "    train_batches=len(dataloader)\n",
    "\n",
    "    for epoch in range(3):\n",
    "        total_loss=0\n",
    "        correct=0\n",
    "        sample_num=0\n",
    "        for batch_idx, (data, target) in enumerate(dataloader):\n",
    "            optimizer.zero_grad()\n",
    "            output=net(data)\n",
    "            loss=criterion(output,target)\n",
    "            loss.backward()\n",
    "            optimizer.step()\n",
    "\n",
    "            total_loss+=loss.item()\n",
    "            prediction=torch.argmax(output,1)\n",
    "            correct += (prediction==target).sum().item()\n",
    "            sample_num+=len(prediction)\n",
    "\n",
    "        loss=total_loss/train_batches\n",
    "        # Fix: accuracy is correct predictions over *samples seen*, not over\n",
    "        # the number of batches (the old denominator produced values > 1,\n",
    "        # e.g. the recorded 'Acc: 19.3953').\n",
    "        acc=correct/sample_num\n",
    "        print('Loss: {:.4f} Acc: {:.4f}'.format(loss,acc))\n",
    "    return loss,acc"
   ],
   "id": "ecc2a54e4cd594f2",
   "outputs": [],
   "execution_count": 15
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-12-02T15:40:17.856392Z",
     "start_time": "2024-12-02T15:32:35.291024Z"
    }
   },
   "cell_type": "code",
   "source": "train_net(net,dataloader)   # Slow -- very slow. Runs only 3 epochs as a token demonstration.",
   "id": "38137a9ed1b8a039",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Loss: 0.8644 Acc: 19.3953\n",
      "Loss: 0.6131 Acc: 23.5116\n",
      "Loss: 0.6419 Acc: 23.2791\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "(0.6419333046258882, 23.27906976744186)"
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 16
  },
  {
   "metadata": {},
   "cell_type": "code",
   "outputs": [],
   "execution_count": null,
   "source": "",
   "id": "9c6c6e567dfb5430"
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
