{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### ResNet\n",
    "    ResNet诞生于2015年，当年ImageNet竞赛冠军，Top5错误率为3.57%\n",
    "    Kaiming He, Xiangyu Zhang, Shaoqing Ren. Deep Residual Learning for Image Recognition. In CVPR, 2016.\n",
    "    \n",
    "    ResNet提出了层间残差跳连，引入了前方信息，缓解梯度消失，使神经网络层数增加成为可能\n",
    "    \n",
    "    模型名称      网络层数\n",
    "    LeNet        5\n",
    "    AlexNet       8\n",
    "    VGG         16/19\n",
    "    InceptionNet v1 22\n",
    "    \n",
    "    使用卷积实现特征提取时，神经网络的层数增加，效果会更好。\n",
    "    ResNet 引入残差结构最主要的目的是解决网络层数不断加深时导致的梯度消失问题。\n",
    "    \n",
    "    Inception块中的“+”是沿深度方向叠加（千层蛋糕层数叠加）\n",
    "    ResNet块中的“+”是特征图对应元素值相加（矩阵值相加）\n",
    "    \n",
    "###### ResNet块结构：（实线表示恒等映射，虚线表示经1*1卷积调整维度的捷径连接）\n",
    "    ①在堆叠卷积前后，维度相同\n",
    "    实线表示通道相同，计算方式为 H(x) = F(x) + x；\n",
    "    \n",
    "    ②在堆叠卷积前后，维度不同\n",
    "    虚线表示通道不同，计算方式为 H(x) = F(x) + Wx，其中 W 为1*1卷积操作，目的是调整 x 的维度（通道数）。\n",
    "    \n",
    "###### ResNet结构：\n",
    "    in：32*32\n",
    "    ↓\n",
    "    C（核：64*3*3，步长：1，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    \n",
    "    ↓(x)\n",
    "    \n",
    "    C（核：64*3*3，步长：1，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    ↓\n",
    "    C（核：64*3*3，步长：1，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    \n",
    "    ↓H(x) = F(x) + x,(x)\n",
    "    \n",
    "    C（核：64*3*3，步长：1，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    ↓\n",
    "    C（核：64*3*3，步长：1，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    \n",
    "    ↓H(x) = F(x) + x,(Wx)\n",
    "    \n",
    "    C（核：128*3*3，步长：2，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    ↓\n",
    "    C（核：128*3*3，步长：1，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    \n",
    "    ↓H(x) = F(x) + Wx,(x)\n",
    "    \n",
    "    C（核：128*3*3，步长：1，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    ↓\n",
    "    C（核：128*3*3，步长：1，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    \n",
    "    ↓H(x) = F(x) + x,(Wx)\n",
    "    \n",
    "    C（核：256*3*3，步长：2，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    ↓\n",
    "    C（核：256*3*3，步长：1，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    \n",
    "    ↓H(x) = F(x) + Wx,(x)\n",
    "    \n",
    "    C（核：256*3*3，步长：1，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    ↓\n",
    "    C（核：256*3*3，步长：1，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    \n",
    "    ↓H(x) = F(x) + x,(Wx)\n",
    "    \n",
    "    C（核：512*3*3，步长：2，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    ↓\n",
    "    C（核：512*3*3，步长：1，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    \n",
    "    ↓H(x) = F(x) + Wx,(x)\n",
    "    \n",
    "    C（核：512*3*3，步长：1，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    ↓\n",
    "    C（核：512*3*3，步长：1，填充：same ）\n",
    "    B（Yes）\n",
    "    A（relu）\n",
    "    P（None）\n",
    "    D（None）\n",
    "    \n",
    "    ↓H(x) = F(x) + x\n",
    "    \n",
    "    global avg pooling\n",
    "    ↓\n",
    "    Dense 10"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import tensorflow as tf\n",
    "import os\n",
    "import numpy as np\n",
    "from matplotlib import pyplot as plt\n",
    "from tensorflow.keras.layers import Conv2D, BatchNormalization, Activation, MaxPool2D, Dropout, Flatten, Dense\n",
    "from tensorflow.keras import Model\n",
    "\n",
    "# Print arrays in full (no truncation) so the weight dump to weights.txt below is complete.\n",
    "np.set_printoptions(threshold=np.inf)\n",
    "\n",
    "# Load CIFAR-10 and scale pixel values from [0, 255] to [0, 1].\n",
    "cifar10 = tf.keras.datasets.cifar10\n",
    "(x_train, y_train), (x_test, y_test) = cifar10.load_data()\n",
    "x_train, x_test = x_train / 255.0, x_test / 255.0\n",
    "\n",
    "# Both shortcut variants (identity and 1x1-conv) are encapsulated in the ResnetBlock class.\n",
    "class ResnetBlock(Model):\n",
    "    \"\"\"Basic two-conv residual block: out = relu(F(x) + shortcut(x)).\n",
    "\n",
    "    With residual_path=False the shortcut is the identity (solid line,\n",
    "    H(x) = F(x) + x); with residual_path=True the shortcut is a strided\n",
    "    1x1 convolution (dashed line, H(x) = F(x) + Wx) so shapes match.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, filters, strides=1, residual_path=False):\n",
    "        \"\"\"filters: channels of both 3x3 convs; strides: stride of the first\n",
    "        conv (2 = downsample); residual_path: use the 1x1-conv shortcut.\"\"\"\n",
    "        super(ResnetBlock, self).__init__()\n",
    "        self.filters = filters\n",
    "        self.strides = strides\n",
    "        self.residual_path = residual_path\n",
    "\n",
    "        # First conv/BN/ReLU of F(x); this conv carries any stride-2 downsampling.\n",
    "        self.c1 = Conv2D(filters, (3, 3), strides=strides, padding='same', use_bias=False)\n",
    "        self.b1 = BatchNormalization()\n",
    "        self.a1 = Activation('relu')\n",
    "\n",
    "        # Second conv/BN; its ReLU (a2) is applied only after adding the shortcut.\n",
    "        self.c2 = Conv2D(filters, (3, 3), strides=1, padding='same', use_bias=False)\n",
    "        self.b2 = BatchNormalization()\n",
    "        \n",
    "##################### 1x1 convolution shortcut #####################\n",
    "        # When residual_path is True, downsample the input with a 1x1 conv so that\n",
    "        # x matches F(x) in shape and the two can be added element-wise.\n",
    "        if residual_path:  # dims differ before/after the stacked convs\n",
    "            self.down_c1 = Conv2D(filters, (1, 1), strides=strides, padding='same', use_bias=False)\n",
    "            self.down_b1 = BatchNormalization()\n",
    "####################################################################        \n",
    "        self.a2 = Activation('relu')\n",
    "\n",
    "    def call(self, inputs):\n",
    "        \"\"\"Forward pass: relu(F(inputs) + shortcut(inputs)).\"\"\"\n",
    "        residual = inputs  # shortcut starts as the raw input, i.e. residual = x\n",
    "        # Compute F(x): conv -> BN -> ReLU -> conv -> BN.\n",
    "        x = self.c1(inputs)\n",
    "        x = self.b1(x)\n",
    "        x = self.a1(x)\n",
    "\n",
    "        x = self.c2(x)\n",
    "        y = self.b2(x)\n",
    "        \n",
    "##################### 1x1 convolution shortcut #####################\n",
    "        if self.residual_path:\n",
    "            residual = self.down_c1(inputs)\n",
    "            residual = self.down_b1(residual)\n",
    "####################################################################\n",
    "        out = self.a2(y + residual)  # sum F(x)+x or F(x)+Wx, then final ReLU\n",
    "        return out\n",
    "\n",
    "\n",
    "class ResNet18(Model):\n",
    "    \"\"\"ResNet-18 for CIFAR-10: 3x3 conv stem, four stages of residual blocks\n",
    "    (64/128/256/512 filters), global average pooling, 10-way softmax head.\"\"\"\n",
    "\n",
    "    def __init__(self, block_list, initial_filters=64):  # block_list gives the number of residual blocks per stage\n",
    "        super(ResNet18, self).__init__()\n",
    "        self.num_blocks = len(block_list)  # number of stages\n",
    "        self.block_list = block_list\n",
    "        self.out_filters = initial_filters  # filters of the current stage; doubled after each stage\n",
    "        \n",
    "        # Stem: 64 3x3 kernels, stride 1, 'same' (zero) padding, then BN and ReLU.\n",
    "        self.c1 = Conv2D(self.out_filters, (3, 3), strides=1, padding='same', use_bias=False)\n",
    "        self.b1 = BatchNormalization()\n",
    "        self.a1 = Activation('relu')\n",
    "        \n",
    "        self.blocks = tf.keras.models.Sequential()\n",
    "        # Build the ResNet body as a stack of residual blocks.\n",
    "        for block_id in range(len(block_list)):  # which stage\n",
    "            for layer_id in range(block_list[block_id]):  # which block within the stage\n",
    "\n",
    "                if block_id != 0 and layer_id == 0:  # first block of every stage except the first downsamples its input\n",
    "                    block = ResnetBlock(self.out_filters, strides=2, residual_path=True)  # dashed shortcut: H(x) = F(x) + Wx\n",
    "                else:\n",
    "                    block = ResnetBlock(self.out_filters, residual_path=False)  # solid shortcut: H(x) = F(x) + x\n",
    "                self.blocks.add(block)  # append the built block to the network\n",
    "            self.out_filters *= 2  # next stage uses twice as many filters\n",
    "            \n",
    "        self.p1 = tf.keras.layers.GlobalAveragePooling2D()\n",
    "        \n",
    "        self.f1 = tf.keras.layers.Dense(10, activation='softmax', kernel_regularizer=tf.keras.regularizers.l2())\n",
    "\n",
    "    def call(self, inputs):\n",
    "        \"\"\"Forward pass: stem -> residual blocks -> global avg pool -> class probabilities.\"\"\"\n",
    "        x = self.c1(inputs)\n",
    "        x = self.b1(x)\n",
    "        x = self.a1(x)\n",
    "        x = self.blocks(x)\n",
    "        x = self.p1(x)\n",
    "        y = self.f1(x)\n",
    "        return y\n",
    "\n",
    "\n",
    "# Build ResNet-18: four stages with two residual blocks each.\n",
    "model = ResNet18([2, 2, 2, 2])\n",
    "\n",
    "# The Dense head already applies softmax, so from_logits=False.\n",
    "model.compile(optimizer='adam',\n",
    "              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),\n",
    "              metrics=['sparse_categorical_accuracy'])\n",
    "\n",
    "# Resume from a previous checkpoint if one exists on disk.\n",
    "checkpoint_save_path = \"./checkpoint/ResNet18.ckpt\"\n",
    "if os.path.exists(checkpoint_save_path + '.index'):\n",
    "    print('-------------load the model-----------------')\n",
    "    model.load_weights(checkpoint_save_path)\n",
    "\n",
    "# Save only the best weights (by validation loss) during training.\n",
    "cp_callback = tf.keras.callbacks.ModelCheckpoint(filepath=checkpoint_save_path,\n",
    "                                                 save_weights_only=True,\n",
    "                                                 save_best_only=True)\n",
    "\n",
    "history = model.fit(x_train, y_train, batch_size=32, epochs=5, validation_data=(x_test, y_test), validation_freq=1,\n",
    "                    callbacks=[cp_callback])\n",
    "model.summary()\n",
    "\n",
    "# print(model.trainable_variables)\n",
    "# Dump every trainable variable (name, shape, values) to weights.txt.\n",
    "# The context manager guarantees the file is closed even if a write fails.\n",
    "with open('./weights.txt', 'w') as file:\n",
    "    for v in model.trainable_variables:\n",
    "        file.write(str(v.name) + '\\n')\n",
    "        file.write(str(v.shape) + '\\n')\n",
    "        file.write(str(v.numpy()) + '\\n')\n",
    "\n",
    "###############################################    show   ###############################################\n",
    "\n",
    "# Plot training/validation accuracy and loss curves side by side.\n",
    "acc = history.history['sparse_categorical_accuracy']\n",
    "val_acc = history.history['val_sparse_categorical_accuracy']\n",
    "loss = history.history['loss']\n",
    "val_loss = history.history['val_loss']\n",
    "\n",
    "plt.subplot(1, 2, 1)\n",
    "plt.plot(acc, label='Training Accuracy')\n",
    "plt.plot(val_acc, label='Validation Accuracy')\n",
    "plt.title('Training and Validation Accuracy')\n",
    "plt.legend()\n",
    "\n",
    "plt.subplot(1, 2, 2)\n",
    "plt.plot(loss, label='Training Loss')\n",
    "plt.plot(val_loss, label='Validation Loss')\n",
    "plt.title('Training and Validation Loss')\n",
    "plt.legend()\n",
    "plt.show()\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
