{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import tensorflow as tf\n",
    "from unicodedata import normalize\n",
    "from tensorflow import keras\n",
    "import os\n",
    "from tensorflow.keras.models import Sequential, load_model, clone_model\n",
    "from tensorflow.keras.optimizers import RMSprop\n",
    "from collections import Counter\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "from matplotlib import pyplot as plt\n",
    "from tensorflow.keras.layers import Conv2D, ZeroPadding2D, AveragePooling2D, BatchNormalization, Activation, Dense, \\\n",
    "    Input, MaxPooling2D,Flatten,Dropout\n",
    "from tensorflow.keras import Model, layers, regularizers\n",
    "import tensorflow.keras.backend as K\n",
    "from tensorflow.keras.datasets import cifar10\n",
    "from tensorflow.keras.callbacks import LearningRateScheduler, ModelCheckpoint, EarlyStopping, ReduceLROnPlateau\n",
    "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
    "from tensorflow.keras.initializers import he_normal\n",
    "from tensorflow.keras import optimizers\n",
    "from sklearn.preprocessing import MinMaxScaler\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn import preprocessing\n",
    "from sklearn.metrics import accuracy_score\n",
    "from tqdm import tqdm\n",
    "import csv\n",
    "import cv2\n",
    "#importing model \n",
    "from tensorflow.keras.applications import ResNet50,densenet"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|█████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 43/43 [00:03<00:00, 13.36it/s]\n"
     ]
    }
   ],
   "source": [
    "X_data = []\n",
    "Y_data = []\n",
    "\n",
    "input_shape=(32,32,3)#3通道图像数据\n",
    "num_class = 43#数据类别数目\n",
    "\n",
    "path='../../data/data_set/GTSRB/Final_Training/Images'\n",
    "for file in tqdm(os.listdir(path)):\n",
    "    lab=int(file)\n",
    "    for photo_file in os.listdir(path+'/'+file):\n",
    "        if photo_file[0]=='G':\n",
    "            continue\n",
    "        photo_file_path=path+'/'+file+'/'+photo_file\n",
    "        img = cv2.imread(photo_file_path,1)\n",
    "        img = cv2.resize(img,(input_shape[0],input_shape[1]))\n",
    "        X_data.append(img)\n",
    "        Y_data.append(lab)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "39209\n",
      "Counter({2: 2250, 1: 2220, 13: 2160, 12: 2100, 38: 2070, 10: 2010, 4: 1980, 5: 1860, 25: 1500, 9: 1470, 7: 1440, 3: 1410, 8: 1410, 11: 1320, 18: 1200, 35: 1200, 17: 1110, 14: 780, 31: 780, 33: 689, 15: 630, 26: 600, 28: 540, 23: 510, 30: 450, 6: 420, 16: 420, 34: 420, 22: 390, 36: 390, 20: 360, 40: 360, 21: 330, 39: 300, 24: 270, 29: 270, 27: 240, 32: 240, 41: 240, 42: 240, 0: 210, 19: 210, 37: 210})\n"
     ]
    }
   ],
   "source": [
    "print(len(X_data))\n",
    "print(Counter(Y_data))\n",
    "\n",
    "X_data=np.array(X_data)\n",
    "X_data=X_data/255.0\n",
    "Y_data=np.array(Y_data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 对训练集进行切割，然后进行训练\n",
    "train_x,test_x,train_y,test_y = train_test_split(X_data,Y_data,test_size=0.2)\n",
    "\n",
    "lb=preprocessing.LabelBinarizer().fit(np.array(range(num_class)))#对标签进行ont_hot编码\n",
    "train_y=lb.transform(train_y)#因为是多分类任务，必须进行编码处理\n",
    "test_y=lb.transform(test_y)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[[[1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         ...,\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ]],\n",
       "\n",
       "        [[1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         ...,\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ]],\n",
       "\n",
       "        [[1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         ...,\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.16862745, 0.1254902 , 0.10980392],\n",
       "         [0.14901961, 0.11372549, 0.10980392],\n",
       "         [0.14901961, 0.11372549, 0.11372549],\n",
       "         ...,\n",
       "         [0.18039216, 0.15686275, 0.16862745],\n",
       "         [0.16862745, 0.15294118, 0.16470588],\n",
       "         [0.16862745, 0.15294118, 0.15294118]],\n",
       "\n",
       "        [[0.14509804, 0.17647059, 0.16470588],\n",
       "         [0.12941176, 0.16078431, 0.12941176],\n",
       "         [0.16078431, 0.14117647, 0.1254902 ],\n",
       "         ...,\n",
       "         [0.18039216, 0.15686275, 0.16862745],\n",
       "         [0.17254902, 0.14901961, 0.16078431],\n",
       "         [0.15686275, 0.1372549 , 0.1372549 ]],\n",
       "\n",
       "        [[0.42352941, 0.45490196, 0.47058824],\n",
       "         [0.39607843, 0.41176471, 0.30588235],\n",
       "         [0.21176471, 0.19215686, 0.12941176],\n",
       "         ...,\n",
       "         [0.15294118, 0.1372549 , 0.14117647],\n",
       "         [0.15686275, 0.1372549 , 0.1372549 ],\n",
       "         [0.14901961, 0.12941176, 0.12941176]]],\n",
       "\n",
       "\n",
       "       [[[0.2627451 , 0.2627451 , 0.43921569],\n",
       "         [0.27843137, 0.27843137, 0.45882353],\n",
       "         [0.30196078, 0.29803922, 0.48235294],\n",
       "         ...,\n",
       "         [0.28627451, 0.29019608, 0.4745098 ],\n",
       "         [0.29411765, 0.28235294, 0.42352941],\n",
       "         [0.29803922, 0.26666667, 0.32156863]],\n",
       "\n",
       "        [[0.27058824, 0.26666667, 0.44705882],\n",
       "         [0.28235294, 0.27843137, 0.46666667],\n",
       "         [0.30588235, 0.29411765, 0.48627451],\n",
       "         ...,\n",
       "         [0.28235294, 0.28235294, 0.4627451 ],\n",
       "         [0.29411765, 0.27843137, 0.41176471],\n",
       "         [0.30588235, 0.27058824, 0.33333333]],\n",
       "\n",
       "        [[0.27058824, 0.2627451 , 0.43921569],\n",
       "         [0.28235294, 0.27843137, 0.4627451 ],\n",
       "         [0.29803922, 0.29411765, 0.48627451],\n",
       "         ...,\n",
       "         [0.28627451, 0.28235294, 0.45882353],\n",
       "         [0.29019608, 0.27843137, 0.41176471],\n",
       "         [0.29019608, 0.26666667, 0.3254902 ]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.72156863, 0.7372549 , 0.8       ],\n",
       "         [0.74117647, 0.74509804, 0.80784314],\n",
       "         [0.7254902 , 0.74509804, 0.8       ],\n",
       "         ...,\n",
       "         [0.27843137, 0.28235294, 0.28235294],\n",
       "         [0.2745098 , 0.25490196, 0.24313725],\n",
       "         [0.28627451, 0.24313725, 0.21960784]],\n",
       "\n",
       "        [[0.60784314, 0.64705882, 0.76078431],\n",
       "         [0.58431373, 0.62352941, 0.74509804],\n",
       "         [0.56078431, 0.61960784, 0.74117647],\n",
       "         ...,\n",
       "         [0.27843137, 0.29019608, 0.29803922],\n",
       "         [0.28235294, 0.2627451 , 0.25490196],\n",
       "         [0.29019608, 0.25098039, 0.22352941]],\n",
       "\n",
       "        [[0.43137255, 0.53333333, 0.68235294],\n",
       "         [0.40784314, 0.51764706, 0.67843137],\n",
       "         [0.4       , 0.51372549, 0.6745098 ],\n",
       "         ...,\n",
       "         [0.29803922, 0.30980392, 0.32156863],\n",
       "         [0.30196078, 0.28235294, 0.27843137],\n",
       "         [0.29411765, 0.25882353, 0.22745098]]],\n",
       "\n",
       "\n",
       "       [[[0.14117647, 0.17254902, 0.17254902],\n",
       "         [0.16862745, 0.21568627, 0.21960784],\n",
       "         [0.21176471, 0.24313725, 0.25882353],\n",
       "         ...,\n",
       "         [0.16078431, 0.18823529, 0.22352941],\n",
       "         [0.18823529, 0.22745098, 0.23137255],\n",
       "         [0.23529412, 0.29803922, 0.32156863]],\n",
       "\n",
       "        [[0.17647059, 0.19607843, 0.18431373],\n",
       "         [0.17254902, 0.20392157, 0.22352941],\n",
       "         [0.16078431, 0.18431373, 0.21176471],\n",
       "         ...,\n",
       "         [0.15686275, 0.19215686, 0.34901961],\n",
       "         [0.17254902, 0.22745098, 0.25490196],\n",
       "         [0.20784314, 0.28627451, 0.2745098 ]],\n",
       "\n",
       "        [[0.14901961, 0.15686275, 0.14509804],\n",
       "         [0.14901961, 0.16862745, 0.16862745],\n",
       "         [0.13333333, 0.15686275, 0.15686275],\n",
       "         ...,\n",
       "         [0.21960784, 0.26666667, 0.23921569],\n",
       "         [0.21176471, 0.32156863, 0.18823529],\n",
       "         [0.29019608, 0.45882353, 0.37647059]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.17647059, 0.19607843, 0.21568627],\n",
       "         [0.17254902, 0.2       , 0.17647059],\n",
       "         [0.1254902 , 0.14117647, 0.12941176],\n",
       "         ...,\n",
       "         [0.1372549 , 0.18039216, 0.23137255],\n",
       "         [0.1254902 , 0.14509804, 0.16470588],\n",
       "         [0.10588235, 0.12941176, 0.14117647]],\n",
       "\n",
       "        [[0.14509804, 0.15294118, 0.13333333],\n",
       "         [0.14509804, 0.14901961, 0.14117647],\n",
       "         [0.13333333, 0.14117647, 0.1254902 ],\n",
       "         ...,\n",
       "         [0.16862745, 0.21960784, 0.21568627],\n",
       "         [0.10196078, 0.12156863, 0.12941176],\n",
       "         [0.12941176, 0.14509804, 0.19215686]],\n",
       "\n",
       "        [[0.14901961, 0.14901961, 0.15294118],\n",
       "         [0.1372549 , 0.1372549 , 0.1254902 ],\n",
       "         [0.12156863, 0.13333333, 0.1372549 ],\n",
       "         ...,\n",
       "         [0.1372549 , 0.18431373, 0.30980392],\n",
       "         [0.15686275, 0.18431373, 0.23137255],\n",
       "         [0.14117647, 0.16078431, 0.22745098]]],\n",
       "\n",
       "\n",
       "       ...,\n",
       "\n",
       "\n",
       "       [[[0.92941176, 0.74117647, 0.63529412],\n",
       "         [0.93333333, 0.74117647, 0.62745098],\n",
       "         [0.93333333, 0.7372549 , 0.62352941],\n",
       "         ...,\n",
       "         [0.96470588, 0.76078431, 0.64313725],\n",
       "         [0.98039216, 0.76862745, 0.63529412],\n",
       "         [0.98039216, 0.76470588, 0.63921569]],\n",
       "\n",
       "        [[0.91764706, 0.7254902 , 0.61176471],\n",
       "         [0.92941176, 0.73333333, 0.62352941],\n",
       "         [0.92156863, 0.7254902 , 0.61176471],\n",
       "         ...,\n",
       "         [0.96078431, 0.75686275, 0.64313725],\n",
       "         [0.96078431, 0.76078431, 0.63921569],\n",
       "         [0.95686275, 0.76078431, 0.63921569]],\n",
       "\n",
       "        [[0.90980392, 0.71372549, 0.60784314],\n",
       "         [0.92156863, 0.72156863, 0.62352941],\n",
       "         [0.91372549, 0.72156863, 0.61568627],\n",
       "         ...,\n",
       "         [0.95686275, 0.74901961, 0.63529412],\n",
       "         [0.96078431, 0.75686275, 0.63137255],\n",
       "         [0.96078431, 0.76470588, 0.63921569]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.66666667, 0.54901961, 0.50196078],\n",
       "         [0.79607843, 0.67058824, 0.56862745],\n",
       "         [0.8627451 , 0.67058824, 0.58431373],\n",
       "         ...,\n",
       "         [0.86666667, 0.71372549, 0.60392157],\n",
       "         [0.8745098 , 0.70980392, 0.60392157],\n",
       "         [0.86666667, 0.70980392, 0.60392157]],\n",
       "\n",
       "        [[0.74901961, 0.59607843, 0.54117647],\n",
       "         [0.7254902 , 0.58823529, 0.48235294],\n",
       "         [0.74509804, 0.60392157, 0.54117647],\n",
       "         ...,\n",
       "         [0.86666667, 0.70588235, 0.59215686],\n",
       "         [0.87058824, 0.70980392, 0.60392157],\n",
       "         [0.8745098 , 0.70980392, 0.61568627]],\n",
       "\n",
       "        [[0.87843137, 0.69411765, 0.58823529],\n",
       "         [0.82745098, 0.67058824, 0.58039216],\n",
       "         [0.74117647, 0.61176471, 0.52941176],\n",
       "         ...,\n",
       "         [0.8627451 , 0.69803922, 0.6       ],\n",
       "         [0.86666667, 0.70196078, 0.61176471],\n",
       "         [0.87058824, 0.69803922, 0.60392157]]],\n",
       "\n",
       "\n",
       "       [[[0.23137255, 0.29803922, 0.23137255],\n",
       "         [0.29803922, 0.40784314, 0.35686275],\n",
       "         [0.25098039, 0.30980392, 0.27058824],\n",
       "         ...,\n",
       "         [0.22352941, 0.25882353, 0.29803922],\n",
       "         [0.22352941, 0.30980392, 0.3254902 ],\n",
       "         [0.15294118, 0.20392157, 0.18039216]],\n",
       "\n",
       "        [[0.14117647, 0.21176471, 0.21568627],\n",
       "         [0.17254902, 0.2745098 , 0.28235294],\n",
       "         [0.11764706, 0.17647059, 0.17647059],\n",
       "         ...,\n",
       "         [0.31764706, 0.34509804, 0.34509804],\n",
       "         [0.15294118, 0.22745098, 0.22745098],\n",
       "         [0.05882353, 0.10196078, 0.10588235]],\n",
       "\n",
       "        [[0.14509804, 0.17254902, 0.22352941],\n",
       "         [0.15294118, 0.18431373, 0.23529412],\n",
       "         [0.14509804, 0.16078431, 0.19607843],\n",
       "         ...,\n",
       "         [0.2745098 , 0.2745098 , 0.25882353],\n",
       "         [0.10196078, 0.14117647, 0.13333333],\n",
       "         [0.07058824, 0.09411765, 0.11764706]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.09803922, 0.11372549, 0.14117647],\n",
       "         [0.10980392, 0.12156863, 0.1254902 ],\n",
       "         [0.16078431, 0.17254902, 0.17254902],\n",
       "         ...,\n",
       "         [0.06666667, 0.07058824, 0.09803922],\n",
       "         [0.0627451 , 0.0627451 , 0.08235294],\n",
       "         [0.05882353, 0.05882353, 0.0745098 ]],\n",
       "\n",
       "        [[0.14117647, 0.18823529, 0.36470588],\n",
       "         [0.12156863, 0.16470588, 0.23921569],\n",
       "         [0.1372549 , 0.18431373, 0.19215686],\n",
       "         ...,\n",
       "         [0.0627451 , 0.0627451 , 0.08235294],\n",
       "         [0.06666667, 0.05882353, 0.07843137],\n",
       "         [0.05490196, 0.05098039, 0.06666667]],\n",
       "\n",
       "        [[0.19607843, 0.24313725, 0.61176471],\n",
       "         [0.16078431, 0.21176471, 0.38431373],\n",
       "         [0.1254902 , 0.18039216, 0.20392157],\n",
       "         ...,\n",
       "         [0.0627451 , 0.05882353, 0.07843137],\n",
       "         [0.0627451 , 0.05882353, 0.07843137],\n",
       "         [0.05098039, 0.05098039, 0.07058824]]],\n",
       "\n",
       "\n",
       "       [[[0.05882353, 0.04705882, 0.05490196],\n",
       "         [0.07058824, 0.05882353, 0.0627451 ],\n",
       "         [0.06666667, 0.05490196, 0.05490196],\n",
       "         ...,\n",
       "         [0.06666667, 0.05882353, 0.0627451 ],\n",
       "         [0.06666667, 0.05882353, 0.0627451 ],\n",
       "         [0.07058824, 0.0627451 , 0.06666667]],\n",
       "\n",
       "        [[0.05882353, 0.04705882, 0.05098039],\n",
       "         [0.06666667, 0.05098039, 0.05098039],\n",
       "         [0.06666667, 0.05490196, 0.05490196],\n",
       "         ...,\n",
       "         [0.07058824, 0.0627451 , 0.06666667],\n",
       "         [0.0745098 , 0.06666667, 0.07058824],\n",
       "         [0.0745098 , 0.0627451 , 0.06666667]],\n",
       "\n",
       "        [[0.05882353, 0.05098039, 0.05490196],\n",
       "         [0.0627451 , 0.05098039, 0.05098039],\n",
       "         [0.0627451 , 0.05098039, 0.05098039],\n",
       "         ...,\n",
       "         [0.0745098 , 0.0627451 , 0.06666667],\n",
       "         [0.0745098 , 0.0627451 , 0.06666667],\n",
       "         [0.07843137, 0.07058824, 0.0745098 ]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.05490196, 0.04313725, 0.04313725],\n",
       "         [0.05098039, 0.04313725, 0.04313725],\n",
       "         [0.05098039, 0.04313725, 0.04313725],\n",
       "         ...,\n",
       "         [0.06666667, 0.04313725, 0.04705882],\n",
       "         [0.0627451 , 0.04705882, 0.05098039],\n",
       "         [0.05490196, 0.04705882, 0.05098039]],\n",
       "\n",
       "        [[0.05098039, 0.04313725, 0.04313725],\n",
       "         [0.05098039, 0.04313725, 0.04313725],\n",
       "         [0.05098039, 0.04313725, 0.04705882],\n",
       "         ...,\n",
       "         [0.0627451 , 0.04313725, 0.04705882],\n",
       "         [0.05882353, 0.04705882, 0.05490196],\n",
       "         [0.05098039, 0.04313725, 0.04705882]],\n",
       "\n",
       "        [[0.05098039, 0.04313725, 0.04705882],\n",
       "         [0.05098039, 0.04313725, 0.04705882],\n",
       "         [0.05098039, 0.04313725, 0.04705882],\n",
       "         ...,\n",
       "         [0.0627451 , 0.04705882, 0.05098039],\n",
       "         [0.05882353, 0.04705882, 0.05490196],\n",
       "         [0.05490196, 0.04313725, 0.04705882]]]])"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "test_x"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "31367"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "len(train_x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "def scheduler(epoch):\n",
    "    if epoch < 80:\n",
    "        return 0.01\n",
    "    if epoch < 160:\n",
    "        return 0.005\n",
    "    return 0.001"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "def vgg16():\n",
    "    input_tensor = Input(shape=(32, 32, 3))\n",
    "    x = input_tensor\n",
    "    x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(input_tensor)\n",
    "    x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x)\n",
    "    x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x)\n",
    "#     print(x.shape)\n",
    "    # Block 2\n",
    "    x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x)\n",
    "    x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x)\n",
    "    x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x)\n",
    "#     print(x.shape)\n",
    "    # Block 3\n",
    "    x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x)\n",
    "    x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x)\n",
    "    x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x)\n",
    "    x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x)\n",
    "#     print(x.shape)\n",
    "    # Block 4\n",
    "    x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x)\n",
    "    x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x)\n",
    "    x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x)\n",
    "    x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x)\n",
    "#     print(x.shape)\n",
    "    # # Block 5\n",
    "    # x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x)\n",
    "    # x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x)\n",
    "    # x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x)\n",
    "    # x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x)\n",
    "    # print(x.shape)\n",
    "\n",
    "    # Classification block\n",
    "    x = Flatten(name='flatten')(x)\n",
    "    x = Dense(4096, activation='relu', name='fc1')(x)\n",
    "    x = Dense(4096, activation='relu', name='fc2')(x)\n",
    "    x = Dropout(0.5)(x)\n",
    "    x = Dense(43, activation='softmax', name='predictions')(x)\n",
    "    model = Model(inputs=input_tensor, outputs=x, name='VGG16')\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"VGG16\"\n",
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "input_1 (InputLayer)         [(None, 32, 32, 3)]       0         \n",
      "_________________________________________________________________\n",
      "block1_conv1 (Conv2D)        (None, 32, 32, 64)        1792      \n",
      "_________________________________________________________________\n",
      "block1_conv2 (Conv2D)        (None, 32, 32, 64)        36928     \n",
      "_________________________________________________________________\n",
      "block1_pool (MaxPooling2D)   (None, 16, 16, 64)        0         \n",
      "_________________________________________________________________\n",
      "block2_conv1 (Conv2D)        (None, 16, 16, 128)       73856     \n",
      "_________________________________________________________________\n",
      "block2_conv2 (Conv2D)        (None, 16, 16, 128)       147584    \n",
      "_________________________________________________________________\n",
      "block2_pool (MaxPooling2D)   (None, 8, 8, 128)         0         \n",
      "_________________________________________________________________\n",
      "block3_conv1 (Conv2D)        (None, 8, 8, 256)         295168    \n",
      "_________________________________________________________________\n",
      "block3_conv2 (Conv2D)        (None, 8, 8, 256)         590080    \n",
      "_________________________________________________________________\n",
      "block3_conv3 (Conv2D)        (None, 8, 8, 256)         590080    \n",
      "_________________________________________________________________\n",
      "block3_pool (MaxPooling2D)   (None, 4, 4, 256)         0         \n",
      "_________________________________________________________________\n",
      "block4_conv1 (Conv2D)        (None, 4, 4, 512)         1180160   \n",
      "_________________________________________________________________\n",
      "block4_conv2 (Conv2D)        (None, 4, 4, 512)         2359808   \n",
      "_________________________________________________________________\n",
      "block4_conv3 (Conv2D)        (None, 4, 4, 512)         2359808   \n",
      "_________________________________________________________________\n",
      "block4_pool (MaxPooling2D)   (None, 2, 2, 512)         0         \n",
      "_________________________________________________________________\n",
      "flatten (Flatten)            (None, 2048)              0         \n",
      "_________________________________________________________________\n",
      "fc1 (Dense)                  (None, 4096)              8392704   \n",
      "_________________________________________________________________\n",
      "fc2 (Dense)                  (None, 4096)              16781312  \n",
      "_________________________________________________________________\n",
      "dropout (Dropout)            (None, 4096)              0         \n",
      "_________________________________________________________________\n",
      "predictions (Dense)          (None, 43)                176171    \n",
      "=================================================================\n",
      "Total params: 32,985,451\n",
      "Trainable params: 32,985,451\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "model = vgg16()\n",
    "model.summary()#显示模型结构"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "sgd = optimizers.SGD(lr=.1, momentum=0.9, nesterov=True)\n",
    "model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['acc'])\n",
    "\n",
    "change_lr = LearningRateScheduler(scheduler)\n",
    "\n",
    "save_dir = os.path.join(os.getcwd(), 'data/trained_model')\n",
    "model_name = 'vgg19_model.{epoch:02d}-{val_acc:.2f}.h5'\n",
    "if not os.path.isdir(save_dir):\n",
    "    os.makedirs(save_dir)\n",
    "filepath = os.path.join(save_dir, model_name)\n",
    "\n",
    "checkpoint = ModelCheckpoint(filepath=filepath,\n",
    "                             monitor='val_acc',\n",
    "                             verbose=1,\n",
    "                             save_best_only=True)\n",
    "\n",
    "#creating early stopping to prevent model from overfitting \n",
    "early_stopping = EarlyStopping(monitor=\"val_acc\", min_delta=0,\n",
    "                                                  patience=5, verbose=1, \n",
    "                                                  mode=\"auto\", baseline=None, \n",
    "                                                  restore_best_weights=True)\n",
    "\n",
    "cbks = [early_stopping, checkpoint, change_lr]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Using real-time data augmentation.\n"
     ]
    }
   ],
   "source": [
    "print('Using real-time data augmentation.')\n",
    "# datagen = ImageDataGenerator(horizontal_flip=True,\n",
    "#                              width_shift_range=0.125, height_shift_range=0.125, fill_mode='constant', cval=0.)\n",
    "datagen = ImageDataGenerator(featurewise_center=True,\n",
    "                                   featurewise_std_normalization=True,\n",
    "                                   rotation_range=20,\n",
    "                                   width_shift_range=0.2,\n",
    "                                   height_shift_range=0.2,\n",
    "                                   horizontal_flip=True)\n",
    "datagen.fit(train_x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From <ipython-input-14-c3162b564b49>:8: Model.fit_generator (from tensorflow.python.keras.engine.training) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use Model.fit, which supports generators.\n",
      "Epoch 1/100\n",
      "  1/246 [..............................] - ETA: 0s - loss: 3.7612 - acc: 0.0234WARNING:tensorflow:Callbacks method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0140s vs `on_train_batch_end` time: 0.0309s). Check your callbacks.\n",
      "246/246 [==============================] - ETA: 0s - loss: 3.5305 - acc: 0.0611\n",
      "Epoch 00001: val_acc improved from -inf to 0.09577, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.01-0.10.h5\n",
      "246/246 [==============================] - 14s 57ms/step - loss: 3.5305 - acc: 0.0611 - val_loss: 3.4056 - val_acc: 0.0958\n",
      "Epoch 2/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 3.1095 - acc: 0.1443- ETA: 6s - ETA: 0s - loss: 3.1556 - acc\n",
      "Epoch 00002: val_acc improved from 0.09577 to 0.31625, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.02-0.32.h5\n",
      "246/246 [==============================] - 14s 55ms/step - loss: 3.1066 - acc: 0.1451 - val_loss: 2.2307 - val_acc: 0.3162\n",
      "Epoch 3/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 1.6920 - acc: 0.4370- ETA: 3s - loss: 1.8034 - ac - ETA: \n",
      "Epoch 00003: val_acc improved from 0.31625 to 0.63096, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.03-0.63.h5\n",
      "246/246 [==============================] - 13s 55ms/step - loss: 1.6901 - acc: 0.4373 - val_loss: 1.1144 - val_acc: 0.6310\n",
      "Epoch 4/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.7032 - acc: 0.7707\n",
      "Epoch 00004: val_acc improved from 0.63096 to 0.91622, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.04-0.92.h5\n",
      "246/246 [==============================] - 14s 55ms/step - loss: 0.7013 - acc: 0.7713 - val_loss: 0.2458 - val_acc: 0.9162\n",
      "Epoch 5/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.1792 - acc: 0.9442\n",
      "Epoch 00005: val_acc improved from 0.91622 to 0.96889, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.05-0.97.h5\n",
      "246/246 [==============================] - 14s 55ms/step - loss: 0.1791 - acc: 0.9442 - val_loss: 0.0963 - val_acc: 0.9689\n",
      "Epoch 6/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0801 - acc: 0.9749- ETA: 0s - loss: 0.0810 - acc\n",
      "Epoch 00006: val_acc improved from 0.96889 to 0.98304, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.06-0.98.h5\n",
      "246/246 [==============================] - 14s 55ms/step - loss: 0.0801 - acc: 0.9749 - val_loss: 0.0554 - val_acc: 0.9830\n",
      "Epoch 7/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0488 - acc: 0.9850\n",
      "Epoch 00007: val_acc improved from 0.98304 to 0.98687, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.07-0.99.h5\n",
      "246/246 [==============================] - 14s 55ms/step - loss: 0.0489 - acc: 0.9850 - val_loss: 0.0417 - val_acc: 0.9869\n",
      "Epoch 8/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0281 - acc: 0.9916\n",
      "Epoch 00008: val_acc improved from 0.98687 to 0.98776, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.08-0.99.h5\n",
      "246/246 [==============================] - 14s 56ms/step - loss: 0.0280 - acc: 0.9916 - val_loss: 0.0433 - val_acc: 0.9878\n",
      "Epoch 9/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0272 - acc: 0.9914- ETA: 5s - \n",
      "Epoch 00009: val_acc improved from 0.98776 to 0.99286, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.09-0.99.h5\n",
      "246/246 [==============================] - 14s 56ms/step - loss: 0.0272 - acc: 0.9914 - val_loss: 0.0300 - val_acc: 0.9929\n",
      "Epoch 10/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0201 - acc: 0.9929\n",
      "Epoch 00010: val_acc did not improve from 0.99286\n",
      "246/246 [==============================] - 13s 51ms/step - loss: 0.0200 - acc: 0.9930 - val_loss: 0.0357 - val_acc: 0.9903\n",
      "Epoch 11/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0190 - acc: 0.9941\n",
      "Epoch 00011: val_acc did not improve from 0.99286\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0190 - acc: 0.9941 - val_loss: 0.0315 - val_acc: 0.9921\n",
      "Epoch 12/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0127 - acc: 0.9959\n",
      "Epoch 00012: val_acc did not improve from 0.99286\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0127 - acc: 0.9959 - val_loss: 0.0415 - val_acc: 0.9899\n",
      "Epoch 13/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0098 - acc: 0.9969\n",
      "Epoch 00013: val_acc improved from 0.99286 to 0.99413, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.13-0.99.h5\n",
      "246/246 [==============================] - 14s 56ms/step - loss: 0.0098 - acc: 0.9969 - val_loss: 0.0242 - val_acc: 0.9941\n",
      "Epoch 14/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0075 - acc: 0.9976\n",
      "Epoch 00014: val_acc improved from 0.99413 to 0.99426, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.14-0.99.h5\n",
      "246/246 [==============================] - 14s 57ms/step - loss: 0.0075 - acc: 0.9976 - val_loss: 0.0268 - val_acc: 0.9943\n",
      "Epoch 15/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0088 - acc: 0.9969\n",
      "Epoch 00015: val_acc did not improve from 0.99426\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0088 - acc: 0.9969 - val_loss: 0.0265 - val_acc: 0.9936\n",
      "Epoch 16/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0043 - acc: 0.9990\n",
      "Epoch 00016: val_acc did not improve from 0.99426\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0042 - acc: 0.9990 - val_loss: 0.0285 - val_acc: 0.9930\n",
      "Epoch 17/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0054 - acc: 0.9984\n",
      "Epoch 00017: val_acc improved from 0.99426 to 0.99554, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.17-1.00.h5\n",
      "246/246 [==============================] - 14s 56ms/step - loss: 0.0053 - acc: 0.9984 - val_loss: 0.0205 - val_acc: 0.9955\n",
      "Epoch 18/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0064 - acc: 0.9983\n",
      "Epoch 00018: val_acc did not improve from 0.99554\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0064 - acc: 0.9983 - val_loss: 0.0282 - val_acc: 0.9940\n",
      "Epoch 19/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0026 - acc: 0.9993- ETA: 3s - loss: 0.0030 - acc: 0.999 - ETA\n",
      "Epoch 00019: val_acc did not improve from 0.99554\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0026 - acc: 0.9993 - val_loss: 0.0224 - val_acc: 0.9950\n",
      "Epoch 20/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0030 - acc: 0.9990\n",
      "Epoch 00020: val_acc did not improve from 0.99554\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0029 - acc: 0.9990 - val_loss: 0.0246 - val_acc: 0.9948\n",
      "Epoch 21/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 6.5738e-04 - acc: 0.9999- ETA: 5s - loss:  - ET\n",
      "Epoch 00021: val_acc improved from 0.99554 to 0.99668, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.21-1.00.h5\n",
      "246/246 [==============================] - 14s 56ms/step - loss: 6.5487e-04 - acc: 0.9999 - val_loss: 0.0246 - val_acc: 0.9967\n",
      "Epoch 22/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0054 - acc: 0.9984\n",
      "Epoch 00022: val_acc did not improve from 0.99668\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0053 - acc: 0.9984 - val_loss: 0.0283 - val_acc: 0.9945\n",
      "Epoch 23/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0016 - acc: 0.9995- ETA: 3s - loss: 0.0020 - - ETA: 2s -\n",
      "Epoch 00023: val_acc improved from 0.99668 to 0.99681, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.23-1.00.h5\n",
      "246/246 [==============================] - 14s 56ms/step - loss: 0.0016 - acc: 0.9995 - val_loss: 0.0175 - val_acc: 0.9968\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 24/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0029 - acc: 0.9992\n",
      "Epoch 00024: val_acc did not improve from 0.99681\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0029 - acc: 0.9992 - val_loss: 0.0218 - val_acc: 0.9943\n",
      "Epoch 25/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0014 - acc: 0.9996    - ETA: 6s - loss: 6.1435e-04 - acc: 0. - ETA: 5s  - ETA:\n",
      "Epoch 00025: val_acc did not improve from 0.99681\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0014 - acc: 0.9996 - val_loss: 0.0267 - val_acc: 0.9934\n",
      "Epoch 26/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0044 - acc: 0.9992- ETA: 0s - loss: 0.0044 - acc:\n",
      "Epoch 00026: val_acc did not improve from 0.99681\n",
      "246/246 [==============================] - 13s 53ms/step - loss: 0.0044 - acc: 0.9992 - val_loss: 0.0200 - val_acc: 0.9945\n",
      "Epoch 27/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 7.6452e-04 - acc: 0.9998- ETA: 2s - loss: 7.4\n",
      "Epoch 00027: val_acc did not improve from 0.99681\n",
      "246/246 [==============================] - 13s 53ms/step - loss: 7.6162e-04 - acc: 0.9998 - val_loss: 0.0194 - val_acc: 0.9960\n",
      "Epoch 28/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 1.1408e-04 - acc: 1.0000- ETA: 0s - loss: 1.1588e-04 - acc: 1.00Restoring model weights from the end of the best epoch.\n",
      "\n",
      "Epoch 00028: val_acc did not improve from 0.99681\n",
      "246/246 [==============================] - 13s 55ms/step - loss: 1.1389e-04 - acc: 1.0000 - val_loss: 0.0219 - val_acc: 0.9966\n",
      "Epoch 00028: early stopping\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "<tensorflow.python.keras.callbacks.History at 0x1fa51ebcda0>"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "batch_size = 128\n",
    "epochs = 100\n",
    "iterations = 246  # steps per epoch = ceil(n_train / batch_size)\n",
    "# Model.fit accepts generators directly; fit_generator is deprecated and was\n",
    "# removed in TF >= 2.6. This also matches the ResNet/DenseNet fit cell below.\n",
    "model.fit(datagen.flow(train_x, train_y, batch_size=batch_size),\n",
    "          steps_per_epoch=iterations,\n",
    "          epochs=epochs,\n",
    "          callbacks=cbks,\n",
    "          validation_data=(test_x, test_y))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "def build_model_with_resnet50(freeze_wights=False):\n",
    "    \"\"\"Build a classifier on top of an ImageNet-pretrained ResNet50 backbone.\n",
    "\n",
    "    Args:\n",
    "        freeze_wights: if True, freeze the backbone except BatchNormalization\n",
    "            layers, which stay trainable so their statistics can adapt to the\n",
    "            new data. (Name is a typo of 'freeze_weights', kept for\n",
    "            compatibility with existing callers.)\n",
    "\n",
    "    Returns:\n",
    "        A keras Model mapping the backbone input to a softmax over\n",
    "        `num_class` classes (`num_class` is read from the notebook scope).\n",
    "    \"\"\"\n",
    "\n",
    "    Backbone = ResNet50(\n",
    "    include_top=False, weights='imagenet', pooling='avg')\n",
    "\n",
    "    if freeze_wights:\n",
    "        Backbone.trainable = False\n",
    "        for layer in Backbone.layers:\n",
    "            # Re-enable training only for BatchNorm layers of the frozen backbone.\n",
    "            if \"BatchNormalization\" in layer.__class__.__name__:\n",
    "                layer.trainable = True\n",
    "            else:\n",
    "                layer.trainable = False\n",
    "    else:\n",
    "        Backbone.trainable = True\n",
    "\n",
    "    X = BatchNormalization()(Backbone.output)\n",
    "\n",
    "    # pooling='avg' already yields a flat (batch, features) tensor, so this\n",
    "    # Flatten is effectively a no-op kept for the layer name.\n",
    "    X = Flatten(name=\"flatten\")(X)\n",
    "    \n",
    "    X = Dense(1024)(X)\n",
    "    X = Activation('relu')(X) \n",
    "\n",
    "    X = BatchNormalization()(X)\n",
    "\n",
    "    X = Dense(num_class)(X)\n",
    "    X = Activation('softmax')(X)\n",
    "\n",
    "    model = Model(Backbone.input, X)\n",
    "\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "def build_model_with_denseNet121(freeze_wights=False):\n",
    "    \"\"\"Build a classifier on top of an ImageNet-pretrained DenseNet121 backbone.\n",
    "\n",
    "    Args:\n",
    "        freeze_wights: if True, freeze the backbone except BatchNormalization\n",
    "            layers, which stay trainable so their statistics can adapt to the\n",
    "            new data. (Name is a typo of 'freeze_weights', kept for\n",
    "            compatibility with existing callers.)\n",
    "\n",
    "    Returns:\n",
    "        A keras Model mapping the backbone input to a softmax over\n",
    "        `num_class` classes (`num_class` is read from the notebook scope).\n",
    "    \"\"\"\n",
    "\n",
    "    Backbone = densenet.DenseNet121(\n",
    "    include_top=False, weights='imagenet', pooling='avg')\n",
    "\n",
    "    if freeze_wights:\n",
    "        Backbone.trainable = False\n",
    "        for layer in Backbone.layers:\n",
    "            # Re-enable training only for BatchNorm layers of the frozen backbone.\n",
    "            if \"BatchNormalization\" in layer.__class__.__name__:\n",
    "                layer.trainable = True\n",
    "            else:\n",
    "                layer.trainable = False\n",
    "    else:\n",
    "        Backbone.trainable = True\n",
    "\n",
    "    X = BatchNormalization()(Backbone.output)\n",
    "\n",
    "    # pooling='avg' already yields a flat (batch, features) tensor, so this\n",
    "    # Flatten is effectively a no-op kept for the layer name.\n",
    "    X = Flatten(name=\"flatten\")(X)\n",
    "    \n",
    "    X = Dense(1024)(X)\n",
    "    X = Activation('relu')(X) \n",
    "\n",
    "    X = BatchNormalization()(X)\n",
    "\n",
    "    X = Dense(num_class)(X)\n",
    "    X = Activation('softmax')(X)\n",
    "\n",
    "    model = Model(Backbone.input, X)\n",
    "\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Instantiate the ResNet50-based classifier (default: backbone fully trainable).\n",
    "ResNet_model = build_model_with_resnet50()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "loss = 'categorical_crossentropy'\n",
    "# `lr` is deprecated in TF2 optimizers; the supported name is `learning_rate`.\n",
    "optimizer = optimizers.Adam(learning_rate=1e-4)\n",
    "# NOTE(review): the next cell re-compiles this model with metrics=['acc'],\n",
    "# so this compile is effectively superseded there.\n",
    "ResNet_model.compile(loss=loss, optimizer=optimizer, metrics=['accuracy'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "# `lr` is deprecated in TF2 optimizers; the supported name is `learning_rate`.\n",
    "optimizer = optimizers.Adam(learning_rate=1e-4)\n",
    "ResNet_model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['acc'])\n",
    "\n",
    "# Shrink the learning rate 10x after 3 epochs without val_loss improvement.\n",
    "reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=3, verbose=1, min_delta=1e-6)\n",
    "\n",
    "# NOTE(review): checkpoints are named 'densenet121_model' although the model\n",
    "# compiled here is the ResNet50 variant -- confirm the intended file name.\n",
    "save_dir = os.path.join(os.getcwd(), '../../data/trained_model')\n",
    "model_name = 'densenet121_model.{epoch:02d}-{val_acc:.2f}.h5'\n",
    "if not os.path.isdir(save_dir):\n",
    "    os.makedirs(save_dir)\n",
    "filepath = os.path.join(save_dir, model_name)\n",
    "\n",
    "# Save a checkpoint only when validation accuracy improves.\n",
    "checkpoint = ModelCheckpoint(filepath=filepath,\n",
    "                             monitor='val_acc',\n",
    "                             verbose=1,\n",
    "                             save_best_only=True)\n",
    "\n",
    "# Early stopping to prevent overfitting; restores the best-val_acc weights.\n",
    "early_stopping = EarlyStopping(monitor=\"val_acc\", min_delta=0,\n",
    "                               patience=8, verbose=1,\n",
    "                               mode=\"auto\", baseline=None,\n",
    "                               restore_best_weights=True)\n",
    "\n",
    "cbks = [early_stopping, checkpoint, reduce_lr]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Augmentation for the training set only; validation data is left untouched.\n",
    "# NOTE(review): horizontal_flip=True mirrors traffic-sign images -- many GTSRB\n",
    "# classes are not left/right symmetric (e.g. turn-left vs turn-right), so\n",
    "# confirm this flip is intended.\n",
    "train_data_gen = ImageDataGenerator(rotation_range=40,\n",
    "    zoom_range=0.2,\n",
    "    width_shift_range=0.2,\n",
    "    height_shift_range=0.2,\n",
    "    shear_range=0.2,\n",
    "    horizontal_flip=True,\n",
    "    fill_mode=\"nearest\")\n",
    "valid_data_gen = ImageDataGenerator()\n",
    "# fit() is only required for featurewise statistics (featurewise_center /\n",
    "# zca_whitening); none are enabled here, so this call is effectively a no-op.\n",
    "train_data_gen.fit(train_x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 3.0595 - acc: 0.2288\n",
      "Epoch 00001: val_acc improved from -inf to 0.00880, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.01-0.01.h5\n",
      "251/251 [==============================] - 14s 56ms/step - loss: 3.0595 - acc: 0.2288 - val_loss: 20.8584 - val_acc: 0.0088\n",
      "Epoch 2/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 2.0894 - acc: 0.4060\n",
      "Epoch 00002: val_acc improved from 0.00880 to 0.03201, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.02-0.03.h5\n",
      "251/251 [==============================] - 13s 52ms/step - loss: 2.0894 - acc: 0.4060 - val_loss: 17.1500 - val_acc: 0.0320\n",
      "Epoch 3/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 1.6794 - acc: 0.4885\n",
      "Epoch 00003: val_acc improved from 0.03201 to 0.22813, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.03-0.23.h5\n",
      "251/251 [==============================] - 13s 52ms/step - loss: 1.6794 - acc: 0.4885 - val_loss: 6.0011 - val_acc: 0.2281\n",
      "Epoch 4/200\n",
      "250/251 [============================>.] - ETA: 0s - loss: 1.4391 - acc: 0.5461\n",
      "Epoch 00004: val_acc improved from 0.22813 to 0.47131, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.04-0.47.h5\n",
      "251/251 [==============================] - 13s 54ms/step - loss: 1.4386 - acc: 0.5459 - val_loss: 2.0404 - val_acc: 0.4713\n",
      "Epoch 5/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 1.2876 - acc: 0.5904\n",
      "Epoch 00005: val_acc improved from 0.47131 to 0.68796, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.05-0.69.h5\n",
      "251/251 [==============================] - 13s 52ms/step - loss: 1.2876 - acc: 0.5904 - val_loss: 0.9747 - val_acc: 0.6880\n",
      "Epoch 6/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 1.1069 - acc: 0.6379\n",
      "Epoch 00006: val_acc improved from 0.68796 to 0.72660, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.06-0.73.h5\n",
      "251/251 [==============================] - 13s 52ms/step - loss: 1.1069 - acc: 0.6379 - val_loss: 0.8214 - val_acc: 0.7266\n",
      "Epoch 7/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 0.9714 - acc: 0.6826\n",
      "Epoch 00007: val_acc improved from 0.72660 to 0.76078, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.07-0.76.h5\n",
      "251/251 [==============================] - 13s 52ms/step - loss: 0.9714 - acc: 0.6826 - val_loss: 0.6992 - val_acc: 0.7608\n",
      "Epoch 8/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 0.8771 - acc: 0.7089\n",
      "Epoch 00008: val_acc improved from 0.76078 to 0.78628, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.08-0.79.h5\n",
      "251/251 [==============================] - 13s 52ms/step - loss: 0.8771 - acc: 0.7089 - val_loss: 0.6417 - val_acc: 0.7863\n",
      "Epoch 9/200\n",
      "250/251 [============================>.] - ETA: 0s - loss: 0.7985 - acc: 0.7270\n",
      "Epoch 00009: val_acc improved from 0.78628 to 0.81905, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.09-0.82.h5\n",
      "251/251 [==============================] - 13s 53ms/step - loss: 0.7967 - acc: 0.7276 - val_loss: 0.5542 - val_acc: 0.8191\n",
      "Epoch 10/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 0.7982 - acc: 0.7379\n",
      "Epoch 00010: val_acc improved from 0.81905 to 0.83448, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.10-0.83.h5\n",
      "251/251 [==============================] - 13s 52ms/step - loss: 0.7982 - acc: 0.7379 - val_loss: 0.4767 - val_acc: 0.8345\n",
      "Epoch 11/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 0.7127 - acc: 0.7571\n",
      "Epoch 00011: val_acc did not improve from 0.83448\n",
      "251/251 [==============================] - 12s 49ms/step - loss: 0.7127 - acc: 0.7571 - val_loss: 0.4863 - val_acc: 0.8327\n",
      "Epoch 12/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 0.6440 - acc: 0.7780\n",
      "Epoch 00012: val_acc improved from 0.83448 to 0.84672, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.12-0.85.h5\n",
      "251/251 [==============================] - 13s 52ms/step - loss: 0.6440 - acc: 0.7780 - val_loss: 0.4703 - val_acc: 0.8467\n",
      "Epoch 13/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 0.5882 - acc: 0.8009\n",
      "Epoch 00013: val_acc improved from 0.84672 to 0.86572, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.13-0.87.h5\n",
      "251/251 [==============================] - 13s 52ms/step - loss: 0.5882 - acc: 0.8009 - val_loss: 0.3843 - val_acc: 0.8657\n",
      "Epoch 14/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 0.5874 - acc: 0.8002\n",
      "Epoch 00014: val_acc improved from 0.86572 to 0.89008, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.14-0.89.h5\n",
      "251/251 [==============================] - 13s 52ms/step - loss: 0.5874 - acc: 0.8002 - val_loss: 0.3219 - val_acc: 0.8901\n",
      "Epoch 15/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 0.5176 - acc: 0.8238\n",
      "Epoch 00015: val_acc did not improve from 0.89008\n",
      "251/251 [==============================] - 12s 49ms/step - loss: 0.5176 - acc: 0.8238 - val_loss: 0.3411 - val_acc: 0.8810\n",
      "Epoch 16/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 0.4973 - acc: 0.8359\n",
      "Epoch 00016: val_acc improved from 0.89008 to 0.89352, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.16-0.89.h5\n",
      "251/251 [==============================] - 13s 52ms/step - loss: 0.4973 - acc: 0.8359 - val_loss: 0.3026 - val_acc: 0.8935\n",
      "Epoch 17/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 0.4763 - acc: 0.8393\n",
      "Epoch 00017: val_acc improved from 0.89352 to 0.89416, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.17-0.89.h5\n",
      "251/251 [==============================] - 13s 52ms/step - loss: 0.4763 - acc: 0.8393 - val_loss: 0.3010 - val_acc: 0.8942\n",
      "Epoch 18/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 0.4629 - acc: 0.8428\n",
      "Epoch 00018: val_acc improved from 0.89416 to 0.91469, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.18-0.91.h5\n",
      "251/251 [==============================] - 13s 52ms/step - loss: 0.4629 - acc: 0.8428 - val_loss: 0.2556 - val_acc: 0.9147\n",
      "Epoch 19/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 0.4265 - acc: 0.8572\n",
      "Epoch 00019: val_acc did not improve from 0.91469\n",
      "251/251 [==============================] - 13s 50ms/step - loss: 0.4265 - acc: 0.8572 - val_loss: 0.2787 - val_acc: 0.9036\n",
      "Epoch 20/200\n",
      "251/251 [==============================] - ETA: 0s - loss: 0.4025 - acc: 0.8664\n",
      "Epoch 00020: val_acc improved from 0.91469 to 0.91954, saving model to D:\\Projects\\PycharmProjects\\Grad_\\runners\\train\\../../data/trained_model\\densenet121_model.20-0.92.h5\n",
      "251/251 [==============================] - 13s 53ms/step - loss: 0.4025 - acc: 0.8664 - val_loss: 0.2266 - val_acc: 0.9195\n",
      "Epoch 21/200\n",
      " 32/251 [==>...........................] - ETA: 8s - loss: 0.4014 - acc: 0.8750"
     ]
    }
   ],
   "source": [
    "batch_size = 32\n",
    "epochs = 200\n",
    "iterations = 251  # steps per epoch\n",
    "# Train on augmented batches; validation batches come from the un-augmented\n",
    "# generator. Checkpointing, LR reduction and early stopping come from `cbks`.\n",
    "ResNet_model.fit(train_data_gen.flow(train_x, train_y, batch_size=batch_size),\n",
    "                    steps_per_epoch=iterations,\n",
    "                    epochs=epochs,\n",
    "                    callbacks=cbks,\n",
    "                    validation_data=valid_data_gen.flow(test_x, test_y, batch_size=batch_size))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "X_data = []\n",
    "Y_data = []\n",
    "\n",
    "input_shape = (32, 32, 3)  # 3-channel (BGR) image size fed to the network\n",
    "num_class = 43  # number of GTSRB classes\n",
    "\n",
    "# Load up to 51 images per class directory; the directory name is the label.\n",
    "path = '../../data/data_set/GTSRB/Final_Training/Images'\n",
    "for file in tqdm(os.listdir(path)):\n",
    "    lab = int(file)\n",
    "    i = 0\n",
    "    for photo_file in os.listdir(path + '/' + file):\n",
    "        if i > 50:\n",
    "            break  # per-class cap reached; no need to scan the remaining files\n",
    "        i = i + 1\n",
    "        if photo_file[0] == 'G':\n",
    "            continue  # skip the GT-*.csv annotation file in each directory\n",
    "        photo_file_path = path + '/' + file + '/' + photo_file\n",
    "        img = cv2.imread(photo_file_path, 1)  # 1 = force 3-channel BGR\n",
    "        if img is None:\n",
    "            continue  # unreadable file -- skip instead of crashing in resize\n",
    "        img = cv2.resize(img, (input_shape[0], input_shape[1]))\n",
    "        X_data.append(img)\n",
    "        Y_data.append(lab)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# One-hot encode the integer labels -- required for the multi-class\n",
    "# categorical-crossentropy / softmax setup used by the models above.\n",
    "lb=preprocessing.LabelBinarizer().fit(np.array(range(num_class)))\n",
    "Y_data=lb.transform(Y_data)\n",
    "# test_y=lb.transform(test_y)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the previously trained ResNet50 GTSRB classifier for evaluation.\n",
    "model = load_model('../../data/neural_networks/GTSRB_ResNet50.h5')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Run the model on the sampled images and reduce the softmax outputs (and the\n",
    "# one-hot labels) to class indices for sklearn metrics.\n",
    "# NOTE(review): X_data is a plain Python list of uint8 images; calling\n",
    "# model(X_data) relies on implicit tensor conversion -- consider\n",
    "# model.predict(np.asarray(X_data)) for large inputs.\n",
    "# ('perdict' is a typo, kept because later cells reference these names.)\n",
    "X_data_perdict = model(X_data)\n",
    "X_data_perdict_arg = np.argmax(X_data_perdict, axis=1)\n",
    "Y_data_arg = np.argmax(Y_data, axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.metrics import classification_report\n",
    "# Per-class precision/recall/F1 over the sampled images (51 per class).\n",
    "report = classification_report(Y_data_arg,X_data_perdict_arg)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "              precision    recall  f1-score   support\n",
      "\n",
      "           0       1.00      0.96      0.98        51\n",
      "           1       0.93      0.84      0.89        51\n",
      "           2       0.69      0.96      0.80        51\n",
      "           3       0.71      0.98      0.83        51\n",
      "           4       1.00      0.90      0.95        51\n",
      "           5       0.72      0.96      0.82        51\n",
      "           6       0.91      1.00      0.95        51\n",
      "           7       0.94      0.65      0.77        51\n",
      "           8       0.90      0.90      0.90        51\n",
      "           9       0.87      0.90      0.88        51\n",
      "          10       0.93      1.00      0.96        51\n",
      "          11       0.70      0.94      0.80        51\n",
      "          12       0.96      0.98      0.97        51\n",
      "          13       0.91      1.00      0.95        51\n",
      "          14       1.00      1.00      1.00        51\n",
      "          15       0.94      0.90      0.92        51\n",
      "          16       1.00      0.65      0.79        51\n",
      "          17       1.00      1.00      1.00        51\n",
      "          18       0.93      0.53      0.67        51\n",
      "          19       0.82      0.92      0.87        51\n",
      "          20       0.92      0.92      0.92        51\n",
      "          21       0.92      0.86      0.89        51\n",
      "          22       0.96      1.00      0.98        51\n",
      "          23       0.77      0.47      0.59        51\n",
      "          24       0.95      0.73      0.82        51\n",
      "          25       0.72      0.96      0.82        51\n",
      "          26       0.71      0.96      0.82        51\n",
      "          27       0.97      0.73      0.83        51\n",
      "          28       0.86      0.98      0.92        51\n",
      "          29       0.84      0.96      0.90        51\n",
      "          30       0.98      0.84      0.91        51\n",
      "          31       1.00      0.86      0.93        51\n",
      "          32       0.80      0.80      0.80        51\n",
      "          33       0.74      0.96      0.84        51\n",
      "          34       1.00      0.96      0.98        51\n",
      "          35       1.00      1.00      1.00        51\n",
      "          36       1.00      0.94      0.97        51\n",
      "          37       0.94      1.00      0.97        51\n",
      "          38       0.72      1.00      0.84        51\n",
      "          39       1.00      0.57      0.72        51\n",
      "          40       0.97      0.69      0.80        51\n",
      "          41       1.00      0.75      0.85        51\n",
      "          42       0.96      0.98      0.97        51\n",
      "\n",
      "    accuracy                           0.88      2193\n",
      "   macro avg       0.90      0.88      0.88      2193\n",
      "weighted avg       0.90      0.88      0.88      2193\n",
      "\n"
     ]
    }
   ],
   "source": [
    "print(report)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Serialize the model architecture only (no weights) to a JSON string.\n",
    "json_config = model.to_json()\n",
    "json_config"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# NOTE(review): file is named 'VGG16_model.json' but the model loaded above\n",
    "# is the ResNet50 checkpoint -- confirm the intended file name.\n",
    "with open('VGG16_model.json', 'w') as json_file:\n",
    "    json_file.write(json_config)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "grad",
   "language": "python",
   "name": "grad"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.15"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
