{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "import tensorflow as tf\n",
    "from unicodedata import normalize\n",
    "from tensorflow import keras\n",
    "import os\n",
    "from tensorflow.keras.models import Sequential, load_model\n",
    "from tensorflow.keras.optimizers import RMSprop\n",
    "from collections import Counter\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "from matplotlib import pyplot as plt\n",
    "from tensorflow.keras.layers import Conv2D, BatchNormalization, Activation, MaxPool2D, Dropout, Flatten, Dense,Input,Reshape,MaxPooling2D\n",
    "from tensorflow.keras import Model,regularizers\n",
    "from tensorflow.keras.datasets import cifar10\n",
    "from tensorflow.keras.callbacks import LearningRateScheduler, ModelCheckpoint, EarlyStopping\n",
    "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
    "from tensorflow.keras.initializers import he_normal\n",
    "from tensorflow.keras import optimizers\n",
    "from sklearn.preprocessing import MinMaxScaler\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn import preprocessing\n",
    "from sklearn.metrics import accuracy_score\n",
    "from tqdm import tqdm\n",
    "import cv2"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|█████████████████████████████████████████████████████████████████████████████████| 43/43 [00:00<00:00, 228.13it/s]\n"
     ]
    }
   ],
   "source": [
    "X_data = []\n",
    "Y_data = []\n",
    "\n",
    "input_shape = (32, 32, 3)  # 3-channel (BGR, per cv2.imread) image data\n",
    "num_class = 43             # number of GTSRB classes\n",
    "\n",
    "path = '../../data/data_set/GTSRB/Final_Training/Images'\n",
    "for file in tqdm(os.listdir(path)):\n",
    "    lab = int(file)  # each class directory is named by its numeric label\n",
    "    i = 0\n",
    "    for photo_file in os.listdir(path + '/' + file):\n",
    "        if i > 50:\n",
    "            break  # per-class limit reached: stop scanning instead of skipping every remaining file\n",
    "        i = i + 1\n",
    "        if photo_file[0] == 'G':\n",
    "            continue  # skip the GT-*.csv annotation file shipped in each directory\n",
    "        photo_file_path = path + '/' + file + '/' + photo_file\n",
    "        img = cv2.imread(photo_file_path, 1)  # 1 = cv2.IMREAD_COLOR\n",
    "        if img is None:\n",
    "            continue  # unreadable/corrupt file: skip rather than crash in cv2.resize\n",
    "        img = cv2.resize(img, (input_shape[0], input_shape[1]))\n",
    "        X_data.append(img)\n",
    "        Y_data.append(lab)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2193\n",
      "Counter({0: 51, 1: 51, 2: 51, 3: 51, 4: 51, 5: 51, 6: 51, 7: 51, 8: 51, 9: 51, 10: 51, 11: 51, 12: 51, 13: 51, 14: 51, 15: 51, 16: 51, 17: 51, 18: 51, 19: 51, 20: 51, 21: 51, 22: 51, 23: 51, 24: 51, 25: 51, 26: 51, 27: 51, 28: 51, 29: 51, 30: 51, 31: 51, 32: 51, 33: 51, 34: 51, 35: 51, 36: 51, 37: 51, 38: 51, 39: 51, 40: 51, 41: 51, 42: 51})\n"
     ]
    }
   ],
   "source": [
    "# Sanity check: total sample count and per-class balance, then convert\n",
    "# the Python lists to numpy arrays with pixels scaled into [0, 1].\n",
    "print(len(X_data))\n",
    "print(Counter(Y_data))\n",
    "\n",
    "X_data = np.asarray(X_data) / 255.0\n",
    "Y_data = np.asarray(Y_data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "# One-hot encode the labels for the multi-class categorical loss.\n",
    "# Fitting on range(num_class) pins all 43 classes so the encoding is\n",
    "# stable even if some class were missing from Y_data.\n",
    "lb = preprocessing.LabelBinarizer().fit(np.array(range(num_class)))\n",
    "Y_data = lb.transform(Y_data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Split into train/test sets. Y_data was already one-hot encoded in the\n",
    "# previous cell, so train_y/test_y come out one-hot here -- the original\n",
    "# re-applied LabelBinarizer.transform to already-binarized rows, a\n",
    "# double-encoding bug on a fresh Restart-&-Run-All.\n",
    "# random_state pins the split so results are reproducible.\n",
    "train_x, test_x, train_y, test_y = train_test_split(\n",
    "    X_data, Y_data, test_size=0.2, random_state=42)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[[[0.24705882, 0.18431373, 0.16078431],\n",
       "         [0.25882353, 0.24313725, 0.20392157],\n",
       "         [0.2745098 , 0.22352941, 0.18823529],\n",
       "         ...,\n",
       "         [0.21568627, 0.18039216, 0.16862745],\n",
       "         [0.22745098, 0.18823529, 0.17254902],\n",
       "         [0.21960784, 0.18431373, 0.16862745]],\n",
       "\n",
       "        [[0.21568627, 0.16078431, 0.14117647],\n",
       "         [0.19607843, 0.14901961, 0.13333333],\n",
       "         [0.20392157, 0.16470588, 0.16470588],\n",
       "         ...,\n",
       "         [0.21176471, 0.17647059, 0.16862745],\n",
       "         [0.22745098, 0.18823529, 0.18039216],\n",
       "         [0.24705882, 0.19607843, 0.17647059]],\n",
       "\n",
       "        [[0.19215686, 0.15686275, 0.14509804],\n",
       "         [0.18431373, 0.15686275, 0.14509804],\n",
       "         [0.2       , 0.16862745, 0.17254902],\n",
       "         ...,\n",
       "         [0.20392157, 0.17647059, 0.16862745],\n",
       "         [0.21176471, 0.18039216, 0.17254902],\n",
       "         [0.21960784, 0.18823529, 0.18039216]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.16470588, 0.14117647, 0.1372549 ],\n",
       "         [0.15686275, 0.14117647, 0.13333333],\n",
       "         [0.18039216, 0.16078431, 0.14509804],\n",
       "         ...,\n",
       "         [0.16078431, 0.1372549 , 0.1372549 ],\n",
       "         [0.16862745, 0.14509804, 0.14509804],\n",
       "         [0.18039216, 0.15294118, 0.14117647]],\n",
       "\n",
       "        [[0.27843137, 0.19607843, 0.18823529],\n",
       "         [0.19215686, 0.14901961, 0.14117647],\n",
       "         [0.16470588, 0.12941176, 0.1254902 ],\n",
       "         ...,\n",
       "         [0.14509804, 0.1254902 , 0.1254902 ],\n",
       "         [0.16078431, 0.12941176, 0.1254902 ],\n",
       "         [0.17647059, 0.1372549 , 0.13333333]],\n",
       "\n",
       "        [[0.17647059, 0.14509804, 0.1372549 ],\n",
       "         [0.17647059, 0.14901961, 0.14509804],\n",
       "         [0.15686275, 0.12941176, 0.12941176],\n",
       "         ...,\n",
       "         [0.15686275, 0.12941176, 0.1254902 ],\n",
       "         [0.15294118, 0.1254902 , 0.12156863],\n",
       "         [0.15686275, 0.1254902 , 0.12156863]]],\n",
       "\n",
       "\n",
       "       [[[0.24705882, 0.24313725, 0.23529412],\n",
       "         [0.23921569, 0.23921569, 0.23137255],\n",
       "         [0.24705882, 0.23137255, 0.22745098],\n",
       "         ...,\n",
       "         [0.15686275, 0.14901961, 0.15294118],\n",
       "         [0.16862745, 0.15686275, 0.16078431],\n",
       "         [0.17254902, 0.16078431, 0.16470588]],\n",
       "\n",
       "        [[0.2627451 , 0.23921569, 0.21568627],\n",
       "         [0.23529412, 0.21568627, 0.20392157],\n",
       "         [0.23921569, 0.21568627, 0.21176471],\n",
       "         ...,\n",
       "         [0.16078431, 0.15294118, 0.15686275],\n",
       "         [0.16862745, 0.16078431, 0.16470588],\n",
       "         [0.16862745, 0.16078431, 0.16862745]],\n",
       "\n",
       "        [[0.27058824, 0.23529412, 0.20392157],\n",
       "         [0.23137255, 0.2       , 0.18823529],\n",
       "         [0.21568627, 0.19215686, 0.18823529],\n",
       "         ...,\n",
       "         [0.16078431, 0.15294118, 0.16078431],\n",
       "         [0.16470588, 0.15686275, 0.16862745],\n",
       "         [0.16470588, 0.16078431, 0.16862745]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.12941176, 0.11372549, 0.12156863],\n",
       "         [0.12941176, 0.11372549, 0.11764706],\n",
       "         [0.12941176, 0.11372549, 0.11764706],\n",
       "         ...,\n",
       "         [0.11764706, 0.10196078, 0.10196078],\n",
       "         [0.10980392, 0.09803922, 0.09803922],\n",
       "         [0.10588235, 0.09803922, 0.10196078]],\n",
       "\n",
       "        [[0.12156863, 0.10980392, 0.11372549],\n",
       "         [0.11764706, 0.10588235, 0.10980392],\n",
       "         [0.12156863, 0.10588235, 0.11372549],\n",
       "         ...,\n",
       "         [0.10980392, 0.09803922, 0.09803922],\n",
       "         [0.10588235, 0.09411765, 0.10196078],\n",
       "         [0.09803922, 0.09411765, 0.09803922]],\n",
       "\n",
       "        [[0.11764706, 0.10980392, 0.10980392],\n",
       "         [0.11372549, 0.10196078, 0.10980392],\n",
       "         [0.11764706, 0.10588235, 0.10980392],\n",
       "         ...,\n",
       "         [0.09803922, 0.09019608, 0.09019608],\n",
       "         [0.10196078, 0.09411765, 0.09411765],\n",
       "         [0.10196078, 0.09411765, 0.09411765]]],\n",
       "\n",
       "\n",
       "       [[[1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         ...,\n",
       "         [0.88627451, 0.89411765, 0.8627451 ],\n",
       "         [0.84313725, 0.85882353, 0.84313725],\n",
       "         [0.92156863, 0.92156863, 0.92941176]],\n",
       "\n",
       "        [[1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         ...,\n",
       "         [0.81960784, 0.83137255, 0.79215686],\n",
       "         [0.82352941, 0.85098039, 0.82745098],\n",
       "         [0.96862745, 0.96862745, 0.96470588]],\n",
       "\n",
       "        [[1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         ...,\n",
       "         [0.81568627, 0.81568627, 0.76470588],\n",
       "         [0.80392157, 0.83137255, 0.8       ],\n",
       "         [0.92156863, 0.92156863, 0.90196078]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         ...,\n",
       "         [0.56470588, 0.60392157, 0.56078431],\n",
       "         [0.54901961, 0.61176471, 0.61176471],\n",
       "         [0.65490196, 0.66666667, 0.70588235]],\n",
       "\n",
       "        [[1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         ...,\n",
       "         [0.58823529, 0.61568627, 0.58039216],\n",
       "         [0.57647059, 0.62352941, 0.61176471],\n",
       "         [0.62352941, 0.64705882, 0.6627451 ]],\n",
       "\n",
       "        [[1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         ...,\n",
       "         [0.6745098 , 0.68627451, 0.65882353],\n",
       "         [0.58823529, 0.63529412, 0.61568627],\n",
       "         [0.61960784, 0.63529412, 0.62745098]]],\n",
       "\n",
       "\n",
       "       ...,\n",
       "\n",
       "\n",
       "       [[[0.1372549 , 0.11764706, 0.10980392],\n",
       "         [0.1372549 , 0.10980392, 0.11372549],\n",
       "         [0.13333333, 0.10980392, 0.12941176],\n",
       "         ...,\n",
       "         [0.10980392, 0.11764706, 0.09411765],\n",
       "         [0.05882353, 0.05882353, 0.0627451 ],\n",
       "         [0.05882353, 0.05098039, 0.05098039]],\n",
       "\n",
       "        [[0.12941176, 0.11372549, 0.11764706],\n",
       "         [0.12941176, 0.11372549, 0.12156863],\n",
       "         [0.1254902 , 0.11372549, 0.11764706],\n",
       "         ...,\n",
       "         [0.09019608, 0.09019608, 0.07843137],\n",
       "         [0.07843137, 0.0745098 , 0.07058824],\n",
       "         [0.0627451 , 0.05490196, 0.05098039]],\n",
       "\n",
       "        [[0.11764706, 0.10588235, 0.12156863],\n",
       "         [0.10588235, 0.09803922, 0.10588235],\n",
       "         [0.12156863, 0.10588235, 0.09803922],\n",
       "         ...,\n",
       "         [0.08627451, 0.08235294, 0.07843137],\n",
       "         [0.08627451, 0.08235294, 0.07843137],\n",
       "         [0.06666667, 0.05882353, 0.05882353]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.09411765, 0.09411765, 0.10196078],\n",
       "         [0.10980392, 0.11372549, 0.11764706],\n",
       "         [0.10588235, 0.10196078, 0.10588235],\n",
       "         ...,\n",
       "         [0.14117647, 0.13333333, 0.12941176],\n",
       "         [0.10196078, 0.1254902 , 0.12156863],\n",
       "         [0.09803922, 0.10588235, 0.10980392]],\n",
       "\n",
       "        [[0.10196078, 0.09803922, 0.1254902 ],\n",
       "         [0.11764706, 0.11372549, 0.12156863],\n",
       "         [0.1254902 , 0.11372549, 0.11764706],\n",
       "         ...,\n",
       "         [0.10980392, 0.10588235, 0.10588235],\n",
       "         [0.09411765, 0.11372549, 0.10980392],\n",
       "         [0.10588235, 0.10588235, 0.10196078]],\n",
       "\n",
       "        [[0.11372549, 0.10196078, 0.12156863],\n",
       "         [0.12941176, 0.12156863, 0.14117647],\n",
       "         [0.11764706, 0.10980392, 0.1254902 ],\n",
       "         ...,\n",
       "         [0.10980392, 0.10980392, 0.10980392],\n",
       "         [0.10196078, 0.10588235, 0.10196078],\n",
       "         [0.11372549, 0.10588235, 0.10980392]]],\n",
       "\n",
       "\n",
       "       [[[0.07058824, 0.05882353, 0.05882353],\n",
       "         [0.07058824, 0.05882353, 0.0627451 ],\n",
       "         [0.07058824, 0.05882353, 0.0627451 ],\n",
       "         ...,\n",
       "         [0.07058824, 0.05882353, 0.0627451 ],\n",
       "         [0.06666667, 0.05490196, 0.05882353],\n",
       "         [0.07058824, 0.05882353, 0.05490196]],\n",
       "\n",
       "        [[0.07058824, 0.05490196, 0.05882353],\n",
       "         [0.07058824, 0.05490196, 0.05882353],\n",
       "         [0.0627451 , 0.05490196, 0.05882353],\n",
       "         ...,\n",
       "         [0.05882353, 0.05098039, 0.05098039],\n",
       "         [0.07058824, 0.05882353, 0.05882353],\n",
       "         [0.07058824, 0.05882353, 0.0627451 ]],\n",
       "\n",
       "        [[0.07058824, 0.05490196, 0.05882353],\n",
       "         [0.06666667, 0.05490196, 0.05882353],\n",
       "         [0.0627451 , 0.05490196, 0.05882353],\n",
       "         ...,\n",
       "         [0.06666667, 0.05490196, 0.05882353],\n",
       "         [0.07058824, 0.05490196, 0.05882353],\n",
       "         [0.07058824, 0.05882353, 0.0627451 ]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.12941176, 0.07843137, 0.07843137],\n",
       "         [0.13333333, 0.08235294, 0.0745098 ],\n",
       "         [0.15294118, 0.08627451, 0.08235294],\n",
       "         ...,\n",
       "         [0.12156863, 0.09411765, 0.09019608],\n",
       "         [0.09803922, 0.09019608, 0.0745098 ],\n",
       "         [0.0627451 , 0.05882353, 0.0627451 ]],\n",
       "\n",
       "        [[0.21176471, 0.15686275, 0.1372549 ],\n",
       "         [0.21568627, 0.15294118, 0.1372549 ],\n",
       "         [0.22352941, 0.1372549 , 0.12156863],\n",
       "         ...,\n",
       "         [0.12156863, 0.09019608, 0.07843137],\n",
       "         [0.10196078, 0.09411765, 0.07843137],\n",
       "         [0.06666667, 0.05882353, 0.0627451 ]],\n",
       "\n",
       "        [[0.21176471, 0.16470588, 0.14509804],\n",
       "         [0.20784314, 0.16078431, 0.1372549 ],\n",
       "         [0.22745098, 0.16078431, 0.1372549 ],\n",
       "         ...,\n",
       "         [0.12156863, 0.08627451, 0.06666667],\n",
       "         [0.10196078, 0.09803922, 0.0745098 ],\n",
       "         [0.06666667, 0.0627451 , 0.0627451 ]]],\n",
       "\n",
       "\n",
       "       [[[0.49411765, 0.55686275, 0.81960784],\n",
       "         [0.40784314, 0.55294118, 0.69803922],\n",
       "         [0.35294118, 0.51372549, 0.6       ],\n",
       "         ...,\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ],\n",
       "         [1.        , 1.        , 1.        ]],\n",
       "\n",
       "        [[0.45098039, 0.58039216, 0.74117647],\n",
       "         [0.43137255, 0.61568627, 0.78431373],\n",
       "         [0.41960784, 0.61960784, 0.79607843],\n",
       "         ...,\n",
       "         [0.98039216, 1.        , 1.        ],\n",
       "         [0.97647059, 0.99607843, 1.        ],\n",
       "         [0.98431373, 1.        , 1.        ]],\n",
       "\n",
       "        [[0.48627451, 0.55294118, 0.62745098],\n",
       "         [0.49019608, 0.56078431, 0.7254902 ],\n",
       "         [0.44705882, 0.58039216, 0.79215686],\n",
       "         ...,\n",
       "         [0.91372549, 1.        , 1.        ],\n",
       "         [0.88235294, 0.98039216, 1.        ],\n",
       "         [0.79607843, 0.87843137, 0.90980392]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.28627451, 0.36470588, 0.4627451 ],\n",
       "         [0.31372549, 0.4       , 0.45882353],\n",
       "         [0.35686275, 0.45882353, 0.50196078],\n",
       "         ...,\n",
       "         [0.33333333, 0.4745098 , 0.49411765],\n",
       "         [0.38431373, 0.51764706, 0.56862745],\n",
       "         [0.29411765, 0.44313725, 0.52156863]],\n",
       "\n",
       "        [[0.37254902, 0.47843137, 0.64705882],\n",
       "         [0.35686275, 0.4745098 , 0.6       ],\n",
       "         [0.39607843, 0.4745098 , 0.57254902],\n",
       "         ...,\n",
       "         [0.31372549, 0.43529412, 0.44313725],\n",
       "         [0.31764706, 0.43137255, 0.49803922],\n",
       "         [0.27058824, 0.38431373, 0.4627451 ]],\n",
       "\n",
       "        [[0.36862745, 0.4745098 , 0.68627451],\n",
       "         [0.32941176, 0.4745098 , 0.65098039],\n",
       "         [0.38039216, 0.49803922, 0.65490196],\n",
       "         ...,\n",
       "         [0.23921569, 0.34901961, 0.32941176],\n",
       "         [0.28627451, 0.38431373, 0.45882353],\n",
       "         [0.23921569, 0.33333333, 0.38823529]]]])"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "test_x"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "31367"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "len(train_x)  # number of training samples (80% of the dataset, per test_size=0.2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "# NOTE(review): mid-notebook import -- consider moving to the top import cell.\n",
    "from tensorflow.keras.applications import ResNet50\n",
    "# weights=None: randomly initialized (no pretrained ImageNet weights);\n",
    "# include_top=False drops the 1000-way classifier head; pooling='max'\n",
    "# applies global max pooling so the base outputs a flat feature vector.\n",
    "ResNet_base = ResNet50(include_top=False, weights=None, \n",
    "                              input_shape=(input_shape[0],input_shape[1],3), pooling='max')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"functional_1\"\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "input_2 (InputLayer)            [(None, 32, 32, 3)]  0                                            \n",
      "__________________________________________________________________________________________________\n",
      "conv1_pad (ZeroPadding2D)       (None, 38, 38, 3)    0           input_2[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "conv1_conv (Conv2D)             (None, 16, 16, 64)   9472        conv1_pad[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "conv1_bn (BatchNormalization)   (None, 16, 16, 64)   256         conv1_conv[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "conv1_relu (Activation)         (None, 16, 16, 64)   0           conv1_bn[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "pool1_pad (ZeroPadding2D)       (None, 18, 18, 64)   0           conv1_relu[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "pool1_pool (MaxPooling2D)       (None, 8, 8, 64)     0           pool1_pad[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_1_conv (Conv2D)    (None, 8, 8, 64)     4160        pool1_pool[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_1_bn (BatchNormali (None, 8, 8, 64)     256         conv2_block1_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_1_relu (Activation (None, 8, 8, 64)     0           conv2_block1_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_2_conv (Conv2D)    (None, 8, 8, 64)     36928       conv2_block1_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_2_bn (BatchNormali (None, 8, 8, 64)     256         conv2_block1_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_2_relu (Activation (None, 8, 8, 64)     0           conv2_block1_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_0_conv (Conv2D)    (None, 8, 8, 256)    16640       pool1_pool[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_3_conv (Conv2D)    (None, 8, 8, 256)    16640       conv2_block1_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_0_bn (BatchNormali (None, 8, 8, 256)    1024        conv2_block1_0_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_3_bn (BatchNormali (None, 8, 8, 256)    1024        conv2_block1_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_add (Add)          (None, 8, 8, 256)    0           conv2_block1_0_bn[0][0]          \n",
      "                                                                 conv2_block1_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block1_out (Activation)   (None, 8, 8, 256)    0           conv2_block1_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_1_conv (Conv2D)    (None, 8, 8, 64)     16448       conv2_block1_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_1_bn (BatchNormali (None, 8, 8, 64)     256         conv2_block2_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_1_relu (Activation (None, 8, 8, 64)     0           conv2_block2_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_2_conv (Conv2D)    (None, 8, 8, 64)     36928       conv2_block2_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_2_bn (BatchNormali (None, 8, 8, 64)     256         conv2_block2_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_2_relu (Activation (None, 8, 8, 64)     0           conv2_block2_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_3_conv (Conv2D)    (None, 8, 8, 256)    16640       conv2_block2_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_3_bn (BatchNormali (None, 8, 8, 256)    1024        conv2_block2_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_add (Add)          (None, 8, 8, 256)    0           conv2_block1_out[0][0]           \n",
      "                                                                 conv2_block2_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block2_out (Activation)   (None, 8, 8, 256)    0           conv2_block2_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_1_conv (Conv2D)    (None, 8, 8, 64)     16448       conv2_block2_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_1_bn (BatchNormali (None, 8, 8, 64)     256         conv2_block3_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_1_relu (Activation (None, 8, 8, 64)     0           conv2_block3_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_2_conv (Conv2D)    (None, 8, 8, 64)     36928       conv2_block3_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_2_bn (BatchNormali (None, 8, 8, 64)     256         conv2_block3_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_2_relu (Activation (None, 8, 8, 64)     0           conv2_block3_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_3_conv (Conv2D)    (None, 8, 8, 256)    16640       conv2_block3_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_3_bn (BatchNormali (None, 8, 8, 256)    1024        conv2_block3_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_add (Add)          (None, 8, 8, 256)    0           conv2_block2_out[0][0]           \n",
      "                                                                 conv2_block3_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv2_block3_out (Activation)   (None, 8, 8, 256)    0           conv2_block3_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_1_conv (Conv2D)    (None, 4, 4, 128)    32896       conv2_block3_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_1_bn (BatchNormali (None, 4, 4, 128)    512         conv3_block1_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_1_relu (Activation (None, 4, 4, 128)    0           conv3_block1_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_2_conv (Conv2D)    (None, 4, 4, 128)    147584      conv3_block1_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_2_bn (BatchNormali (None, 4, 4, 128)    512         conv3_block1_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_2_relu (Activation (None, 4, 4, 128)    0           conv3_block1_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_0_conv (Conv2D)    (None, 4, 4, 512)    131584      conv2_block3_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_3_conv (Conv2D)    (None, 4, 4, 512)    66048       conv3_block1_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_0_bn (BatchNormali (None, 4, 4, 512)    2048        conv3_block1_0_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_3_bn (BatchNormali (None, 4, 4, 512)    2048        conv3_block1_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_add (Add)          (None, 4, 4, 512)    0           conv3_block1_0_bn[0][0]          \n",
      "                                                                 conv3_block1_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block1_out (Activation)   (None, 4, 4, 512)    0           conv3_block1_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_1_conv (Conv2D)    (None, 4, 4, 128)    65664       conv3_block1_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_1_bn (BatchNormali (None, 4, 4, 128)    512         conv3_block2_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_1_relu (Activation (None, 4, 4, 128)    0           conv3_block2_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_2_conv (Conv2D)    (None, 4, 4, 128)    147584      conv3_block2_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_2_bn (BatchNormali (None, 4, 4, 128)    512         conv3_block2_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_2_relu (Activation (None, 4, 4, 128)    0           conv3_block2_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_3_conv (Conv2D)    (None, 4, 4, 512)    66048       conv3_block2_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_3_bn (BatchNormali (None, 4, 4, 512)    2048        conv3_block2_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_add (Add)          (None, 4, 4, 512)    0           conv3_block1_out[0][0]           \n",
      "                                                                 conv3_block2_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block2_out (Activation)   (None, 4, 4, 512)    0           conv3_block2_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_1_conv (Conv2D)    (None, 4, 4, 128)    65664       conv3_block2_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_1_bn (BatchNormali (None, 4, 4, 128)    512         conv3_block3_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_1_relu (Activation (None, 4, 4, 128)    0           conv3_block3_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_2_conv (Conv2D)    (None, 4, 4, 128)    147584      conv3_block3_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_2_bn (BatchNormali (None, 4, 4, 128)    512         conv3_block3_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_2_relu (Activation (None, 4, 4, 128)    0           conv3_block3_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_3_conv (Conv2D)    (None, 4, 4, 512)    66048       conv3_block3_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_3_bn (BatchNormali (None, 4, 4, 512)    2048        conv3_block3_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_add (Add)          (None, 4, 4, 512)    0           conv3_block2_out[0][0]           \n",
      "                                                                 conv3_block3_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block3_out (Activation)   (None, 4, 4, 512)    0           conv3_block3_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_1_conv (Conv2D)    (None, 4, 4, 128)    65664       conv3_block3_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_1_bn (BatchNormali (None, 4, 4, 128)    512         conv3_block4_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_1_relu (Activation (None, 4, 4, 128)    0           conv3_block4_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_2_conv (Conv2D)    (None, 4, 4, 128)    147584      conv3_block4_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_2_bn (BatchNormali (None, 4, 4, 128)    512         conv3_block4_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_2_relu (Activation (None, 4, 4, 128)    0           conv3_block4_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_3_conv (Conv2D)    (None, 4, 4, 512)    66048       conv3_block4_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_3_bn (BatchNormali (None, 4, 4, 512)    2048        conv3_block4_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_add (Add)          (None, 4, 4, 512)    0           conv3_block3_out[0][0]           \n",
      "                                                                 conv3_block4_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv3_block4_out (Activation)   (None, 4, 4, 512)    0           conv3_block4_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_1_conv (Conv2D)    (None, 2, 2, 256)    131328      conv3_block4_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_1_bn (BatchNormali (None, 2, 2, 256)    1024        conv4_block1_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_1_relu (Activation (None, 2, 2, 256)    0           conv4_block1_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_2_conv (Conv2D)    (None, 2, 2, 256)    590080      conv4_block1_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_2_bn (BatchNormali (None, 2, 2, 256)    1024        conv4_block1_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_2_relu (Activation (None, 2, 2, 256)    0           conv4_block1_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_0_conv (Conv2D)    (None, 2, 2, 1024)   525312      conv3_block4_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_3_conv (Conv2D)    (None, 2, 2, 1024)   263168      conv4_block1_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_0_bn (BatchNormali (None, 2, 2, 1024)   4096        conv4_block1_0_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_3_bn (BatchNormali (None, 2, 2, 1024)   4096        conv4_block1_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_add (Add)          (None, 2, 2, 1024)   0           conv4_block1_0_bn[0][0]          \n",
      "                                                                 conv4_block1_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block1_out (Activation)   (None, 2, 2, 1024)   0           conv4_block1_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_1_conv (Conv2D)    (None, 2, 2, 256)    262400      conv4_block1_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_1_bn (BatchNormali (None, 2, 2, 256)    1024        conv4_block2_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_1_relu (Activation (None, 2, 2, 256)    0           conv4_block2_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_2_conv (Conv2D)    (None, 2, 2, 256)    590080      conv4_block2_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_2_bn (BatchNormali (None, 2, 2, 256)    1024        conv4_block2_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_2_relu (Activation (None, 2, 2, 256)    0           conv4_block2_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_3_conv (Conv2D)    (None, 2, 2, 1024)   263168      conv4_block2_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_3_bn (BatchNormali (None, 2, 2, 1024)   4096        conv4_block2_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_add (Add)          (None, 2, 2, 1024)   0           conv4_block1_out[0][0]           \n",
      "                                                                 conv4_block2_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block2_out (Activation)   (None, 2, 2, 1024)   0           conv4_block2_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_1_conv (Conv2D)    (None, 2, 2, 256)    262400      conv4_block2_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_1_bn (BatchNormali (None, 2, 2, 256)    1024        conv4_block3_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_1_relu (Activation (None, 2, 2, 256)    0           conv4_block3_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_2_conv (Conv2D)    (None, 2, 2, 256)    590080      conv4_block3_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_2_bn (BatchNormali (None, 2, 2, 256)    1024        conv4_block3_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_2_relu (Activation (None, 2, 2, 256)    0           conv4_block3_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_3_conv (Conv2D)    (None, 2, 2, 1024)   263168      conv4_block3_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_3_bn (BatchNormali (None, 2, 2, 1024)   4096        conv4_block3_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_add (Add)          (None, 2, 2, 1024)   0           conv4_block2_out[0][0]           \n",
      "                                                                 conv4_block3_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block3_out (Activation)   (None, 2, 2, 1024)   0           conv4_block3_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_1_conv (Conv2D)    (None, 2, 2, 256)    262400      conv4_block3_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_1_bn (BatchNormali (None, 2, 2, 256)    1024        conv4_block4_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_1_relu (Activation (None, 2, 2, 256)    0           conv4_block4_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_2_conv (Conv2D)    (None, 2, 2, 256)    590080      conv4_block4_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_2_bn (BatchNormali (None, 2, 2, 256)    1024        conv4_block4_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_2_relu (Activation (None, 2, 2, 256)    0           conv4_block4_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_3_conv (Conv2D)    (None, 2, 2, 1024)   263168      conv4_block4_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_3_bn (BatchNormali (None, 2, 2, 1024)   4096        conv4_block4_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_add (Add)          (None, 2, 2, 1024)   0           conv4_block3_out[0][0]           \n",
      "                                                                 conv4_block4_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block4_out (Activation)   (None, 2, 2, 1024)   0           conv4_block4_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_1_conv (Conv2D)    (None, 2, 2, 256)    262400      conv4_block4_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_1_bn (BatchNormali (None, 2, 2, 256)    1024        conv4_block5_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_1_relu (Activation (None, 2, 2, 256)    0           conv4_block5_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_2_conv (Conv2D)    (None, 2, 2, 256)    590080      conv4_block5_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_2_bn (BatchNormali (None, 2, 2, 256)    1024        conv4_block5_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_2_relu (Activation (None, 2, 2, 256)    0           conv4_block5_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_3_conv (Conv2D)    (None, 2, 2, 1024)   263168      conv4_block5_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_3_bn (BatchNormali (None, 2, 2, 1024)   4096        conv4_block5_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_add (Add)          (None, 2, 2, 1024)   0           conv4_block4_out[0][0]           \n",
      "                                                                 conv4_block5_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block5_out (Activation)   (None, 2, 2, 1024)   0           conv4_block5_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_1_conv (Conv2D)    (None, 2, 2, 256)    262400      conv4_block5_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_1_bn (BatchNormali (None, 2, 2, 256)    1024        conv4_block6_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_1_relu (Activation (None, 2, 2, 256)    0           conv4_block6_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_2_conv (Conv2D)    (None, 2, 2, 256)    590080      conv4_block6_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_2_bn (BatchNormali (None, 2, 2, 256)    1024        conv4_block6_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_2_relu (Activation (None, 2, 2, 256)    0           conv4_block6_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_3_conv (Conv2D)    (None, 2, 2, 1024)   263168      conv4_block6_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_3_bn (BatchNormali (None, 2, 2, 1024)   4096        conv4_block6_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_add (Add)          (None, 2, 2, 1024)   0           conv4_block5_out[0][0]           \n",
      "                                                                 conv4_block6_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv4_block6_out (Activation)   (None, 2, 2, 1024)   0           conv4_block6_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_1_conv (Conv2D)    (None, 1, 1, 512)    524800      conv4_block6_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_1_bn (BatchNormali (None, 1, 1, 512)    2048        conv5_block1_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_1_relu (Activation (None, 1, 1, 512)    0           conv5_block1_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_2_conv (Conv2D)    (None, 1, 1, 512)    2359808     conv5_block1_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_2_bn (BatchNormali (None, 1, 1, 512)    2048        conv5_block1_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_2_relu (Activation (None, 1, 1, 512)    0           conv5_block1_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_0_conv (Conv2D)    (None, 1, 1, 2048)   2099200     conv4_block6_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_3_conv (Conv2D)    (None, 1, 1, 2048)   1050624     conv5_block1_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_0_bn (BatchNormali (None, 1, 1, 2048)   8192        conv5_block1_0_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_3_bn (BatchNormali (None, 1, 1, 2048)   8192        conv5_block1_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_add (Add)          (None, 1, 1, 2048)   0           conv5_block1_0_bn[0][0]          \n",
      "                                                                 conv5_block1_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block1_out (Activation)   (None, 1, 1, 2048)   0           conv5_block1_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_1_conv (Conv2D)    (None, 1, 1, 512)    1049088     conv5_block1_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_1_bn (BatchNormali (None, 1, 1, 512)    2048        conv5_block2_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_1_relu (Activation (None, 1, 1, 512)    0           conv5_block2_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_2_conv (Conv2D)    (None, 1, 1, 512)    2359808     conv5_block2_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_2_bn (BatchNormali (None, 1, 1, 512)    2048        conv5_block2_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_2_relu (Activation (None, 1, 1, 512)    0           conv5_block2_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_3_conv (Conv2D)    (None, 1, 1, 2048)   1050624     conv5_block2_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_3_bn (BatchNormali (None, 1, 1, 2048)   8192        conv5_block2_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_add (Add)          (None, 1, 1, 2048)   0           conv5_block1_out[0][0]           \n",
      "                                                                 conv5_block2_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block2_out (Activation)   (None, 1, 1, 2048)   0           conv5_block2_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_1_conv (Conv2D)    (None, 1, 1, 512)    1049088     conv5_block2_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_1_bn (BatchNormali (None, 1, 1, 512)    2048        conv5_block3_1_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_1_relu (Activation (None, 1, 1, 512)    0           conv5_block3_1_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_2_conv (Conv2D)    (None, 1, 1, 512)    2359808     conv5_block3_1_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_2_bn (BatchNormali (None, 1, 1, 512)    2048        conv5_block3_2_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_2_relu (Activation (None, 1, 1, 512)    0           conv5_block3_2_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_3_conv (Conv2D)    (None, 1, 1, 2048)   1050624     conv5_block3_2_relu[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_3_bn (BatchNormali (None, 1, 1, 2048)   8192        conv5_block3_3_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_add (Add)          (None, 1, 1, 2048)   0           conv5_block2_out[0][0]           \n",
      "                                                                 conv5_block3_3_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "conv5_block3_out (Activation)   (None, 1, 1, 2048)   0           conv5_block3_add[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "max_pool (GlobalMaxPooling2D)   (None, 2048)         0           conv5_block3_out[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "flatten (Flatten)               (None, 2048)         0           max_pool[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "dense (Dense)                   (None, 512)          1049088     flatten[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "dropout_1 (Dropout)             (None, 512)          0           dense[0][0]                      \n",
      "__________________________________________________________________________________________________\n",
      "dense_1 (Dense)                 (None, 43)           22059       dropout_1[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 24,658,859\n",
      "Trainable params: 24,605,739\n",
      "Non-trainable params: 53,120\n",
      "__________________________________________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "# Attach a new classification head to the ResNet base\n",
    "# (ResNet_base is defined in an earlier cell).\n",
    "x = ResNet_base.output\n",
    "x = Flatten(name='flatten')(x)\n",
    "# Fully-connected layer with L2 weight decay on the kernel.\n",
    "x = Dense(512, activation='relu', kernel_regularizer=regularizers.l2(0.0001), )(x)\n",
    "# x = BatchNormalization(name='bn_fc_01')(x)\n",
    "# Dropout before the classifier to reduce overfitting.\n",
    "x = Dropout(0.5)(x)\n",
    "# Softmax over the num_class (= 43) traffic-sign classes.\n",
    "predictions = Dense(num_class, activation='softmax')(x)\n",
    "ResNet_model = Model(inputs=ResNet_base.input, outputs=predictions)\n",
    "\n",
    "ResNet_model.summary()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "# SGD with Nesterov momentum; `learning_rate` is the current spelling of the\n",
    "# deprecated `lr` keyword (accepted since TF 1.14 / 2.0).\n",
    "sgd = optimizers.SGD(learning_rate=0.01, decay=1e-4, momentum=0.9, nesterov=True)\n",
    "ResNet_model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['acc'])\n",
    "\n",
    "# change_lr = LearningRateScheduler(scheduler)\n",
    "\n",
    "# Build the checkpoint directory from separate components so os.path.join\n",
    "# uses the right separator everywhere (the old 'data/trained_model' literal\n",
    "# produced mixed separators on Windows, as seen in the training log).\n",
    "save_dir = os.path.join(os.getcwd(), 'data', 'trained_model')\n",
    "# Template filled in by ModelCheckpoint with epoch number and val accuracy.\n",
    "model_name = 'resnet50_model.{epoch:02d}-{val_acc:.2f}.h5'\n",
    "# exist_ok=True makes the create idempotent; no need for a prior isdir check.\n",
    "os.makedirs(save_dir, exist_ok=True)\n",
    "filepath = os.path.join(save_dir, model_name)\n",
    "\n",
    "# Persist only checkpoints that improve validation accuracy.\n",
    "checkpoint = ModelCheckpoint(filepath=filepath,\n",
    "                             monitor='val_acc',\n",
    "                             verbose=1,\n",
    "                             save_best_only=True)\n",
    "\n",
    "# Early stopping to prevent overfitting: stop after 20 epochs without a\n",
    "# val_acc improvement and restore the best weights seen so far.\n",
    "early_stopping = EarlyStopping(monitor=\"val_acc\", min_delta=0,\n",
    "                                                  patience=20, verbose=1, \n",
    "                                                  mode=\"auto\", baseline=None, \n",
    "                                                  restore_best_weights=True)\n",
    "\n",
    "cbks = [early_stopping, checkpoint]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Plain generators: no augmentation or normalization options are enabled.\n",
    "train_data_gen = ImageDataGenerator()\n",
    "valid_data_gen = ImageDataGenerator()\n",
    "# NOTE(review): .fit() only computes statistics when featurewise_center,\n",
    "# featurewise_std_normalization or zca_whitening is set; with the default\n",
    "# constructor above this call has no effect — confirm it can be removed.\n",
    "train_data_gen.fit(train_x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 3.6558 - acc: 0.0638  ETA: 12s - lo - ETA: 10s - loss: - ETA: 6s - - ETA: 3s - loss: 3.6825 - acc: 0.06 - ETA: 3s - loss: 3.6812 - acc: - ETA: 3s - loss: 3.6761 - ac - ETA: 2s - lo\n",
      "Epoch 00001: val_acc improved from -inf to 0.07932, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.01-0.08.h5\n",
      "490/490 [==============================] - 27s 56ms/step - loss: 3.6558 - acc: 0.0638 - val_loss: 3.4889 - val_acc: 0.0793\n",
      "Epoch 2/200\n",
      "489/490 [============================>.] - ETA: 0s - loss: 3.2401 - acc: 0.1043  ETA:\n",
      "Epoch 00002: val_acc improved from 0.07932 to 0.13606, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.02-0.14.h5\n",
      "490/490 [==============================] - 25s 52ms/step - loss: 3.2394 - acc: 0.1043 - val_loss: 2.9439 - val_acc: 0.1361\n",
      "Epoch 3/200\n",
      "489/490 [============================>.] - ETA: 0s - loss: 2.5641 - acc: 0.2440\n",
      "Epoch 00003: val_acc improved from 0.13606 to 0.40372, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.03-0.40.h5\n",
      "490/490 [==============================] - 25s 52ms/step - loss: 2.5633 - acc: 0.2443 - val_loss: 1.9846 - val_acc: 0.4037\n",
      "Epoch 4/200\n",
      "489/490 [============================>.] - ETA: 0s - loss: 1.6982 - acc: 0.4738  ETA: 15s - lo - ETA: 12s - loss: 1. - ETA: 11s - loss: 1.8687 - acc - E - ETA: 2s - loss: 1.7373  - ETA: 1s - loss: 1.720\n",
      "Epoch 00004: val_acc improved from 0.40372 to 0.62280, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.04-0.62.h5\n",
      "490/490 [==============================] - 26s 52ms/step - loss: 1.6973 - acc: 0.4741 - val_loss: 1.1566 - val_acc: 0.6228\n",
      "Epoch 5/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 1.1984 - acc: 0.6292- ETA: 9s - loss: 1 - ETA: 7s - loss: 1.2358 - acc: 0. - ETA: 7s - loss: 1 - ETA: 5s - loss\n",
      "Epoch 00005: val_acc improved from 0.62280 to 0.75504, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.05-0.76.h5\n",
      "490/490 [==============================] - 26s 52ms/step - loss: 1.1984 - acc: 0.6292 - val_loss: 0.8171 - val_acc: 0.7550\n",
      "Epoch 6/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.8704 - acc: 0.7435\n",
      "Epoch 00006: val_acc improved from 0.75504 to 0.84915, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.06-0.85.h5\n",
      "490/490 [==============================] - 26s 52ms/step - loss: 0.8704 - acc: 0.7435 - val_loss: 0.5478 - val_acc: 0.8491\n",
      "Epoch 7/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.6366 - acc: 0.8308  ETA: 15s - loss: 0.6722 - acc:  - ETA: 15s - loss: 0. - ETA - ETA - ETA: 11s - loss: 0.6663 - acc:  - ETA: 11s - loss: 0. -  - ETA: 3s - loss: 0.6509 - acc: 0. - ETA: 3s - loss: 0. - ETA: 1s - loss: 0.6\n",
      "Epoch 00007: val_acc improved from 0.84915 to 0.89441, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.07-0.89.h5\n",
      "490/490 [==============================] - 26s 52ms/step - loss: 0.6366 - acc: 0.8308 - val_loss: 0.4169 - val_acc: 0.8944\n",
      "Epoch 8/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.5302 - acc: 0.8645- ETA: 8s - loss: 0.5578 - acc: - ETA: 8s - loss: 0  - ETA:\n",
      "Epoch 00008: val_acc improved from 0.89441 to 0.91252, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.08-0.91.h5\n",
      "490/490 [==============================] - 26s 52ms/step - loss: 0.5302 - acc: 0.8645 - val_loss: 0.3511 - val_acc: 0.9125\n",
      "Epoch 9/200\n",
      "489/490 [============================>.] - ETA: 0s - loss: 0.3943 - acc: 0.9088  ETA: 10s - loss: 0.4034 - acc: 0. - ETA: 10s - loss: 0.4048 - acc: 0.90  - ETA: 7s - loss: 0.4016 - acc: 0.9 - ETA: 7s - loss: 0.4011 - acc: 0 -\n",
      "Epoch 00009: val_acc improved from 0.91252 to 0.93063, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.09-0.93.h5\n",
      "490/490 [==============================] - 26s 52ms/step - loss: 0.3942 - acc: 0.9087 - val_loss: 0.3193 - val_acc: 0.9306\n",
      "Epoch 10/200\n",
      "489/490 [============================>.] - ETA: 0s - loss: 0.3224 - acc: 0.9292- ETA: 0s - loss: 0.3227 - acc: 0.9 - ETA: 0s - loss: 0.3226 - acc: 0.929\n",
      "Epoch 00010: val_acc improved from 0.93063 to 0.94976, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.10-0.95.h5\n",
      "490/490 [==============================] - 26s 52ms/step - loss: 0.3224 - acc: 0.9292 - val_loss: 0.2474 - val_acc: 0.9498\n",
      "Epoch 11/200\n",
      "489/490 [============================>.] - ETA: 0s - loss: 0.4141 - acc: 0.9045  ETA:  - ETA: 15s - loss: 0. - ETA: 14s - loss: 0.2653 - acc: 0.94 - ETA: 14s - loss: 0.2664 - - ETA:  - ETA: 0s - loss: 0.4063 - acc: 0.906 - ETA: 0s - loss: 0.4070 - ac\n",
      "Epoch 00011: val_acc did not improve from 0.94976\n",
      "490/490 [==============================] - 25s 52ms/step - loss: 0.4147 - acc: 0.9044 - val_loss: 0.5789 - val_acc: 0.8472\n",
      "Epoch 12/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.3792 - acc: 0.9151\n",
      "Epoch 00012: val_acc did not improve from 0.94976\n",
      "490/490 [==============================] - 26s 52ms/step - loss: 0.3792 - acc: 0.9151 - val_loss: 0.2636 - val_acc: 0.9444\n",
      "Epoch 13/200\n",
      "489/490 [============================>.] - ETA: 0s - loss: 0.2890 - acc: 0.9395\n",
      "Epoch 00013: val_acc improved from 0.94976 to 0.96315, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.13-0.96.h5\n",
      "490/490 [==============================] - 26s 53ms/step - loss: 0.2888 - acc: 0.9396 - val_loss: 0.2063 - val_acc: 0.9631\n",
      "Epoch 14/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.2242 - acc: 0.9578- ETA: 5s - loss: 0.2273 - acc: 0.956 - ETA: 5s - lo - ETA: 3s - loss: 0.2 - ETA: 1s - loss: 0.2\n",
      "Epoch 00014: val_acc did not improve from 0.96315\n",
      "490/490 [==============================] - 25s 51ms/step - loss: 0.2242 - acc: 0.9578 - val_loss: 0.3901 - val_acc: 0.9554\n",
      "Epoch 15/200\n",
      "489/490 [============================>.] - ETA: 0s - loss: 0.2361 - acc: 0.9555  ETA:  - ET - ETA: \n",
      "Epoch 00015: val_acc did not improve from 0.96315\n",
      "490/490 [==============================] - 26s 52ms/step - loss: 0.2360 - acc: 0.9555 - val_loss: 1.0442 - val_acc: 0.9537\n",
      "Epoch 16/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.2078 - acc: 0.9628\n",
      "Epoch 00016: val_acc improved from 0.96315 to 0.96710, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.16-0.97.h5\n",
      "490/490 [==============================] - 26s 53ms/step - loss: 0.2078 - acc: 0.9628 - val_loss: 0.1915 - val_acc: 0.9671\n",
      "Epoch 17/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.1753 - acc: 0.9724 - ETA: 0s - loss: 0.1756 - acc: 0.9724\n",
      "Epoch 00017: val_acc improved from 0.96710 to 0.97054, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.17-0.97.h5\n",
      "490/490 [==============================] - 26s 53ms/step - loss: 0.1756 - acc: 0.9724 - val_loss: 0.1815 - val_acc: 0.9705\n",
      "Epoch 18/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.1698 - acc: 0.9741  ETA: 16s - los\n",
      "Epoch 00018: val_acc improved from 0.97054 to 0.97131, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.18-0.97.h5\n",
      "490/490 [==============================] - 26s 53ms/step - loss: 0.1698 - acc: 0.9741 - val_loss: 0.1761 - val_acc: 0.9713\n",
      "Epoch 19/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.1589 - acc: 0.9755\n",
      "Epoch 00019: val_acc improved from 0.97131 to 0.97896, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.19-0.98.h5\n",
      "490/490 [==============================] - 26s 53ms/step - loss: 0.1589 - acc: 0.9755 - val_loss: 0.1525 - val_acc: 0.9790\n",
      "Epoch 20/200\n",
      "489/490 [============================>.] - ETA: 0s - loss: 0.1561 - acc: 0.9768- ETA: 2s - loss: 0.1572 - acc: 0.976 - ETA: 2s - loss: 0.1570 - ac - ETA: 1s - loss: 0.157\n",
      "Epoch 00020: val_acc did not improve from 0.97896\n",
      "490/490 [==============================] - 26s 52ms/step - loss: 0.1560 - acc: 0.9768 - val_loss: 0.1676 - val_acc: 0.9733\n",
      "Epoch 21/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.1457 - acc: 0.9802- ETA: 9s - lo\n",
      "Epoch 00021: val_acc did not improve from 0.97896\n",
      "490/490 [==============================] - 26s 52ms/step - loss: 0.1457 - acc: 0.9802 - val_loss: 0.1546 - val_acc: 0.9767\n",
      "Epoch 22/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.1396 - acc: 0.9803\n",
      "Epoch 00022: val_acc did not improve from 0.97896\n",
      "490/490 [==============================] - 26s 53ms/step - loss: 0.1396 - acc: 0.9803 - val_loss: 0.1753 - val_acc: 0.9719\n",
      "Epoch 23/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.1299 - acc: 0.98342 ETA: 13s - loss: 0.12 - ETA\n",
      "Epoch 00023: val_acc improved from 0.97896 to 0.97972, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.23-0.98.h5\n",
      "490/490 [==============================] - 26s 53ms/step - loss: 0.1299 - acc: 0.9834 - val_loss: 0.1471 - val_acc: 0.9797\n",
      "Epoch 24/200\n",
      "489/490 [============================>.] - ETA: 0s - loss: 0.1325 - acc: 0.9830  ETA: 17s - loss: 0.1257 - acc - ETA: 17s - loss: 0. - ETA: 16s - loss: 0.124 - ET\n",
      "Epoch 00024: val_acc did not improve from 0.97972\n",
      "490/490 [==============================] - 25s 52ms/step - loss: 0.1325 - acc: 0.9830 - val_loss: 0.1571 - val_acc: 0.9755\n",
      "Epoch 25/200\n",
      "489/490 [============================>.] - ETA: 0s - loss: 0.1289 - acc: 0.9836\n",
      "Epoch 00025: val_acc improved from 0.97972 to 0.98049, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.25-0.98.h5\n",
      "490/490 [==============================] - 26s 53ms/step - loss: 0.1289 - acc: 0.9836 - val_loss: 0.1490 - val_acc: 0.9805\n",
      "Epoch 26/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.1211 - acc: 0.9851\n",
      "Epoch 00026: val_acc improved from 0.98049 to 0.98419, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.26-0.98.h5\n",
      "490/490 [==============================] - 26s 53ms/step - loss: 0.1211 - acc: 0.9851 - val_loss: 0.1354 - val_acc: 0.9842\n",
      "Epoch 27/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.1231 - acc: 0.9841\n",
      "Epoch 00027: val_acc did not improve from 0.98419\n",
      "490/490 [==============================] - 25s 52ms/step - loss: 0.1231 - acc: 0.9841 - val_loss: 0.1407 - val_acc: 0.9821\n",
      "Epoch 28/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.1167 - acc: 0.9863- ETA: 1s - loss: 0.1167 -\n",
      "Epoch 00028: val_acc improved from 0.98419 to 0.98559, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.28-0.99.h5\n",
      "490/490 [==============================] - 26s 53ms/step - loss: 0.1167 - acc: 0.9863 - val_loss: 0.1287 - val_acc: 0.9856\n",
      "Epoch 29/200\n",
      "489/490 [============================>.] - ETA: 0s - loss: 0.1163 - acc: 0.9861- ETA: 0s - loss: 0.1165 - acc: 0.9\n",
      "Epoch 00029: val_acc did not improve from 0.98559\n",
      "490/490 [==============================] - 25s 52ms/step - loss: 0.1163 - acc: 0.9861 - val_loss: 0.1503 - val_acc: 0.9800\n",
      "Epoch 30/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.1200 - acc: 0.9846\n",
      "Epoch 00030: val_acc did not improve from 0.98559\n",
      "490/490 [==============================] - 25s 52ms/step - loss: 0.1200 - acc: 0.9846 - val_loss: 0.1258 - val_acc: 0.9852\n",
      "Epoch 31/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.1164 - acc: 0.9861-  - ETA: 0s - loss: 0.1164 - acc: - ETA: 0s - loss: 0.1163 - acc: 0.986\n",
      "Epoch 00031: val_acc did not improve from 0.98559\n",
      "490/490 [==============================] - 25s 52ms/step - loss: 0.1164 - acc: 0.9861 - val_loss: 0.1413 - val_acc: 0.9792\n",
      "Epoch 32/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.1114 - acc: 0.9869- ETA: 6s - loss: 0.1121 - acc:  - ETA: 5s - loss: 0.11 - ETA: 0s - loss: 0.1111 - acc: 0\n",
      "Epoch 00032: val_acc did not improve from 0.98559\n",
      "490/490 [==============================] - 25s 52ms/step - loss: 0.1114 - acc: 0.9869 - val_loss: 0.1474 - val_acc: 0.9796\n",
      "Epoch 33/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.1136 - acc: 0.9856- ETA: 5s - loss: 0.1153 - a - ETA: 4s - loss: 0.11 - ETA: 2s \n",
      "Epoch 00033: val_acc did not improve from 0.98559\n",
      "490/490 [==============================] - 25s 52ms/step - loss: 0.1136 - acc: 0.9856 - val_loss: 0.1441 - val_acc: 0.9787\n",
      "Epoch 34/200\n",
      "489/490 [============================>.] - ETA: 0s - loss: 0.1114 - acc: 0.9864- ETA: 0s - loss: 0.1117 - acc: 0.986 - ETA: 0s - loss: 0.1117 - acc:\n",
      "Epoch 00034: val_acc improved from 0.98559 to 0.98623, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\resnet50_model.34-0.99.h5\n",
      "490/490 [==============================] - 26s 52ms/step - loss: 0.1114 - acc: 0.9864 - val_loss: 0.1228 - val_acc: 0.9862\n",
      "Epoch 35/200\n",
      "490/490 [==============================] - ETA: 0s - loss: 0.1149 - acc: 0.9853\n",
      "Epoch 00035: val_acc did not improve from 0.98623\n",
      "490/490 [==============================] - 25s 51ms/step - loss: 0.1149 - acc: 0.9853 - val_loss: 0.1505 - val_acc: 0.9782\n",
      "Epoch 36/200\n",
      "489/490 [============================>.] - ETA: 0s - loss: 0.1031 - acc: 0.9882\n",
      "Epoch 00036: val_acc did not improve from 0.98623\n",
      "490/490 [==============================] - 25s 51ms/step - loss: 0.1030 - acc: 0.9882 - val_loss: 0.1418 - val_acc: 0.9807\n",
      "Epoch 37/200\n",
      "419/490 [========================>.....] - ETA: 3s - loss: 0.1026 - acc: 0.9885  ETA: 12s - lo - ETA: 11s - loss: 0.10 - ETA: 10s - loss: 0.1034 - acc"
     ]
    }
   ],
   "source": [
    "batch_size = 64\n",
    "epochs = 200\n",
    "iterations = 490  # steps per epoch; ideally derived as len(train_x) // batch_size\n",
    "# Model.fit supports generators directly; fit_generator is deprecated in TF2\n",
    "# (see the deprecation warning emitted by this notebook's earlier run).\n",
    "ResNet_model.fit(train_data_gen.flow(train_x, train_y, batch_size=batch_size),\n",
    "                 steps_per_epoch=iterations,\n",
    "                 epochs=epochs,\n",
    "                 callbacks=cbks,\n",
    "                 validation_data=(test_x, test_y))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "def scheduler(epoch):\n",
    "    \"\"\"Step-decay learning-rate schedule for LearningRateScheduler.\n",
    "\n",
    "    Returns 0.01 for epochs below 80, 0.005 for epochs below 160,\n",
    "    and 0.001 from epoch 160 onwards.\n",
    "    \"\"\"\n",
    "    for boundary, rate in ((80, 0.01), (160, 0.005)):\n",
    "        if epoch < boundary:\n",
    "            return rate\n",
    "    return 0.001"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "def vgg16():\n",
    "    \"\"\"Build a truncated VGG16 classifier for 32x32 RGB traffic-sign images.\n",
    "\n",
    "    Uses blocks 1-4 of the standard VGG16 topology (block 5 is omitted:\n",
    "    block4_pool already reduces the 32x32 input to 2x2), followed by two\n",
    "    4096-unit FC layers, dropout, and a 43-way softmax head.\n",
    "\n",
    "    Returns:\n",
    "        A keras Model named 'VGG16' mapping (32, 32, 3) images to 43 class\n",
    "        probabilities.\n",
    "    \"\"\"\n",
    "    input_tensor = Input(shape=(32, 32, 3))\n",
    "\n",
    "    # Block 1\n",
    "    x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(input_tensor)\n",
    "    x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x)\n",
    "    x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x)\n",
    "\n",
    "    # Block 2\n",
    "    x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x)\n",
    "    x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x)\n",
    "    x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x)\n",
    "\n",
    "    # Block 3\n",
    "    x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x)\n",
    "    x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x)\n",
    "    x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x)\n",
    "    x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x)\n",
    "\n",
    "    # Block 4\n",
    "    x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x)\n",
    "    x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x)\n",
    "    x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x)\n",
    "    x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x)\n",
    "\n",
    "    # Classification head\n",
    "    x = Flatten(name='flatten')(x)\n",
    "    x = Dense(4096, activation='relu', name='fc1')(x)\n",
    "    x = Dense(4096, activation='relu', name='fc2')(x)\n",
    "    x = Dropout(0.5)(x)\n",
    "    x = Dense(43, activation='softmax', name='predictions')(x)\n",
    "    return Model(inputs=input_tensor, outputs=x, name='VGG16')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"VGG16\"\n",
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "input_1 (InputLayer)         [(None, 32, 32, 3)]       0         \n",
      "_________________________________________________________________\n",
      "block1_conv1 (Conv2D)        (None, 32, 32, 64)        1792      \n",
      "_________________________________________________________________\n",
      "block1_conv2 (Conv2D)        (None, 32, 32, 64)        36928     \n",
      "_________________________________________________________________\n",
      "block1_pool (MaxPooling2D)   (None, 16, 16, 64)        0         \n",
      "_________________________________________________________________\n",
      "block2_conv1 (Conv2D)        (None, 16, 16, 128)       73856     \n",
      "_________________________________________________________________\n",
      "block2_conv2 (Conv2D)        (None, 16, 16, 128)       147584    \n",
      "_________________________________________________________________\n",
      "block2_pool (MaxPooling2D)   (None, 8, 8, 128)         0         \n",
      "_________________________________________________________________\n",
      "block3_conv1 (Conv2D)        (None, 8, 8, 256)         295168    \n",
      "_________________________________________________________________\n",
      "block3_conv2 (Conv2D)        (None, 8, 8, 256)         590080    \n",
      "_________________________________________________________________\n",
      "block3_conv3 (Conv2D)        (None, 8, 8, 256)         590080    \n",
      "_________________________________________________________________\n",
      "block3_pool (MaxPooling2D)   (None, 4, 4, 256)         0         \n",
      "_________________________________________________________________\n",
      "block4_conv1 (Conv2D)        (None, 4, 4, 512)         1180160   \n",
      "_________________________________________________________________\n",
      "block4_conv2 (Conv2D)        (None, 4, 4, 512)         2359808   \n",
      "_________________________________________________________________\n",
      "block4_conv3 (Conv2D)        (None, 4, 4, 512)         2359808   \n",
      "_________________________________________________________________\n",
      "block4_pool (MaxPooling2D)   (None, 2, 2, 512)         0         \n",
      "_________________________________________________________________\n",
      "flatten (Flatten)            (None, 2048)              0         \n",
      "_________________________________________________________________\n",
      "fc1 (Dense)                  (None, 4096)              8392704   \n",
      "_________________________________________________________________\n",
      "fc2 (Dense)                  (None, 4096)              16781312  \n",
      "_________________________________________________________________\n",
      "dropout (Dropout)            (None, 4096)              0         \n",
      "_________________________________________________________________\n",
      "predictions (Dense)          (None, 43)                176171    \n",
      "=================================================================\n",
      "Total params: 32,985,451\n",
      "Trainable params: 32,985,451\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "model = vgg16()\n",
    "model.summary()# display the model architecture"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "# SGD at 0.1; the LearningRateScheduler below steps it down during training.\n",
    "# `learning_rate` replaces the deprecated `lr` alias.\n",
    "sgd = optimizers.SGD(learning_rate=0.1, momentum=0.9, nesterov=True)\n",
    "model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['acc'])\n",
    "\n",
    "change_lr = LearningRateScheduler(scheduler)\n",
    "\n",
    "save_dir = os.path.join(os.getcwd(), 'data/trained_model')\n",
    "# Fixed: the model built here is VGG16, so name the checkpoints accordingly\n",
    "# (was 'vgg19_model', which mislabelled the saved files).\n",
    "model_name = 'vgg16_model.{epoch:02d}-{val_acc:.2f}.h5'\n",
    "os.makedirs(save_dir, exist_ok=True)  # idempotent; replaces the isdir check\n",
    "filepath = os.path.join(save_dir, model_name)\n",
    "\n",
    "# Keep only weights that improve validation accuracy.\n",
    "checkpoint = ModelCheckpoint(filepath=filepath,\n",
    "                             monitor='val_acc',\n",
    "                             verbose=1,\n",
    "                             save_best_only=True)\n",
    "\n",
    "# Stop after 5 stagnant epochs and restore the best weights.\n",
    "early_stopping = EarlyStopping(monitor='val_acc', min_delta=0,\n",
    "                               patience=5, verbose=1,\n",
    "                               mode='auto', baseline=None,\n",
    "                               restore_best_weights=True)\n",
    "\n",
    "cbks = [early_stopping, checkpoint, change_lr]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Using real-time data augmentation.\n"
     ]
    }
   ],
   "source": [
    "print('Using real-time data augmentation.')\n",
    "# datagen = ImageDataGenerator(horizontal_flip=True,\n",
    "#                              width_shift_range=0.125, height_shift_range=0.125, fill_mode='constant', cval=0.)\n",
    "# NOTE(review): horizontal_flip=True mirrors images; several GTSRB classes\n",
    "# (e.g. left-turn vs right-turn arrows) are not mirror-invariant, so flipping\n",
    "# can produce images that no longer match their label — confirm this is intended.\n",
    "datagen = ImageDataGenerator(featurewise_center=True,\n",
    "                                   featurewise_std_normalization=True,\n",
    "                                   rotation_range=20,\n",
    "                                   width_shift_range=0.2,\n",
    "                                   height_shift_range=0.2,\n",
    "                                   horizontal_flip=True)\n",
    "# fit() computes the featurewise mean/std used by the two featurewise options above.\n",
    "datagen.fit(train_x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From <ipython-input-14-c3162b564b49>:8: Model.fit_generator (from tensorflow.python.keras.engine.training) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use Model.fit, which supports generators.\n",
      "Epoch 1/100\n",
      "  1/246 [..............................] - ETA: 0s - loss: 3.7612 - acc: 0.0234WARNING:tensorflow:Callbacks method `on_train_batch_end` is slow compared to the batch time (batch time: 0.0140s vs `on_train_batch_end` time: 0.0309s). Check your callbacks.\n",
      "246/246 [==============================] - ETA: 0s - loss: 3.5305 - acc: 0.0611\n",
      "Epoch 00001: val_acc improved from -inf to 0.09577, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.01-0.10.h5\n",
      "246/246 [==============================] - 14s 57ms/step - loss: 3.5305 - acc: 0.0611 - val_loss: 3.4056 - val_acc: 0.0958\n",
      "Epoch 2/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 3.1095 - acc: 0.1443- ETA: 6s - ETA: 0s - loss: 3.1556 - acc\n",
      "Epoch 00002: val_acc improved from 0.09577 to 0.31625, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.02-0.32.h5\n",
      "246/246 [==============================] - 14s 55ms/step - loss: 3.1066 - acc: 0.1451 - val_loss: 2.2307 - val_acc: 0.3162\n",
      "Epoch 3/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 1.6920 - acc: 0.4370- ETA: 3s - loss: 1.8034 - ac - ETA: \n",
      "Epoch 00003: val_acc improved from 0.31625 to 0.63096, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.03-0.63.h5\n",
      "246/246 [==============================] - 13s 55ms/step - loss: 1.6901 - acc: 0.4373 - val_loss: 1.1144 - val_acc: 0.6310\n",
      "Epoch 4/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.7032 - acc: 0.7707\n",
      "Epoch 00004: val_acc improved from 0.63096 to 0.91622, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.04-0.92.h5\n",
      "246/246 [==============================] - 14s 55ms/step - loss: 0.7013 - acc: 0.7713 - val_loss: 0.2458 - val_acc: 0.9162\n",
      "Epoch 5/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.1792 - acc: 0.9442\n",
      "Epoch 00005: val_acc improved from 0.91622 to 0.96889, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.05-0.97.h5\n",
      "246/246 [==============================] - 14s 55ms/step - loss: 0.1791 - acc: 0.9442 - val_loss: 0.0963 - val_acc: 0.9689\n",
      "Epoch 6/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0801 - acc: 0.9749- ETA: 0s - loss: 0.0810 - acc\n",
      "Epoch 00006: val_acc improved from 0.96889 to 0.98304, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.06-0.98.h5\n",
      "246/246 [==============================] - 14s 55ms/step - loss: 0.0801 - acc: 0.9749 - val_loss: 0.0554 - val_acc: 0.9830\n",
      "Epoch 7/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0488 - acc: 0.9850\n",
      "Epoch 00007: val_acc improved from 0.98304 to 0.98687, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.07-0.99.h5\n",
      "246/246 [==============================] - 14s 55ms/step - loss: 0.0489 - acc: 0.9850 - val_loss: 0.0417 - val_acc: 0.9869\n",
      "Epoch 8/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0281 - acc: 0.9916\n",
      "Epoch 00008: val_acc improved from 0.98687 to 0.98776, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.08-0.99.h5\n",
      "246/246 [==============================] - 14s 56ms/step - loss: 0.0280 - acc: 0.9916 - val_loss: 0.0433 - val_acc: 0.9878\n",
      "Epoch 9/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0272 - acc: 0.9914- ETA: 5s - \n",
      "Epoch 00009: val_acc improved from 0.98776 to 0.99286, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.09-0.99.h5\n",
      "246/246 [==============================] - 14s 56ms/step - loss: 0.0272 - acc: 0.9914 - val_loss: 0.0300 - val_acc: 0.9929\n",
      "Epoch 10/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0201 - acc: 0.9929\n",
      "Epoch 00010: val_acc did not improve from 0.99286\n",
      "246/246 [==============================] - 13s 51ms/step - loss: 0.0200 - acc: 0.9930 - val_loss: 0.0357 - val_acc: 0.9903\n",
      "Epoch 11/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0190 - acc: 0.9941\n",
      "Epoch 00011: val_acc did not improve from 0.99286\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0190 - acc: 0.9941 - val_loss: 0.0315 - val_acc: 0.9921\n",
      "Epoch 12/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0127 - acc: 0.9959\n",
      "Epoch 00012: val_acc did not improve from 0.99286\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0127 - acc: 0.9959 - val_loss: 0.0415 - val_acc: 0.9899\n",
      "Epoch 13/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0098 - acc: 0.9969\n",
      "Epoch 00013: val_acc improved from 0.99286 to 0.99413, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.13-0.99.h5\n",
      "246/246 [==============================] - 14s 56ms/step - loss: 0.0098 - acc: 0.9969 - val_loss: 0.0242 - val_acc: 0.9941\n",
      "Epoch 14/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0075 - acc: 0.9976\n",
      "Epoch 00014: val_acc improved from 0.99413 to 0.99426, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.14-0.99.h5\n",
      "246/246 [==============================] - 14s 57ms/step - loss: 0.0075 - acc: 0.9976 - val_loss: 0.0268 - val_acc: 0.9943\n",
      "Epoch 15/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0088 - acc: 0.9969\n",
      "Epoch 00015: val_acc did not improve from 0.99426\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0088 - acc: 0.9969 - val_loss: 0.0265 - val_acc: 0.9936\n",
      "Epoch 16/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0043 - acc: 0.9990\n",
      "Epoch 00016: val_acc did not improve from 0.99426\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0042 - acc: 0.9990 - val_loss: 0.0285 - val_acc: 0.9930\n",
      "Epoch 17/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0054 - acc: 0.9984\n",
      "Epoch 00017: val_acc improved from 0.99426 to 0.99554, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.17-1.00.h5\n",
      "246/246 [==============================] - 14s 56ms/step - loss: 0.0053 - acc: 0.9984 - val_loss: 0.0205 - val_acc: 0.9955\n",
      "Epoch 18/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0064 - acc: 0.9983\n",
      "Epoch 00018: val_acc did not improve from 0.99554\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0064 - acc: 0.9983 - val_loss: 0.0282 - val_acc: 0.9940\n",
      "Epoch 19/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0026 - acc: 0.9993- ETA: 3s - loss: 0.0030 - acc: 0.999 - ETA\n",
      "Epoch 00019: val_acc did not improve from 0.99554\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0026 - acc: 0.9993 - val_loss: 0.0224 - val_acc: 0.9950\n",
      "Epoch 20/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0030 - acc: 0.9990\n",
      "Epoch 00020: val_acc did not improve from 0.99554\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0029 - acc: 0.9990 - val_loss: 0.0246 - val_acc: 0.9948\n",
      "Epoch 21/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 6.5738e-04 - acc: 0.9999- ETA: 5s - loss:  - ET\n",
      "Epoch 00021: val_acc improved from 0.99554 to 0.99668, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.21-1.00.h5\n",
      "246/246 [==============================] - 14s 56ms/step - loss: 6.5487e-04 - acc: 0.9999 - val_loss: 0.0246 - val_acc: 0.9967\n",
      "Epoch 22/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0054 - acc: 0.9984\n",
      "Epoch 00022: val_acc did not improve from 0.99668\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0053 - acc: 0.9984 - val_loss: 0.0283 - val_acc: 0.9945\n",
      "Epoch 23/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0016 - acc: 0.9995- ETA: 3s - loss: 0.0020 - - ETA: 2s -\n",
      "Epoch 00023: val_acc improved from 0.99668 to 0.99681, saving model to D:\\Projects\\PycharmProjects\\Grad\\GTSRB\\data/trained_model\\vgg19_model.23-1.00.h5\n",
      "246/246 [==============================] - 14s 56ms/step - loss: 0.0016 - acc: 0.9995 - val_loss: 0.0175 - val_acc: 0.9968\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 24/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0029 - acc: 0.9992\n",
      "Epoch 00024: val_acc did not improve from 0.99681\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0029 - acc: 0.9992 - val_loss: 0.0218 - val_acc: 0.9943\n",
      "Epoch 25/100\n",
      "246/246 [==============================] - ETA: 0s - loss: 0.0014 - acc: 0.9996    - ETA: 6s - loss: 6.1435e-04 - acc: 0. - ETA: 5s  - ETA:\n",
      "Epoch 00025: val_acc did not improve from 0.99681\n",
      "246/246 [==============================] - 13s 52ms/step - loss: 0.0014 - acc: 0.9996 - val_loss: 0.0267 - val_acc: 0.9934\n",
      "Epoch 26/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 0.0044 - acc: 0.9992- ETA: 0s - loss: 0.0044 - acc:\n",
      "Epoch 00026: val_acc did not improve from 0.99681\n",
      "246/246 [==============================] - 13s 53ms/step - loss: 0.0044 - acc: 0.9992 - val_loss: 0.0200 - val_acc: 0.9945\n",
      "Epoch 27/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 7.6452e-04 - acc: 0.9998- ETA: 2s - loss: 7.4\n",
      "Epoch 00027: val_acc did not improve from 0.99681\n",
      "246/246 [==============================] - 13s 53ms/step - loss: 7.6162e-04 - acc: 0.9998 - val_loss: 0.0194 - val_acc: 0.9960\n",
      "Epoch 28/100\n",
      "245/246 [============================>.] - ETA: 0s - loss: 1.1408e-04 - acc: 1.0000- ETA: 0s - loss: 1.1588e-04 - acc: 1.00Restoring model weights from the end of the best epoch.\n",
      "\n",
      "Epoch 00028: val_acc did not improve from 0.99681\n",
      "246/246 [==============================] - 13s 55ms/step - loss: 1.1389e-04 - acc: 1.0000 - val_loss: 0.0219 - val_acc: 0.9966\n",
      "Epoch 00028: early stopping\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "<tensorflow.python.keras.callbacks.History at 0x1fa51ebcda0>"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Model.fit accepts generators directly in TF 2.x; fit_generator is deprecated.\n",
    "batch_size = 128\n",
    "epochs = 100\n",
    "iterations = 246  # steps per epoch; presumably ceil(len(train_x) / batch_size) — TODO confirm\n",
    "model.fit(datagen.flow(train_x, train_y, batch_size=batch_size),\n",
    "          steps_per_epoch=iterations,\n",
    "          epochs=epochs,\n",
    "          callbacks=cbks,\n",
    "          validation_data=(test_x, test_y))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the previously trained GTSRB classifier (full model: architecture + weights).\n",
    "model = load_model('../../data/neural_networks/GTSRB_VGG16.h5')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "ename": "ResourceExhaustedError",
     "evalue": "OOM when allocating tensor with shape[2193,64,32,32] and type float on /job:localhost/replica:0/task:0/device:GPU:0 by allocator GPU_0_bfc [Op:Conv2D]",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mResourceExhaustedError\u001b[0m                    Traceback (most recent call last)",
      "\u001b[1;32m<ipython-input-12-d7d8c4e14834>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m \u001b[0mX_data_perdict\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mX_data\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m      2\u001b[0m \u001b[0mX_data_perdict_arg\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0margmax\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mX_data_perdict\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0maxis\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m      3\u001b[0m \u001b[0mY_data_arg\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0margmax\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mY_data\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0maxis\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\Anaconda3\\envs\\grad\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\base_layer.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m    983\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    984\u001b[0m         \u001b[1;32mwith\u001b[0m \u001b[0mops\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0menable_auto_cast_variables\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_compute_dtype_object\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 985\u001b[1;33m           \u001b[0moutputs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mcall_fn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    986\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    987\u001b[0m         \u001b[1;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_activity_regularizer\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\Anaconda3\\envs\\grad\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\functional.py\u001b[0m in \u001b[0;36mcall\u001b[1;34m(self, inputs, training, mask)\u001b[0m\n\u001b[0;32m    384\u001b[0m     \"\"\"\n\u001b[0;32m    385\u001b[0m     return self._run_internal_graph(\n\u001b[1;32m--> 386\u001b[1;33m         inputs, training=training, mask=mask)\n\u001b[0m\u001b[0;32m    387\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    388\u001b[0m   \u001b[1;32mdef\u001b[0m \u001b[0mcompute_output_shape\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0minput_shape\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\Anaconda3\\envs\\grad\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\functional.py\u001b[0m in \u001b[0;36m_run_internal_graph\u001b[1;34m(self, inputs, training, mask)\u001b[0m\n\u001b[0;32m    506\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    507\u001b[0m         \u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mkwargs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mnode\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mmap_arguments\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mtensor_dict\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 508\u001b[1;33m         \u001b[0moutputs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mnode\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mlayer\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    509\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    510\u001b[0m         \u001b[1;31m# Update tensor_dict.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\Anaconda3\\envs\\grad\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\base_layer.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m    983\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    984\u001b[0m         \u001b[1;32mwith\u001b[0m \u001b[0mops\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0menable_auto_cast_variables\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_compute_dtype_object\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 985\u001b[1;33m           \u001b[0moutputs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mcall_fn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    986\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    987\u001b[0m         \u001b[1;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_activity_regularizer\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\Anaconda3\\envs\\grad\\lib\\site-packages\\tensorflow\\python\\keras\\layers\\convolutional.py\u001b[0m in \u001b[0;36mcall\u001b[1;34m(self, inputs)\u001b[0m\n\u001b[0;32m    245\u001b[0m       \u001b[0minputs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0marray_ops\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mpad\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_compute_causal_padding\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    246\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 247\u001b[1;33m     \u001b[0moutputs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_convolution_op\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mkernel\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    248\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    249\u001b[0m     \u001b[1;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0muse_bias\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\Anaconda3\\envs\\grad\\lib\\site-packages\\tensorflow\\python\\util\\dispatch.py\u001b[0m in \u001b[0;36mwrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m    199\u001b[0m     \u001b[1;34m\"\"\"Call target, and fall back on dispatchers if there is a TypeError.\"\"\"\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    200\u001b[0m     \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 201\u001b[1;33m       \u001b[1;32mreturn\u001b[0m \u001b[0mtarget\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    202\u001b[0m     \u001b[1;32mexcept\u001b[0m \u001b[1;33m(\u001b[0m\u001b[0mTypeError\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mValueError\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    203\u001b[0m       \u001b[1;31m# Note: convert_to_eager_tensor currently raises a ValueError, not a\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\Anaconda3\\envs\\grad\\lib\\site-packages\\tensorflow\\python\\ops\\nn_ops.py\u001b[0m in \u001b[0;36mconvolution_v2\u001b[1;34m(input, filters, strides, padding, data_format, dilations, name)\u001b[0m\n\u001b[0;32m   1016\u001b[0m       \u001b[0mdata_format\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mdata_format\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1017\u001b[0m       \u001b[0mdilations\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mdilations\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1018\u001b[1;33m       name=name)\n\u001b[0m\u001b[0;32m   1019\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1020\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\Anaconda3\\envs\\grad\\lib\\site-packages\\tensorflow\\python\\ops\\nn_ops.py\u001b[0m in \u001b[0;36mconvolution_internal\u001b[1;34m(input, filters, strides, padding, data_format, dilations, name, call_from_convolution, num_spatial_dims)\u001b[0m\n\u001b[0;32m   1146\u001b[0m           \u001b[0mdata_format\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mdata_format\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1147\u001b[0m           \u001b[0mdilations\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mdilations\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1148\u001b[1;33m           name=name)\n\u001b[0m\u001b[0;32m   1149\u001b[0m     \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1150\u001b[0m       \u001b[1;32mif\u001b[0m \u001b[0mchannel_index\u001b[0m \u001b[1;33m==\u001b[0m \u001b[1;36m1\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\Anaconda3\\envs\\grad\\lib\\site-packages\\tensorflow\\python\\ops\\nn_ops.py\u001b[0m in \u001b[0;36m_conv2d_expanded_batch\u001b[1;34m(input, filters, strides, padding, data_format, dilations, name)\u001b[0m\n\u001b[0;32m   2590\u001b[0m         \u001b[0mdata_format\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mdata_format\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   2591\u001b[0m         \u001b[0mdilations\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mdilations\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 2592\u001b[1;33m         name=name)\n\u001b[0m\u001b[0;32m   2593\u001b[0m   return squeeze_batch_dims(\n\u001b[0;32m   2594\u001b[0m       \u001b[0minput\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\Anaconda3\\envs\\grad\\lib\\site-packages\\tensorflow\\python\\ops\\gen_nn_ops.py\u001b[0m in \u001b[0;36mconv2d\u001b[1;34m(input, filter, strides, padding, use_cudnn_on_gpu, explicit_paddings, data_format, dilations, name)\u001b[0m\n\u001b[0;32m    936\u001b[0m       \u001b[1;32mreturn\u001b[0m \u001b[0m_result\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    937\u001b[0m     \u001b[1;32mexcept\u001b[0m \u001b[0m_core\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_NotOkStatusException\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 938\u001b[1;33m       \u001b[0m_ops\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mraise_from_not_ok_status\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0me\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mname\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    939\u001b[0m     \u001b[1;32mexcept\u001b[0m \u001b[0m_core\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_FallbackException\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    940\u001b[0m       \u001b[1;32mpass\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\Anaconda3\\envs\\grad\\lib\\site-packages\\tensorflow\\python\\framework\\ops.py\u001b[0m in \u001b[0;36mraise_from_not_ok_status\u001b[1;34m(e, name)\u001b[0m\n\u001b[0;32m   6841\u001b[0m   \u001b[0mmessage\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0me\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mmessage\u001b[0m \u001b[1;33m+\u001b[0m \u001b[1;33m(\u001b[0m\u001b[1;34m\" name: \"\u001b[0m \u001b[1;33m+\u001b[0m \u001b[0mname\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mname\u001b[0m \u001b[1;32mis\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[1;32mNone\u001b[0m \u001b[1;32melse\u001b[0m \u001b[1;34m\"\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   6842\u001b[0m   \u001b[1;31m# pylint: disable=protected-access\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 6843\u001b[1;33m   \u001b[0msix\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mraise_from\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mcore\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_status_to_exception\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0me\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mcode\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mmessage\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m   6844\u001b[0m   \u001b[1;31m# pylint: enable=protected-access\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   6845\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\Anaconda3\\envs\\grad\\lib\\site-packages\\six.py\u001b[0m in \u001b[0;36mraise_from\u001b[1;34m(value, from_value)\u001b[0m\n",
      "\u001b[1;31mResourceExhaustedError\u001b[0m: OOM when allocating tensor with shape[2193,64,32,32] and type float on /job:localhost/replica:0/task:0/device:GPU:0 by allocator GPU_0_bfc [Op:Conv2D]"
     ]
    }
   ],
   "source": [
    "# model(X_data) runs the whole dataset through the network in a single step,\n",
    "# which caused the GPU OOM seen in this cell's traceback (one Conv2D activation\n",
    "# tensor of shape [N,64,32,32] for all N samples at once). model.predict with a\n",
    "# batch_size processes the data in chunks and avoids the allocation spike.\n",
    "X_data_perdict = model.predict(np.asarray(X_data), batch_size=128)\n",
    "X_data_perdict_arg = np.argmax(X_data_perdict, axis=1)\n",
    "Y_data_arg = np.argmax(Y_data, axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Per-class precision/recall/F1 computed from the argmax labels of the cell above.\n",
    "from sklearn.metrics import classification_report\n",
    "report = classification_report(Y_data_arg,X_data_perdict_arg)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# classification_report returns a plain string; print renders the table.\n",
    "print(report)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Serialize the model architecture (no weights) to a JSON string;\n",
    "# the bare expression on the last line displays it in the notebook output.\n",
    "json_config = model.to_json()\n",
    "json_config"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Persist the architecture JSON (written to the notebook's working directory).\n",
    "with open('VGG16_model.json', 'w') as json_file:\n",
    "    json_file.write(json_config)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "grad",
   "language": "python",
   "name": "grad"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.15"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
