{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Using TensorFlow backend.\n"
     ]
    }
   ],
   "source": [
    "import os, sys\n",
    "from captcha.image import ImageCaptcha\n",
    "import random\n",
    "import string\n",
    "from keras.utils import plot_model\n",
    "import numpy as np\n",
    "from tensorflow.keras.models import Sequential,Model\n",
    "import tensorflow as tf\n",
    "from tensorflow import keras\n",
    "from tensorflow.keras.layers import Dropout,Dense,MaxPooling2D,Flatten,Conv2D,Input,concatenate\n",
    "from tensorflow.keras.preprocessing import image\n",
    "from tensorflow.keras.applications.mobilenet import preprocess_input, decode_predictions\n",
    "from keras import backend as K\n",
    "from keras.preprocessing.image import ImageDataGenerator"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "#定义验证码大小，n_class,\n",
    "height,width,batch_size,n_class = 50,100,48,10\n",
    "#生成随机数\n",
    "def get_code():\n",
    "    raw = string.digits\n",
    "#     + string.ascii_lowercase #小写\n",
    "#     + string.ascii_uppercase#大写\n",
    "\n",
    "    random_code = ''.join(random.sample(raw,4))\n",
    "    return raw,random_code\n",
    "\n",
    "#生成验证码\n",
    "def generator(height,width,batch_size,n_class):\n",
    "    X = np.zeros((batch_size,height,width,3))\n",
    "    y = [np.zeros((batch_size,n_class),dtype=np.uint8) for i in range(4)]\n",
    "    \n",
    "    #验证码生成器\n",
    "    generator = ImageCaptcha(height=height,width=width)\n",
    "    while True:\n",
    "        for i in range(batch_size):\n",
    "            raw ,random_code = get_code()\n",
    "            img = np.array((generator.generate_image(random_code)),dtype=np.float32)\n",
    "            X[i] = img/255.0\n",
    "            \n",
    "            for j,ch in enumerate(random_code):\n",
    "                y[j][i,:] = 0\n",
    "                y[j][i,raw.find(ch)] = 1\n",
    "        yield X,y"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "inputs = Input(shape=(height,width,3))\n",
    "x = inputs\n",
    "x = Conv2D(32,kernel_size=3,strides=1,activation='relu',padding='same')(x)\n",
    "# x = keras.layers.BatchNormalization()(x)\n",
    "x = MaxPooling2D(2)(x)\n",
    "x = Conv2D(64,kernel_size=3,strides=1,activation='relu',padding='same')(x)\n",
    "# x = keras.layers.BatchNormalization()(x)\n",
    "x = MaxPooling2D(2)(x)\n",
    "# x = Conv2D(64,kernel_size=3,strides=1,activation='relu',padding='same')(x)\n",
    "# x = MaxPooling2D(2)(x)\n",
    "# x = Conv2D(128,kernel_size=3,strides=1,activation='relu',padding='same')(x)\n",
    "# x = MaxPooling2D(2)(x)\n",
    "# x = keras.layers.BatchNormalization()(x)\n",
    "x = Flatten()(x)\n",
    "predictions  = [Dense(10, activation='softmax',name='%d'%(i))(x) for i in range(4)]\n",
    "\n",
    "model = Model(inputs=inputs, outputs=predictions)\n",
    "# model.summary()\n",
    "# plot_model(Model,to_file='model.png')\n",
    "# plot_model(model, to_file='modle.png', show_shapes=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train for 100 steps, validate for 5 steps\n",
      "Epoch 1/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 6.0251 - 0_loss: 1.3424 - 1_loss: 1.7367 - 2_loss: 1.6369 - 3_loss: 1.3090 - 0_accuracy: 0.5718 - 1_accuracy: 0.4251 - 2_accuracy: 0.4556 - 3_accuracy: 0.5995\n",
      "Epoch 00001: val_loss improved from inf to 5.50702, saving model to ./logs/ep001-loss6.019-val_loss5.507.h5\n",
      "100/100 [==============================] - 47s 474ms/step - loss: 6.0194 - 0_loss: 1.3398 - 1_loss: 1.7351 - 2_loss: 1.6352 - 3_loss: 1.3092 - 0_accuracy: 0.5725 - 1_accuracy: 0.4258 - 2_accuracy: 0.4558 - 3_accuracy: 0.5996 - val_loss: 5.5070 - val_0_loss: 1.2143 - val_1_loss: 1.6627 - val_2_loss: 1.5373 - val_3_loss: 1.0927 - val_0_accuracy: 0.6583 - val_1_accuracy: 0.5042 - val_2_accuracy: 0.4750 - val_3_accuracy: 0.6833\n",
      "Epoch 2/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 5.3343 - 0_loss: 1.0987 - 1_loss: 1.5841 - 2_loss: 1.5114 - 3_loss: 1.1400 - 0_accuracy: 0.6688 - 1_accuracy: 0.4762 - 2_accuracy: 0.5042 - 3_accuracy: 0.6439\n",
      "Epoch 00002: val_loss improved from 5.50702 to 4.94470, saving model to ./logs/ep002-loss5.333-val_loss4.945.h5\n",
      "100/100 [==============================] - 45s 454ms/step - loss: 5.3325 - 0_loss: 1.0987 - 1_loss: 1.5844 - 2_loss: 1.5109 - 3_loss: 1.1386 - 0_accuracy: 0.6687 - 1_accuracy: 0.4777 - 2_accuracy: 0.5048 - 3_accuracy: 0.6444 - val_loss: 4.9447 - val_0_loss: 1.0222 - val_1_loss: 1.5201 - val_2_loss: 1.3180 - val_3_loss: 1.0844 - val_0_accuracy: 0.6875 - val_1_accuracy: 0.5125 - val_2_accuracy: 0.5792 - val_3_accuracy: 0.6792\n",
      "Epoch 3/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 4.8066 - 0_loss: 0.9335 - 1_loss: 1.4709 - 2_loss: 1.3722 - 3_loss: 1.0301 - 0_accuracy: 0.7117 - 1_accuracy: 0.5370 - 2_accuracy: 0.5541 - 3_accuracy: 0.6854\n",
      "Epoch 00003: val_loss improved from 4.94470 to 3.96135, saving model to ./logs/ep003-loss4.804-val_loss3.961.h5\n",
      "100/100 [==============================] - 45s 448ms/step - loss: 4.8040 - 0_loss: 0.9332 - 1_loss: 1.4687 - 2_loss: 1.3735 - 3_loss: 1.0287 - 0_accuracy: 0.7113 - 1_accuracy: 0.5381 - 2_accuracy: 0.5531 - 3_accuracy: 0.6862 - val_loss: 3.9613 - val_0_loss: 0.7841 - val_1_loss: 1.4416 - val_2_loss: 1.0774 - val_3_loss: 0.6583 - val_0_accuracy: 0.7375 - val_1_accuracy: 0.5208 - val_2_accuracy: 0.6792 - val_3_accuracy: 0.8167\n",
      "Epoch 4/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 4.3539 - 0_loss: 0.8195 - 1_loss: 1.3933 - 2_loss: 1.2343 - 3_loss: 0.9068 - 0_accuracy: 0.7496 - 1_accuracy: 0.5614 - 2_accuracy: 0.6153 - 3_accuracy: 0.7235\n",
      "Epoch 00004: val_loss did not improve from 3.96135\n",
      "100/100 [==============================] - 45s 448ms/step - loss: 4.3541 - 0_loss: 0.8203 - 1_loss: 1.3916 - 2_loss: 1.2360 - 3_loss: 0.9062 - 0_accuracy: 0.7490 - 1_accuracy: 0.5617 - 2_accuracy: 0.6137 - 3_accuracy: 0.7231 - val_loss: 4.1674 - val_0_loss: 0.8793 - val_1_loss: 1.3328 - val_2_loss: 1.1289 - val_3_loss: 0.8263 - val_0_accuracy: 0.7417 - val_1_accuracy: 0.5833 - val_2_accuracy: 0.6542 - val_3_accuracy: 0.7667\n",
      "Epoch 5/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 4.0061 - 0_loss: 0.7040 - 1_loss: 1.3185 - 2_loss: 1.1703 - 3_loss: 0.8133 - 0_accuracy: 0.7904 - 1_accuracy: 0.5783 - 2_accuracy: 0.6416 - 3_accuracy: 0.7605\n",
      "Epoch 00005: val_loss improved from 3.96135 to 3.55947, saving model to ./logs/ep005-loss4.002-val_loss3.559.h5\n",
      "100/100 [==============================] - 45s 450ms/step - loss: 4.0021 - 0_loss: 0.7028 - 1_loss: 1.3186 - 2_loss: 1.1689 - 3_loss: 0.8117 - 0_accuracy: 0.7906 - 1_accuracy: 0.5775 - 2_accuracy: 0.6421 - 3_accuracy: 0.7608 - val_loss: 3.5595 - val_0_loss: 0.6050 - val_1_loss: 1.2515 - val_2_loss: 0.9931 - val_3_loss: 0.7098 - val_0_accuracy: 0.8417 - val_1_accuracy: 0.6375 - val_2_accuracy: 0.7000 - val_3_accuracy: 0.8208\n",
      "Epoch 6/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 3.8167 - 0_loss: 0.6341 - 1_loss: 1.2822 - 2_loss: 1.1372 - 3_loss: 0.7631 - 0_accuracy: 0.8201 - 1_accuracy: 0.5939 - 2_accuracy: 0.6566 - 3_accuracy: 0.7717\n",
      "Epoch 00006: val_loss improved from 3.55947 to 3.27552, saving model to ./logs/ep006-loss3.812-val_loss3.276.h5\n",
      "100/100 [==============================] - 45s 450ms/step - loss: 3.8123 - 0_loss: 0.6331 - 1_loss: 1.2838 - 2_loss: 1.1345 - 3_loss: 0.7609 - 0_accuracy: 0.8202 - 1_accuracy: 0.5935 - 2_accuracy: 0.6567 - 3_accuracy: 0.7721 - val_loss: 3.2755 - val_0_loss: 0.6031 - val_1_loss: 1.1133 - val_2_loss: 0.9854 - val_3_loss: 0.5737 - val_0_accuracy: 0.8208 - val_1_accuracy: 0.6500 - val_2_accuracy: 0.7417 - val_3_accuracy: 0.8750\n",
      "Epoch 7/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 3.6333 - 0_loss: 0.5945 - 1_loss: 1.2321 - 2_loss: 1.0933 - 3_loss: 0.7135 - 0_accuracy: 0.8350 - 1_accuracy: 0.6164 - 2_accuracy: 0.6679 - 3_accuracy: 0.7877\n",
      "Epoch 00007: val_loss improved from 3.27552 to 2.66716, saving model to ./logs/ep007-loss3.623-val_loss2.667.h5\n",
      "100/100 [==============================] - 46s 456ms/step - loss: 3.6233 - 0_loss: 0.5933 - 1_loss: 1.2277 - 2_loss: 1.0892 - 3_loss: 0.7131 - 0_accuracy: 0.8354 - 1_accuracy: 0.6179 - 2_accuracy: 0.6692 - 3_accuracy: 0.7877 - val_loss: 2.6672 - val_0_loss: 0.4310 - val_1_loss: 1.0611 - val_2_loss: 0.7529 - val_3_loss: 0.4220 - val_0_accuracy: 0.8792 - val_1_accuracy: 0.6917 - val_2_accuracy: 0.7750 - val_3_accuracy: 0.8708\n",
      "Epoch 8/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 3.5031 - 0_loss: 0.5414 - 1_loss: 1.1910 - 2_loss: 1.0703 - 3_loss: 0.7003 - 0_accuracy: 0.8418 - 1_accuracy: 0.6273 - 2_accuracy: 0.6816 - 3_accuracy: 0.8058\n",
      "Epoch 00008: val_loss did not improve from 2.66716\n",
      "100/100 [==============================] - 45s 450ms/step - loss: 3.4976 - 0_loss: 0.5429 - 1_loss: 1.1891 - 2_loss: 1.0680 - 3_loss: 0.6977 - 0_accuracy: 0.8417 - 1_accuracy: 0.6281 - 2_accuracy: 0.6827 - 3_accuracy: 0.8062 - val_loss: 2.7752 - val_0_loss: 0.3991 - val_1_loss: 1.0049 - val_2_loss: 0.9374 - val_3_loss: 0.4338 - val_0_accuracy: 0.8833 - val_1_accuracy: 0.6750 - val_2_accuracy: 0.7375 - val_3_accuracy: 0.8958\n",
      "Epoch 9/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 3.4065 - 0_loss: 0.5167 - 1_loss: 1.1794 - 2_loss: 1.0408 - 3_loss: 0.6695 - 0_accuracy: 0.8476 - 1_accuracy: 0.6469 - 2_accuracy: 0.6858 - 3_accuracy: 0.8165\n",
      "Epoch 00009: val_loss did not improve from 2.66716\n",
      "100/100 [==============================] - 45s 452ms/step - loss: 3.3988 - 0_loss: 0.5144 - 1_loss: 1.1774 - 2_loss: 1.0386 - 3_loss: 0.6683 - 0_accuracy: 0.8481 - 1_accuracy: 0.6475 - 2_accuracy: 0.6862 - 3_accuracy: 0.8165 - val_loss: 2.7182 - val_0_loss: 0.3734 - val_1_loss: 0.9812 - val_2_loss: 0.9062 - val_3_loss: 0.4575 - val_0_accuracy: 0.8708 - val_1_accuracy: 0.7042 - val_2_accuracy: 0.7208 - val_3_accuracy: 0.8500\n",
      "Epoch 10/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 3.3723 - 0_loss: 0.5008 - 1_loss: 1.1911 - 2_loss: 1.0293 - 3_loss: 0.6510 - 0_accuracy: 0.8569 - 1_accuracy: 0.6355 - 2_accuracy: 0.6886 - 3_accuracy: 0.8131\n",
      "Epoch 00010: val_loss did not improve from 2.66716\n",
      "100/100 [==============================] - 45s 452ms/step - loss: 3.3694 - 0_loss: 0.4997 - 1_loss: 1.1901 - 2_loss: 1.0300 - 3_loss: 0.6496 - 0_accuracy: 0.8573 - 1_accuracy: 0.6358 - 2_accuracy: 0.6881 - 3_accuracy: 0.8135 - val_loss: 2.6783 - val_0_loss: 0.4334 - val_1_loss: 0.9007 - val_2_loss: 0.9336 - val_3_loss: 0.4105 - val_0_accuracy: 0.8833 - val_1_accuracy: 0.7083 - val_2_accuracy: 0.6917 - val_3_accuracy: 0.8625\n",
      "Epoch 11/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 3.2700 - 0_loss: 0.4559 - 1_loss: 1.1591 - 2_loss: 0.9793 - 3_loss: 0.6758 - 0_accuracy: 0.8727 - 1_accuracy: 0.6383 - 2_accuracy: 0.7104 - 3_accuracy: 0.8186\n",
      "Epoch 00011: val_loss improved from 2.66716 to 2.43233, saving model to ./logs/ep011-loss3.258-val_loss2.432.h5\n",
      "100/100 [==============================] - 45s 452ms/step - loss: 3.2582 - 0_loss: 0.4542 - 1_loss: 1.1550 - 2_loss: 0.9776 - 3_loss: 0.6714 - 0_accuracy: 0.8731 - 1_accuracy: 0.6390 - 2_accuracy: 0.7115 - 3_accuracy: 0.8198 - val_loss: 2.4323 - val_0_loss: 0.3114 - val_1_loss: 0.9071 - val_2_loss: 0.8141 - val_3_loss: 0.3998 - val_0_accuracy: 0.8833 - val_1_accuracy: 0.7208 - val_2_accuracy: 0.7500 - val_3_accuracy: 0.8708\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 12/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 3.1932 - 0_loss: 0.4235 - 1_loss: 1.1367 - 2_loss: 0.9876 - 3_loss: 0.6454 - 0_accuracy: 0.8864 - 1_accuracy: 0.6673 - 2_accuracy: 0.7155 - 3_accuracy: 0.8251\n",
      "Epoch 00012: val_loss did not improve from 2.43233\n",
      "100/100 [==============================] - 45s 452ms/step - loss: 3.1908 - 0_loss: 0.4218 - 1_loss: 1.1353 - 2_loss: 0.9902 - 3_loss: 0.6435 - 0_accuracy: 0.8867 - 1_accuracy: 0.6683 - 2_accuracy: 0.7154 - 3_accuracy: 0.8256 - val_loss: 2.5205 - val_0_loss: 0.3211 - val_1_loss: 0.9461 - val_2_loss: 0.8253 - val_3_loss: 0.4280 - val_0_accuracy: 0.9375 - val_1_accuracy: 0.7708 - val_2_accuracy: 0.7708 - val_3_accuracy: 0.8708\n",
      "Epoch 13/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 3.0299 - 0_loss: 0.4258 - 1_loss: 1.1007 - 2_loss: 0.9437 - 3_loss: 0.5597 - 0_accuracy: 0.8872 - 1_accuracy: 0.6644 - 2_accuracy: 0.7153 - 3_accuracy: 0.8445\n",
      "Epoch 00013: val_loss improved from 2.43233 to 2.41896, saving model to ./logs/ep013-loss3.023-val_loss2.419.h5\n",
      "100/100 [==============================] - 45s 452ms/step - loss: 3.0228 - 0_loss: 0.4251 - 1_loss: 1.0986 - 2_loss: 0.9427 - 3_loss: 0.5564 - 0_accuracy: 0.8871 - 1_accuracy: 0.6650 - 2_accuracy: 0.7160 - 3_accuracy: 0.8456 - val_loss: 2.4190 - val_0_loss: 0.3844 - val_1_loss: 0.9818 - val_2_loss: 0.7064 - val_3_loss: 0.3463 - val_0_accuracy: 0.8625 - val_1_accuracy: 0.7375 - val_2_accuracy: 0.7708 - val_3_accuracy: 0.8958\n",
      "Epoch 14/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 3.1038 - 0_loss: 0.4131 - 1_loss: 1.1216 - 2_loss: 0.9636 - 3_loss: 0.6055 - 0_accuracy: 0.8897 - 1_accuracy: 0.6601 - 2_accuracy: 0.7168 - 3_accuracy: 0.8365\n",
      "Epoch 00014: val_loss improved from 2.41896 to 2.16719, saving model to ./logs/ep014-loss3.100-val_loss2.167.h5\n",
      "100/100 [==============================] - 45s 452ms/step - loss: 3.0998 - 0_loss: 0.4153 - 1_loss: 1.1206 - 2_loss: 0.9623 - 3_loss: 0.6015 - 0_accuracy: 0.8896 - 1_accuracy: 0.6604 - 2_accuracy: 0.7173 - 3_accuracy: 0.8377 - val_loss: 2.1672 - val_0_loss: 0.2318 - val_1_loss: 0.9077 - val_2_loss: 0.7535 - val_3_loss: 0.2742 - val_0_accuracy: 0.9333 - val_1_accuracy: 0.7292 - val_2_accuracy: 0.7500 - val_3_accuracy: 0.9083\n",
      "Epoch 15/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 3.0037 - 0_loss: 0.3763 - 1_loss: 1.0902 - 2_loss: 0.9500 - 3_loss: 0.5873 - 0_accuracy: 0.8988 - 1_accuracy: 0.6717 - 2_accuracy: 0.7224 - 3_accuracy: 0.8386\n",
      "Epoch 00015: val_loss did not improve from 2.16719\n",
      "100/100 [==============================] - 45s 450ms/step - loss: 2.9972 - 0_loss: 0.3754 - 1_loss: 1.0905 - 2_loss: 0.9471 - 3_loss: 0.5841 - 0_accuracy: 0.8985 - 1_accuracy: 0.6723 - 2_accuracy: 0.7229 - 3_accuracy: 0.8398 - val_loss: 2.3484 - val_0_loss: 0.2769 - val_1_loss: 0.9749 - val_2_loss: 0.7828 - val_3_loss: 0.3137 - val_0_accuracy: 0.9083 - val_1_accuracy: 0.6875 - val_2_accuracy: 0.7583 - val_3_accuracy: 0.8917\n",
      "Epoch 16/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 3.0270 - 0_loss: 0.3686 - 1_loss: 1.1319 - 2_loss: 0.9322 - 3_loss: 0.5942 - 0_accuracy: 0.9024 - 1_accuracy: 0.6637 - 2_accuracy: 0.7279 - 3_accuracy: 0.8415\n",
      "Epoch 00016: val_loss improved from 2.16719 to 2.10855, saving model to ./logs/ep016-loss3.018-val_loss2.109.h5\n",
      "100/100 [==============================] - 45s 449ms/step - loss: 3.0182 - 0_loss: 0.3672 - 1_loss: 1.1298 - 2_loss: 0.9294 - 3_loss: 0.5918 - 0_accuracy: 0.9027 - 1_accuracy: 0.6644 - 2_accuracy: 0.7283 - 3_accuracy: 0.8421 - val_loss: 2.1086 - val_0_loss: 0.2666 - val_1_loss: 0.7816 - val_2_loss: 0.7347 - val_3_loss: 0.3257 - val_0_accuracy: 0.9417 - val_1_accuracy: 0.7417 - val_2_accuracy: 0.8000 - val_3_accuracy: 0.9125\n",
      "Epoch 17/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.9316 - 0_loss: 0.3623 - 1_loss: 1.0867 - 2_loss: 0.9122 - 3_loss: 0.5704 - 0_accuracy: 0.9030 - 1_accuracy: 0.6738 - 2_accuracy: 0.7399 - 3_accuracy: 0.8495\n",
      "Epoch 00017: val_loss improved from 2.10855 to 1.92806, saving model to ./logs/ep017-loss2.922-val_loss1.928.h5\n",
      "100/100 [==============================] - 45s 449ms/step - loss: 2.9217 - 0_loss: 0.3611 - 1_loss: 1.0840 - 2_loss: 0.9085 - 3_loss: 0.5680 - 0_accuracy: 0.9035 - 1_accuracy: 0.6737 - 2_accuracy: 0.7410 - 3_accuracy: 0.8502 - val_loss: 1.9281 - val_0_loss: 0.2270 - val_1_loss: 0.7879 - val_2_loss: 0.6248 - val_3_loss: 0.2883 - val_0_accuracy: 0.9375 - val_1_accuracy: 0.7667 - val_2_accuracy: 0.8167 - val_3_accuracy: 0.8917\n",
      "Epoch 18/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.9760 - 0_loss: 0.3606 - 1_loss: 1.0921 - 2_loss: 0.9231 - 3_loss: 0.6003 - 0_accuracy: 0.9047 - 1_accuracy: 0.6757 - 2_accuracy: 0.7321 - 3_accuracy: 0.8449\n",
      "Epoch 00018: val_loss did not improve from 1.92806\n",
      "100/100 [==============================] - 45s 453ms/step - loss: 2.9700 - 0_loss: 0.3594 - 1_loss: 1.0890 - 2_loss: 0.9225 - 3_loss: 0.5991 - 0_accuracy: 0.9052 - 1_accuracy: 0.6765 - 2_accuracy: 0.7323 - 3_accuracy: 0.8448 - val_loss: 2.1046 - val_0_loss: 0.1962 - val_1_loss: 0.9414 - val_2_loss: 0.6379 - val_3_loss: 0.3291 - val_0_accuracy: 0.9500 - val_1_accuracy: 0.7042 - val_2_accuracy: 0.8375 - val_3_accuracy: 0.9292\n",
      "Epoch 19/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.8630 - 0_loss: 0.3383 - 1_loss: 1.0595 - 2_loss: 0.9175 - 3_loss: 0.5477 - 0_accuracy: 0.9148 - 1_accuracy: 0.6837 - 2_accuracy: 0.7355 - 3_accuracy: 0.8590\n",
      "Epoch 00019: val_loss improved from 1.92806 to 1.90162, saving model to ./logs/ep019-loss2.858-val_loss1.902.h5\n",
      "100/100 [==============================] - 45s 450ms/step - loss: 2.8579 - 0_loss: 0.3377 - 1_loss: 1.0552 - 2_loss: 0.9197 - 3_loss: 0.5452 - 0_accuracy: 0.9150 - 1_accuracy: 0.6848 - 2_accuracy: 0.7348 - 3_accuracy: 0.8592 - val_loss: 1.9016 - val_0_loss: 0.1968 - val_1_loss: 0.7924 - val_2_loss: 0.5947 - val_3_loss: 0.3177 - val_0_accuracy: 0.9417 - val_1_accuracy: 0.7750 - val_2_accuracy: 0.8083 - val_3_accuracy: 0.9083\n",
      "Epoch 20/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.8183 - 0_loss: 0.3153 - 1_loss: 1.0615 - 2_loss: 0.8863 - 3_loss: 0.5552 - 0_accuracy: 0.9156 - 1_accuracy: 0.6867 - 2_accuracy: 0.7471 - 3_accuracy: 0.8620\n",
      "Epoch 00020: val_loss did not improve from 1.90162\n",
      "100/100 [==============================] - 45s 452ms/step - loss: 2.8141 - 0_loss: 0.3143 - 1_loss: 1.0608 - 2_loss: 0.8864 - 3_loss: 0.5525 - 0_accuracy: 0.9160 - 1_accuracy: 0.6865 - 2_accuracy: 0.7471 - 3_accuracy: 0.8625 - val_loss: 2.0621 - val_0_loss: 0.1883 - val_1_loss: 0.8643 - val_2_loss: 0.7239 - val_3_loss: 0.2856 - val_0_accuracy: 0.9458 - val_1_accuracy: 0.7375 - val_2_accuracy: 0.7958 - val_3_accuracy: 0.9125\n",
      "Epoch 21/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.9225 - 0_loss: 0.3292 - 1_loss: 1.0996 - 2_loss: 0.9055 - 3_loss: 0.5882 - 0_accuracy: 0.9156 - 1_accuracy: 0.6774 - 2_accuracy: 0.7355 - 3_accuracy: 0.8552\n",
      "Epoch 00021: val_loss did not improve from 1.90162\n",
      "100/100 [==============================] - 45s 450ms/step - loss: 2.9140 - 0_loss: 0.3278 - 1_loss: 1.0973 - 2_loss: 0.9034 - 3_loss: 0.5854 - 0_accuracy: 0.9158 - 1_accuracy: 0.6777 - 2_accuracy: 0.7360 - 3_accuracy: 0.8562 - val_loss: 2.1890 - val_0_loss: 0.1941 - val_1_loss: 1.0265 - val_2_loss: 0.6141 - val_3_loss: 0.3542 - val_0_accuracy: 0.9542 - val_1_accuracy: 0.7083 - val_2_accuracy: 0.8083 - val_3_accuracy: 0.8750\n",
      "Epoch 22/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.7690 - 0_loss: 0.3108 - 1_loss: 1.0645 - 2_loss: 0.8621 - 3_loss: 0.5315 - 0_accuracy: 0.9133 - 1_accuracy: 0.6896 - 2_accuracy: 0.7504 - 3_accuracy: 0.8647\n",
      "Epoch 00022: val_loss did not improve from 1.90162\n",
      "100/100 [==============================] - 45s 449ms/step - loss: 2.7564 - 0_loss: 0.3081 - 1_loss: 1.0601 - 2_loss: 0.8598 - 3_loss: 0.5283 - 0_accuracy: 0.9142 - 1_accuracy: 0.6910 - 2_accuracy: 0.7510 - 3_accuracy: 0.8654 - val_loss: 2.0568 - val_0_loss: 0.2528 - val_1_loss: 0.8550 - val_2_loss: 0.7077 - val_3_loss: 0.2414 - val_0_accuracy: 0.9000 - val_1_accuracy: 0.7667 - val_2_accuracy: 0.8292 - val_3_accuracy: 0.9167\n",
      "Epoch 23/80\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 99/100 [============================>.] - ETA: 0s - loss: 2.7805 - 0_loss: 0.3316 - 1_loss: 1.0420 - 2_loss: 0.8619 - 3_loss: 0.5450 - 0_accuracy: 0.9141 - 1_accuracy: 0.6886 - 2_accuracy: 0.7496 - 3_accuracy: 0.8651\n",
      "Epoch 00023: val_loss did not improve from 1.90162\n",
      "100/100 [==============================] - 45s 449ms/step - loss: 2.7749 - 0_loss: 0.3344 - 1_loss: 1.0404 - 2_loss: 0.8594 - 3_loss: 0.5407 - 0_accuracy: 0.9133 - 1_accuracy: 0.6883 - 2_accuracy: 0.7500 - 3_accuracy: 0.8665 - val_loss: 2.1314 - val_0_loss: 0.2526 - val_1_loss: 0.9067 - val_2_loss: 0.7110 - val_3_loss: 0.2611 - val_0_accuracy: 0.9083 - val_1_accuracy: 0.7333 - val_2_accuracy: 0.7917 - val_3_accuracy: 0.9125\n",
      "Epoch 24/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.7785 - 0_loss: 0.2978 - 1_loss: 1.0484 - 2_loss: 0.8854 - 3_loss: 0.5470 - 0_accuracy: 0.9184 - 1_accuracy: 0.6898 - 2_accuracy: 0.7536 - 3_accuracy: 0.8674\n",
      "Epoch 00024: val_loss did not improve from 1.90162\n",
      "100/100 [==============================] - 45s 448ms/step - loss: 2.7727 - 0_loss: 0.2959 - 1_loss: 1.0467 - 2_loss: 0.8855 - 3_loss: 0.5446 - 0_accuracy: 0.9192 - 1_accuracy: 0.6898 - 2_accuracy: 0.7535 - 3_accuracy: 0.8681 - val_loss: 1.9960 - val_0_loss: 0.2860 - val_1_loss: 0.9415 - val_2_loss: 0.5750 - val_3_loss: 0.1936 - val_0_accuracy: 0.9208 - val_1_accuracy: 0.7208 - val_2_accuracy: 0.8458 - val_3_accuracy: 0.9583\n",
      "Epoch 25/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.6644 - 0_loss: 0.2798 - 1_loss: 1.0311 - 2_loss: 0.8423 - 3_loss: 0.5112 - 0_accuracy: 0.9289 - 1_accuracy: 0.6909 - 2_accuracy: 0.7588 - 3_accuracy: 0.8664\n",
      "Epoch 00025: val_loss did not improve from 1.90162\n",
      "100/100 [==============================] - 45s 448ms/step - loss: 2.6572 - 0_loss: 0.2791 - 1_loss: 1.0278 - 2_loss: 0.8421 - 3_loss: 0.5081 - 0_accuracy: 0.9292 - 1_accuracy: 0.6915 - 2_accuracy: 0.7585 - 3_accuracy: 0.8673 - val_loss: 2.0197 - val_0_loss: 0.2511 - val_1_loss: 0.8498 - val_2_loss: 0.7100 - val_3_loss: 0.2088 - val_0_accuracy: 0.9208 - val_1_accuracy: 0.7542 - val_2_accuracy: 0.8000 - val_3_accuracy: 0.9500\n",
      "Epoch 26/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.7517 - 0_loss: 0.2943 - 1_loss: 1.0521 - 2_loss: 0.8687 - 3_loss: 0.5366 - 0_accuracy: 0.9211 - 1_accuracy: 0.6904 - 2_accuracy: 0.7517 - 3_accuracy: 0.8628\n",
      "Epoch 00026: val_loss improved from 1.90162 to 1.76142, saving model to ./logs/ep026-loss2.743-val_loss1.761.h5\n",
      "100/100 [==============================] - 45s 447ms/step - loss: 2.7434 - 0_loss: 0.2936 - 1_loss: 1.0507 - 2_loss: 0.8664 - 3_loss: 0.5327 - 0_accuracy: 0.9210 - 1_accuracy: 0.6908 - 2_accuracy: 0.7521 - 3_accuracy: 0.8640 - val_loss: 1.7614 - val_0_loss: 0.2157 - val_1_loss: 0.7790 - val_2_loss: 0.5379 - val_3_loss: 0.2287 - val_0_accuracy: 0.9250 - val_1_accuracy: 0.7625 - val_2_accuracy: 0.8500 - val_3_accuracy: 0.9292\n",
      "Epoch 27/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.6997 - 0_loss: 0.3028 - 1_loss: 1.0239 - 2_loss: 0.8487 - 3_loss: 0.5243 - 0_accuracy: 0.9228 - 1_accuracy: 0.6970 - 2_accuracy: 0.7532 - 3_accuracy: 0.8708\n",
      "Epoch 00027: val_loss did not improve from 1.76142\n",
      "100/100 [==============================] - 45s 450ms/step - loss: 2.6949 - 0_loss: 0.3013 - 1_loss: 1.0227 - 2_loss: 0.8500 - 3_loss: 0.5208 - 0_accuracy: 0.9229 - 1_accuracy: 0.6981 - 2_accuracy: 0.7533 - 3_accuracy: 0.8717 - val_loss: 1.9725 - val_0_loss: 0.1768 - val_1_loss: 0.8899 - val_2_loss: 0.6269 - val_3_loss: 0.2788 - val_0_accuracy: 0.9458 - val_1_accuracy: 0.7250 - val_2_accuracy: 0.8250 - val_3_accuracy: 0.9417\n",
      "Epoch 28/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.5844 - 0_loss: 0.2671 - 1_loss: 0.9727 - 2_loss: 0.8633 - 3_loss: 0.4813 - 0_accuracy: 0.9306 - 1_accuracy: 0.7121 - 2_accuracy: 0.7553 - 3_accuracy: 0.8742\n",
      "Epoch 00028: val_loss did not improve from 1.76142\n",
      "100/100 [==============================] - 45s 450ms/step - loss: 2.5804 - 0_loss: 0.2667 - 1_loss: 0.9714 - 2_loss: 0.8631 - 3_loss: 0.4792 - 0_accuracy: 0.9302 - 1_accuracy: 0.7123 - 2_accuracy: 0.7556 - 3_accuracy: 0.8746 - val_loss: 2.1187 - val_0_loss: 0.3358 - val_1_loss: 0.8772 - val_2_loss: 0.6848 - val_3_loss: 0.2209 - val_0_accuracy: 0.9042 - val_1_accuracy: 0.7333 - val_2_accuracy: 0.8042 - val_3_accuracy: 0.9375\n",
      "Epoch 29/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.5969 - 0_loss: 0.2609 - 1_loss: 1.0022 - 2_loss: 0.8340 - 3_loss: 0.4997 - 0_accuracy: 0.9274 - 1_accuracy: 0.7010 - 2_accuracy: 0.7649 - 3_accuracy: 0.8718\n",
      "Epoch 00029: val_loss did not improve from 1.76142\n",
      "100/100 [==============================] - 45s 452ms/step - loss: 2.5872 - 0_loss: 0.2600 - 1_loss: 0.9996 - 2_loss: 0.8301 - 3_loss: 0.4975 - 0_accuracy: 0.9277 - 1_accuracy: 0.7013 - 2_accuracy: 0.7665 - 3_accuracy: 0.8723 - val_loss: 1.8361 - val_0_loss: 0.1905 - val_1_loss: 0.8894 - val_2_loss: 0.5419 - val_3_loss: 0.2143 - val_0_accuracy: 0.9500 - val_1_accuracy: 0.7542 - val_2_accuracy: 0.8500 - val_3_accuracy: 0.9375\n",
      "Epoch 30/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.5201 - 0_loss: 0.2753 - 1_loss: 0.9403 - 2_loss: 0.7923 - 3_loss: 0.5121 - 0_accuracy: 0.9242 - 1_accuracy: 0.7165 - 2_accuracy: 0.7788 - 3_accuracy: 0.8767\n",
      "Epoch 00030: val_loss did not improve from 1.76142\n",
      "100/100 [==============================] - 45s 448ms/step - loss: 2.5076 - 0_loss: 0.2738 - 1_loss: 0.9375 - 2_loss: 0.7871 - 3_loss: 0.5092 - 0_accuracy: 0.9244 - 1_accuracy: 0.7175 - 2_accuracy: 0.7800 - 3_accuracy: 0.8771 - val_loss: 1.8271 - val_0_loss: 0.1712 - val_1_loss: 0.8816 - val_2_loss: 0.6145 - val_3_loss: 0.1597 - val_0_accuracy: 0.9625 - val_1_accuracy: 0.7542 - val_2_accuracy: 0.8333 - val_3_accuracy: 0.9667\n",
      "Epoch 31/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.5756 - 0_loss: 0.2763 - 1_loss: 0.9826 - 2_loss: 0.8221 - 3_loss: 0.4946 - 0_accuracy: 0.9303 - 1_accuracy: 0.7001 - 2_accuracy: 0.7740 - 3_accuracy: 0.8811\n",
      "Epoch 00031: val_loss improved from 1.76142 to 1.74665, saving model to ./logs/ep031-loss2.563-val_loss1.747.h5\n",
      "100/100 [==============================] - 45s 452ms/step - loss: 2.5625 - 0_loss: 0.2755 - 1_loss: 0.9782 - 2_loss: 0.8183 - 3_loss: 0.4906 - 0_accuracy: 0.9304 - 1_accuracy: 0.7013 - 2_accuracy: 0.7752 - 3_accuracy: 0.8823 - val_loss: 1.7467 - val_0_loss: 0.2024 - val_1_loss: 0.7001 - val_2_loss: 0.6373 - val_3_loss: 0.2068 - val_0_accuracy: 0.9375 - val_1_accuracy: 0.7875 - val_2_accuracy: 0.7917 - val_3_accuracy: 0.9125\n",
      "Epoch 32/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.5825 - 0_loss: 0.2605 - 1_loss: 1.0002 - 2_loss: 0.8169 - 3_loss: 0.5050 - 0_accuracy: 0.9299 - 1_accuracy: 0.6982 - 2_accuracy: 0.7658 - 3_accuracy: 0.8807\n",
      "Epoch 00032: val_loss did not improve from 1.74665\n",
      "100/100 [==============================] - 45s 452ms/step - loss: 2.5785 - 0_loss: 0.2597 - 1_loss: 0.9997 - 2_loss: 0.8165 - 3_loss: 0.5026 - 0_accuracy: 0.9298 - 1_accuracy: 0.6988 - 2_accuracy: 0.7665 - 3_accuracy: 0.8808 - val_loss: 1.9577 - val_0_loss: 0.1737 - val_1_loss: 0.9013 - val_2_loss: 0.6135 - val_3_loss: 0.2692 - val_0_accuracy: 0.9500 - val_1_accuracy: 0.7167 - val_2_accuracy: 0.8208 - val_3_accuracy: 0.9167\n",
      "Epoch 33/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.5901 - 0_loss: 0.2508 - 1_loss: 0.9955 - 2_loss: 0.8328 - 3_loss: 0.5110 - 0_accuracy: 0.9354 - 1_accuracy: 0.7041 - 2_accuracy: 0.7727 - 3_accuracy: 0.8723\n",
      "Epoch 00033: val_loss improved from 1.74665 to 1.66128, saving model to ./logs/ep033-loss2.587-val_loss1.661.h5\n",
      "100/100 [==============================] - 45s 454ms/step - loss: 2.5874 - 0_loss: 0.2499 - 1_loss: 0.9982 - 2_loss: 0.8316 - 3_loss: 0.5076 - 0_accuracy: 0.9356 - 1_accuracy: 0.7042 - 2_accuracy: 0.7733 - 3_accuracy: 0.8729 - val_loss: 1.6613 - val_0_loss: 0.2666 - val_1_loss: 0.7373 - val_2_loss: 0.4993 - val_3_loss: 0.1581 - val_0_accuracy: 0.9375 - val_1_accuracy: 0.7750 - val_2_accuracy: 0.8583 - val_3_accuracy: 0.9750\n",
      "Epoch 34/80\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 99/100 [============================>.] - ETA: 0s - loss: 2.4217 - 0_loss: 0.2530 - 1_loss: 0.9223 - 2_loss: 0.7807 - 3_loss: 0.4657 - 0_accuracy: 0.9360 - 1_accuracy: 0.7214 - 2_accuracy: 0.7740 - 3_accuracy: 0.8853\n",
      "Epoch 00034: val_loss did not improve from 1.66128\n",
      "100/100 [==============================] - 45s 449ms/step - loss: 2.4126 - 0_loss: 0.2528 - 1_loss: 0.9198 - 2_loss: 0.7780 - 3_loss: 0.4619 - 0_accuracy: 0.9362 - 1_accuracy: 0.7212 - 2_accuracy: 0.7744 - 3_accuracy: 0.8865 - val_loss: 1.7215 - val_0_loss: 0.1936 - val_1_loss: 0.7655 - val_2_loss: 0.5713 - val_3_loss: 0.1910 - val_0_accuracy: 0.9542 - val_1_accuracy: 0.7667 - val_2_accuracy: 0.8583 - val_3_accuracy: 0.9458\n",
      "Epoch 35/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.4624 - 0_loss: 0.2347 - 1_loss: 0.9382 - 2_loss: 0.8031 - 3_loss: 0.4864 - 0_accuracy: 0.9428 - 1_accuracy: 0.7275 - 2_accuracy: 0.7731 - 3_accuracy: 0.8855\n",
      "Epoch 00035: val_loss did not improve from 1.66128\n",
      "100/100 [==============================] - 45s 449ms/step - loss: 2.4504 - 0_loss: 0.2331 - 1_loss: 0.9350 - 2_loss: 0.7985 - 3_loss: 0.4838 - 0_accuracy: 0.9431 - 1_accuracy: 0.7275 - 2_accuracy: 0.7744 - 3_accuracy: 0.8858 - val_loss: 1.7657 - val_0_loss: 0.0892 - val_1_loss: 0.7814 - val_2_loss: 0.7131 - val_3_loss: 0.1820 - val_0_accuracy: 0.9667 - val_1_accuracy: 0.7583 - val_2_accuracy: 0.7875 - val_3_accuracy: 0.9625\n",
      "Epoch 36/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.5669 - 0_loss: 0.2419 - 1_loss: 0.9894 - 2_loss: 0.8255 - 3_loss: 0.5101 - 0_accuracy: 0.9388 - 1_accuracy: 0.7109 - 2_accuracy: 0.7748 - 3_accuracy: 0.8817\n",
      "Epoch 00036: val_loss did not improve from 1.66128\n",
      "100/100 [==============================] - 45s 451ms/step - loss: 2.5588 - 0_loss: 0.2414 - 1_loss: 0.9880 - 2_loss: 0.8222 - 3_loss: 0.5072 - 0_accuracy: 0.9388 - 1_accuracy: 0.7113 - 2_accuracy: 0.7763 - 3_accuracy: 0.8825 - val_loss: 1.9531 - val_0_loss: 0.1681 - val_1_loss: 0.9251 - val_2_loss: 0.6773 - val_3_loss: 0.1826 - val_0_accuracy: 0.9333 - val_1_accuracy: 0.7583 - val_2_accuracy: 0.8083 - val_3_accuracy: 0.9417\n",
      "Epoch 37/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.4409 - 0_loss: 0.2344 - 1_loss: 0.9375 - 2_loss: 0.7940 - 3_loss: 0.4749 - 0_accuracy: 0.9392 - 1_accuracy: 0.7205 - 2_accuracy: 0.7696 - 3_accuracy: 0.8807\n",
      "Epoch 00037: val_loss improved from 1.66128 to 1.56707, saving model to ./logs/ep037-loss2.436-val_loss1.567.h5\n",
      "100/100 [==============================] - 45s 447ms/step - loss: 2.4360 - 0_loss: 0.2333 - 1_loss: 0.9376 - 2_loss: 0.7927 - 3_loss: 0.4723 - 0_accuracy: 0.9394 - 1_accuracy: 0.7206 - 2_accuracy: 0.7704 - 3_accuracy: 0.8813 - val_loss: 1.5671 - val_0_loss: 0.1000 - val_1_loss: 0.8198 - val_2_loss: 0.5024 - val_3_loss: 0.1450 - val_0_accuracy: 0.9833 - val_1_accuracy: 0.7500 - val_2_accuracy: 0.8250 - val_3_accuracy: 0.9667\n",
      "Epoch 38/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.5210 - 0_loss: 0.2594 - 1_loss: 0.9853 - 2_loss: 0.7995 - 3_loss: 0.4768 - 0_accuracy: 0.9312 - 1_accuracy: 0.6997 - 2_accuracy: 0.7725 - 3_accuracy: 0.8819\n",
      "Epoch 00038: val_loss did not improve from 1.56707\n",
      "100/100 [==============================] - 45s 447ms/step - loss: 2.5146 - 0_loss: 0.2580 - 1_loss: 0.9838 - 2_loss: 0.7977 - 3_loss: 0.4752 - 0_accuracy: 0.9317 - 1_accuracy: 0.6996 - 2_accuracy: 0.7729 - 3_accuracy: 0.8817 - val_loss: 1.6257 - val_0_loss: 0.1383 - val_1_loss: 0.7117 - val_2_loss: 0.5742 - val_3_loss: 0.2014 - val_0_accuracy: 0.9583 - val_1_accuracy: 0.7958 - val_2_accuracy: 0.8417 - val_3_accuracy: 0.9625\n",
      "Epoch 39/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.4499 - 0_loss: 0.2330 - 1_loss: 0.9516 - 2_loss: 0.7975 - 3_loss: 0.4678 - 0_accuracy: 0.9339 - 1_accuracy: 0.7142 - 2_accuracy: 0.7835 - 3_accuracy: 0.8843\n",
      "Epoch 00039: val_loss improved from 1.56707 to 1.45696, saving model to ./logs/ep039-loss2.441-val_loss1.457.h5\n",
      "100/100 [==============================] - 45s 451ms/step - loss: 2.4410 - 0_loss: 0.2314 - 1_loss: 0.9497 - 2_loss: 0.7944 - 3_loss: 0.4655 - 0_accuracy: 0.9344 - 1_accuracy: 0.7146 - 2_accuracy: 0.7842 - 3_accuracy: 0.8848 - val_loss: 1.4570 - val_0_loss: 0.1060 - val_1_loss: 0.6162 - val_2_loss: 0.5582 - val_3_loss: 0.1765 - val_0_accuracy: 0.9667 - val_1_accuracy: 0.8000 - val_2_accuracy: 0.8333 - val_3_accuracy: 0.9458\n",
      "Epoch 40/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.4600 - 0_loss: 0.2434 - 1_loss: 0.9479 - 2_loss: 0.7970 - 3_loss: 0.4717 - 0_accuracy: 0.9362 - 1_accuracy: 0.7189 - 2_accuracy: 0.7763 - 3_accuracy: 0.8843\n",
      "Epoch 00040: val_loss did not improve from 1.45696\n",
      "100/100 [==============================] - 45s 450ms/step - loss: 2.4535 - 0_loss: 0.2416 - 1_loss: 0.9466 - 2_loss: 0.7956 - 3_loss: 0.4697 - 0_accuracy: 0.9369 - 1_accuracy: 0.7196 - 2_accuracy: 0.7773 - 3_accuracy: 0.8844 - val_loss: 1.6614 - val_0_loss: 0.1167 - val_1_loss: 0.9122 - val_2_loss: 0.4561 - val_3_loss: 0.1764 - val_0_accuracy: 0.9583 - val_1_accuracy: 0.7667 - val_2_accuracy: 0.8750 - val_3_accuracy: 0.9500\n",
      "Epoch 41/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.4402 - 0_loss: 0.2312 - 1_loss: 0.9228 - 2_loss: 0.8073 - 3_loss: 0.4790 - 0_accuracy: 0.9413 - 1_accuracy: 0.7193 - 2_accuracy: 0.7736 - 3_accuracy: 0.8843\n",
      "Epoch 00041: val_loss did not improve from 1.45696\n",
      "100/100 [==============================] - 45s 450ms/step - loss: 2.4348 - 0_loss: 0.2297 - 1_loss: 0.9232 - 2_loss: 0.8062 - 3_loss: 0.4758 - 0_accuracy: 0.9419 - 1_accuracy: 0.7192 - 2_accuracy: 0.7746 - 3_accuracy: 0.8852 - val_loss: 1.4653 - val_0_loss: 0.1792 - val_1_loss: 0.6602 - val_2_loss: 0.4453 - val_3_loss: 0.1807 - val_0_accuracy: 0.9542 - val_1_accuracy: 0.7917 - val_2_accuracy: 0.8833 - val_3_accuracy: 0.9625\n",
      "Epoch 42/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.4310 - 0_loss: 0.2234 - 1_loss: 0.9605 - 2_loss: 0.7753 - 3_loss: 0.4719 - 0_accuracy: 0.9451 - 1_accuracy: 0.7229 - 2_accuracy: 0.7780 - 3_accuracy: 0.8849\n",
      "Epoch 00042: val_loss did not improve from 1.45696\n",
      "100/100 [==============================] - 45s 449ms/step - loss: 2.4222 - 0_loss: 0.2231 - 1_loss: 0.9539 - 2_loss: 0.7765 - 3_loss: 0.4688 - 0_accuracy: 0.9450 - 1_accuracy: 0.7248 - 2_accuracy: 0.7788 - 3_accuracy: 0.8854 - val_loss: 1.7274 - val_0_loss: 0.1599 - val_1_loss: 0.8109 - val_2_loss: 0.6082 - val_3_loss: 0.1484 - val_0_accuracy: 0.9417 - val_1_accuracy: 0.7833 - val_2_accuracy: 0.8333 - val_3_accuracy: 0.9500\n",
      "Epoch 43/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.4812 - 0_loss: 0.2227 - 1_loss: 0.9538 - 2_loss: 0.8088 - 3_loss: 0.4959 - 0_accuracy: 0.9442 - 1_accuracy: 0.7258 - 2_accuracy: 0.7744 - 3_accuracy: 0.8904\n",
      "Epoch 00043: val_loss did not improve from 1.45696\n",
      "100/100 [==============================] - 45s 450ms/step - loss: 2.4763 - 0_loss: 0.2216 - 1_loss: 0.9564 - 2_loss: 0.8063 - 3_loss: 0.4919 - 0_accuracy: 0.9444 - 1_accuracy: 0.7254 - 2_accuracy: 0.7752 - 3_accuracy: 0.8913 - val_loss: 1.8409 - val_0_loss: 0.1043 - val_1_loss: 0.8361 - val_2_loss: 0.7101 - val_3_loss: 0.1903 - val_0_accuracy: 0.9667 - val_1_accuracy: 0.7792 - val_2_accuracy: 0.8083 - val_3_accuracy: 0.9375\n",
      "Epoch 44/80\n",
      " 99/100 [============================>.] - ETA: 0s - loss: 2.4403 - 0_loss: 0.2169 - 1_loss: 0.9505 - 2_loss: 0.8017 - 3_loss: 0.4711 - 0_accuracy: 0.9455 - 1_accuracy: 0.7195 - 2_accuracy: 0.7734 - 3_accuracy: 0.8889\n",
      "Epoch 00044: val_loss did not improve from 1.45696\n",
      "100/100 [==============================] - 45s 447ms/step - loss: 2.4309 - 0_loss: 0.2163 - 1_loss: 0.9469 - 2_loss: 0.8004 - 3_loss: 0.4673 - 0_accuracy: 0.9454 - 1_accuracy: 0.7204 - 2_accuracy: 0.7742 - 3_accuracy: 0.8898 - val_loss: 1.7208 - val_0_loss: 0.1398 - val_1_loss: 0.7178 - val_2_loss: 0.6747 - val_3_loss: 0.1885 - val_0_accuracy: 0.9417 - val_1_accuracy: 0.7625 - val_2_accuracy: 0.8125 - val_3_accuracy: 0.9333\n"
     ]
    }
   ],
   "source": [
    "# Optimizer for the multi-head captcha model.\n",
    "# (Renamed from 'adam' — the original name was misleading: this is RMSprop.)\n",
    "optimizer = keras.optimizers.RMSprop()\n",
    "model.compile(optimizer=optimizer,\n",
    "              loss='categorical_crossentropy',\n",
    "              metrics=['accuracy'])\n",
    "\n",
    "# Checkpoint: save weights whenever val_loss improves.\n",
    "cp_callback = keras.callbacks.ModelCheckpoint(\n",
    "    './logs/' + 'ep{epoch:03d}-loss{loss:.3f}-val_loss{val_loss:.3f}.h5',\n",
    "    save_best_only=True, save_weights_only=True, verbose=1)\n",
    "# Reduce the learning rate when training loss plateaus.\n",
    "# NOTE: RMSprop's default learning rate is 1e-3, so min_lr must be below\n",
    "# that for the reduction to take effect (the original min_lr=0.001 made\n",
    "# this callback a no-op).\n",
    "reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='loss', factor=0.2,\n",
    "                                              patience=5, min_lr=1e-5)\n",
    "# Stop training once the loss has not improved for 10 epochs.\n",
    "# (Renamed from 'reduce_tr' — it is an EarlyStopping callback, not an LR one.)\n",
    "early_stopping = keras.callbacks.EarlyStopping(monitor='loss', patience=10,\n",
    "                                               verbose=0, mode='auto')\n",
    "\n",
    "model.fit_generator(generator(height, width, batch_size, n_class),\n",
    "                    epochs=80,\n",
    "                    steps_per_epoch=100,\n",
    "                    validation_data=generator(height, width, batch_size, n_class),\n",
    "                    validation_steps=5,\n",
    "                    callbacks=[cp_callback, reduce_lr, early_stopping])\n",
    "\n",
    "# Always save the final weights as well, regardless of val_loss.\n",
    "model.save_weights('./logs/' + 'last.h5')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 验证代码 (Validation: predict a freshly generated captcha with the trained model)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "import matplotlib.pyplot as plt  # for displaying the sampled captcha\n",
    "\n",
    "# Captcha geometry / number of classes; batch_size=1 so we can inspect\n",
    "# one sample at a time during validation.\n",
    "height, width, batch_size, n_class = 50, 100, 1, 10\n",
    "\n",
    "def get_code():\n",
    "    \"\"\"Return (alphabet, code): the digit alphabet and a random 4-char code.\"\"\"\n",
    "    raw = string.digits\n",
    "    # + string.ascii_lowercase  # lowercase letters (disabled)\n",
    "    # + string.ascii_uppercase  # uppercase letters (disabled)\n",
    "    random_code = ''.join(random.sample(raw, 4))\n",
    "    return raw, random_code\n",
    "\n",
    "def generator(height, width, batch_size, n_class):\n",
    "    \"\"\"Yield (X, y, pil_img): a normalized image batch, 4 per-character\n",
    "    one-hot label arrays, and the PIL image of the last sample (for display).\"\"\"\n",
    "    X = np.zeros((batch_size, height, width, 3))\n",
    "    y = [np.zeros((batch_size, n_class), dtype=np.uint8) for _ in range(4)]\n",
    "\n",
    "    # Captcha renderer. Renamed from 'generator', which shadowed this\n",
    "    # function's own name.\n",
    "    captcha_gen = ImageCaptcha(height=height, width=width)\n",
    "    while True:\n",
    "        for i in range(batch_size):\n",
    "            raw, random_code = get_code()\n",
    "            # Render the captcha ONCE and reuse it. generate_image() is\n",
    "            # stochastic, so the original code — which called it twice —\n",
    "            # displayed a different rendering than the one actually fed to\n",
    "            # the model. Also avoids shadowing the imported keras 'image'\n",
    "            # module with a local variable.\n",
    "            pil_img = captcha_gen.generate_image(random_code)\n",
    "            X[i] = np.array(pil_img, dtype=np.float32) / 255.0\n",
    "\n",
    "            # One-hot encode each of the 4 characters into its own head.\n",
    "            for j, ch in enumerate(random_code):\n",
    "                y[j][i, :] = 0\n",
    "                y[j][i, raw.find(ch)] = 1\n",
    "        yield X, y, pil_img"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAADWCAYAAADIK9l4AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAgAElEQVR4nO2deZBkR33nv7+urq6+qq+Z7tYcmukRjI5BaCR2LGQESEKwSEhI2OBYWGEGIwLv4UDY3jDCa8fCrg/Y8BpWETa7CsQyGIJjMbsjxLHW6oDlEhqdMBK6RjOjnqPv+6jq6s79o6pf/jKnq/p1VXVXva7vJ2Ji8r2X9V52vqysl9/3O8QYA0IIIdGjrtINIIQQUhycwAkhJKJwAieEkIjCCZwQQiIKJ3BCCIkonMAJISSicAIn64aIGBGZEZG/rHRbNgMi8pKIpEXkK5VuC6kOOIGT9Wa/MebfA4CI9InI8eUDIvIVETkjIpMi8ryIfFgdu1ZEHg5zARH5oIh8SW0v/3BM5/59QR37koh8MOR5HxaRa9X2H4rIWRGZEJEvikhCXzPkOYM+EJGEiNwjIidEZEpEnhCRG1Vdpw+MMa8C8FdhrkNqA07gpJL8NYA+Y0wbgFsA/IWI/LMynXu/MaY19+/Dq1cvjIi8HcCdAK4H0AfgAgCfKvG09QBeAXANgHYAfw7gmyLSV+J5SY3ACZxUDGPMUWNMankz9+9VFWxSIQ4CuCfX5jEA/wnAB0s5oTFmxhjzSWPMcWPMkjHmPgAvAyjXjxjZ5Ahd6cl6kZMV9hpjXixQ5++RnQibADwB4M3GmOkyXPcMsg8oPwXwR8aY4yWe8ykAf2WM+UZueyuAIQBbjTEjpZxbXaMXwAkAlxtjfp2nzicBvNoY8/5yXJNEGz6Bk4pijPk3AJIA3gTg2wBShT8RimuQlTkuBnAawH0iUl/iOVsBTKjt5XKyxPMCAEQkDuCrAA7lm7wJ8eEETiqOMWbRGPNjADsB/OsynO9Hxpi0MWYcwB0A9gC4pMTTTgNoU9vL5akSzwsRqQPwDwDSAP6g1POR2oETOKkm6rE+GrgBICWe4yiA/Wp7P4CBUuUTEREA9wDoBfBuY8xCKecjtQUncFIRRKRHRN4rIq0iEstZebwPwIN56j+c039XO+9rROTy3DlbAfwXAKcAPLtC3b6cyWFfiCZ/GcDtIrJPRDoB/BmAL+VpwyfDmkAC+Dyyq4N3GmPmQn6GEACcwEnlMMjKJf0AxgD8DYCPGWMO56l/PoCfhDhvL4BvAJgEcAxZLfzmPE+25yP70vDUqo015gcA/jOAh3KfOQHgP5TSVhHZDeD3AVwO4KyyW79ttc8SAtAKhawjIjKP7EvJu4wxf17CeXYC+J/GmN8sW+Oy5/0zAEPGmP9e5vM+CeD6clmnqPM+B2AHgG8aYz5UznOTaMIJnBBCIgolFEIIiSicwAkhJKKUNIGLyA0i8pyIvCgid5arUYQQQlanaA1cRGIAngfwNmQtCR4F8D5jzDP5PrNl61azq6+vqOuR6uGZZ88E5X2XbKtgS0pjbHy2qM91djSXuSWbj9nZeWc7vbAUlMvx2q1OWfW3t2/++/HkY48NG2O6/f2luBdfCeBFY8wxABCRrwO4FUDeCXxXXx8efPTREi5JqoHXHfjroPzgo5+oYEtK438dfjLvMbO0lPfYb//W69ajOZuKJ594ztnuP2VN3NMLdgavE9e/ainP7O7XizfYeje944qi2xkVuurqTqy0vxQJZQeyoTCX6c/tcxCRj4jIERE5Mjw0VMLlCCGEaEp5Al/JNfmcn09jzN0A7gaAKw4cqLjNYjqdcbYzmcW8x/Svfn08FpTjcbfb4upYlBkft09Jx0+4Jsx33fVQUL58/86g/Kn/+D2n3sGDrw/KHQWWth0dTUW3cy3MzrqxsY4fHwzKTY32Ps7N+X4+9t4nEu79/slP
7NPl9u1tqrzFqZdINKy5vVFjYcF+f6amrCS1uOh/J0SV7DSQ74n7HLzZpqW5MSgPDtoYY52dLU49/7u62SjlCbwfWY+zZXYiG/mNEELIBlDKBP4ogL0iskdEGgC8F8C95WkWIYSQ1Sh6fWGMyYjIHwD4PwBiAL5ojDlatpYRQggpSEkCkTHmewC+t2rFCrCwYPVsrW1PTbmaaH//aFCuq3MtD7Q819Js9czzd7nWPHXKpikW2xy+UYcPPx2q3rXX7HW2Txy3/Xnoh48E5Vtvvcyp1werF5dbD9f3e3ravd8DAzbZTypl6xWSYv13IxOTdpy0twd5jR09GAASCWw6Fhfd78jcXDoonzwxHJRHRl0zwsUl1cGyYvEc9C3xzZ3n521fnzltx1xra6NTr9wauH5PVIiNesezOWYbQgipQTiBE0JIRNk0NjZaMgHcpfOxYwNBeWLCXVKn1PLYLHlL4Ab7+yawx4aHxp16523rCsqx2OYwHfMlDy2p+LKJ5tCXf77ifl+SueOj15XQusJoKeOEWtYDwNy8vd9Lalnv+Yk4nn6+uqI/l0otrrh/s+J/zwYHxoLyyIiVF2bn3HozM1ZqaYjb71Ui4ZobJhrj6lpLqux+N/X3dmZ2ZbkUAJqbS9OxfMlEm9f6Y1p/Z9ZTItTwCZwQQiIKJ3BCCIkokZNQFrXn5EJ+a4MXX7Syyfi4fSM+n3KXWKLeg9fF3CVwXcZuT6slYCLhLqv08i7Klgd6qaeXgIAreYxPWI+7Q4ceQRh8SWY90dYl6bS79NYSQKPyxEw0uEt5ragsetLa7JzdnlPWEIuLtSehjE/Y74L+bmnJBABGRqz1z9FnbDC0d970Gqdeb09rUJ5UFmOzs+75tHSlDVQKhLAJTViPZB8tqaynRKjhEzghhEQUTuCEEBJROIETQkhEqXoN/BzPr3mrhZ05bfWp02ennXqzyrRI62V+XGGNMe4xHYS+LmWP1YLWWcj0KZOxfXv11XucY2fPWhPLd7/bxs3euaM99PmLQY+TxUX9nsTVr/UdTrZak7Xdu7Y69VIpO85OvDLmHDPG/v3qUqgBK8JzTCV1v09N23dNIyMzTr0HHng+KLe0WFPbRx496dS77DIbkfq88zqC8ksvDTj1FtJWp9bvPHxzQx1ttL5+7VFDw3okAxv7nmcZPoETQkhE4QROCCERpeollCXPLmhkxAZvPzMwFZT9IFVaDkkk7O+UDjwFAIkGuz0145pI6eWxNlWq9wLkSAFZZrOg/35dHh2ZOrdyDu2ll06f5xzTHnMNDaUPw6UlnVjALt+XPAmuQSVnSLZam8/OLjcRQCZjJZ7BYU+em/GTP+SoAQnFH+p6W5effLLfqdegPC4blbflb/+Wmw4tmbT3QXtC63sFANPT9h5omWxgwJW72pL2c8VIKIU8ks8J0LZ7Y7wvNXwCJ4SQiMIJnBBCIgoncEIIiShVr4H7rrvzKprc3KzVwTxvZyQSVpDbssXqUTqRLeAms/UTrE5OqWsrTb2uzv3di9Vtfg18QrnPH3t5KCh/5z43CZN2ef7xT48H5R07XNf8tjZ7T8qhgWuXea2v++OnWSXm2LbdRpFMJOJOvfp6q513b211jk1NuW7dtYSfIEH3Z4M6duWVu516L71ko0LedtuVQVnrxoCrHWuTQB3xEwBGxqwZ4YwKozHvJafW+nhYVTpsSIlCn9so+AROCCERhRM4IYRElKqXUPxceDPTaXXM7m9qcv+U9ja7tOtSJmKdHe5yeFKZnPmRCmMxFfFM7Y/Xu797NWA95ixnv/OdXwbllBdAP6O8VC++qDcoz3lLW+0919paegjHjDIXHBuzS2rfc7BBRR3U0o0vDej8qJ2dSedY4xlrOqk9ETOLpXsBVlvORR+/n3p7O4PymJI1GjxJ6u1v3xeUOzvs97HQ3xGP2z5LJDzppsmef3bG3u9F737788daqVQ/h4VP4IQQElE4gRNCSESpegklVucuPXUgnIZx
vcRy6+3u6w7K7e12yeZ7kmUW7VK+o6PZOaatULSn5+jYrFOvrd3KMk1NVg6o9uXwWtDWHJfv3x6Uh4ZcT8wDB84Pyl2dtt996590ykoq8/O27C+V83m5+kGLtPXBnDqfeBZDOolDIeuXWMx+rt6TzLSEtpC2kp72EgaA9rZGdY78Ekq+BALVkHPRx/87tMzR0mJljWTSzQ3b1Wm/W+3ta29vLOaOAz2e6tS98hWTEhWUqodP4IQQElFWncBF5IsiMigiv1L7ukTkfhF5Ifd/Z6FzEEIIKT9hnsC/BOAGb9+dAB4wxuwF8EBumxBCyAayqgZujPmRiPR5u28FcG2ufAjAwwA+XsZ2BcS9ZLNbtrYF5RllPtTU7JottbVZza2x0epxfnTDlhZbL97gBqHXpm4DKvLhT3/2slPv/cqzbEZ5h/oaeL5IZr63VzVq4trjTptlvv2fX+TU62i37wC2bNEJal3vxeMnrGdePC6q7Jp5xmIra8dpz3zx9GkbhU7fN1/n3qY8+rR+Wwhfhq9X7dWmg5kCyQQ0/rjQune1Jc1dDd2/Tcq0z3v1cI6GvVb0OwkASDjX0vfDFb21Gak2KdwsEUSL1cB7jTFnACD3f0/5mkQIISQM6/4SU0Q+IiJHROTI8NDQ6h8ghBASimLNCAdEZJsx5oyIbAMwmK+iMeZuAHcDwBUHDqzZqMdfQjcrM732dit/dPe4ORd9j7Fl/EBUehm9sOA2Ty9177//uaDcmnQ9B7/y1UeD8rtu2R+UD3355yu2AajO5XAhXK/XuNrvml729GhzMVueT7nSlZY5ZmZsLsVk0pWPwkoo2tNzQQW2am1pdOoV8r7MjxTYsh3jSyZhvQDD5l2sRM7F1dDfn507renuxKQrRxaTTKHQ5/W2HiPnmJeqHLqLGWXWGfreh6cSZsPFPoHfC+BgrnwQwOHyNIcQQkhYwpgRfg3AzwBcJCL9InI7gE8DeJuIvADgbbltQgghG0gYK5T35Tl0fZnbEgptlbJrd3feemGXbIvqrXWiwfUee/zxV4KyzumX8CwbtGzyv+99KtR1q3E5rPGtdfS27rMGz0qop8e6BLS02iVrrN61Ejp2zL4PGRu3nq1bu10pLF+YK1+dyGSUtYHa35Dw5I+ijA/CSSHFOv3psVBtORdXQ8tQoqxBmpobVqpeNL702aZyZ8Zi40HZtwTS+VG3qrju5RBQClkTFbyPZfSipScmIYREFE7ghBASUTiBE0JIRKn6aIQ+4U2/wqE9tYaGJp1jl73WRt2rqzsblLXnJQB099iA/wc/cFVQLhhNrgr1TI2fCGF21pr66SQGjV6UQR3Iv0WZ8KVSrjYZj9tnB53P0jcDy8c5Ued0G5ykDa5GX0z+Td9rT2/WqY16z1swn7eff7+1JlptORfXQn0ek89y4Hti6giR+h5nvByouk2lJncA8keOBPJ70frzQDnNhvkETgghEYUTOCGERJTISSjlRicq8AP3t7VbCeCmm14TlLdtd03ddu6wpnPj7XaJFeXlsI9efWrvO1/SyidR+GqCY6VozErFc9AmYpmMu1ReVF6Qug3bt3c59cIGsNL4S28dPGlK5WhNJt28nzMz9lhra/77HbWxUA3o4eTIWJ75cEx9p/N59RZLNXjQ8gmcEEIiCidwQgiJKDUnoWgLCgBYzNhtPxiRjoHdkqfss1mWw1omAIDGRusTGVPL1N5eV07yPxeGsLYBaSV3nT075hxbUPeuQ0lfvtdsMVZMvqwzMmItcgYHpoPyj3+cP058ImHHzGYZI5Uk35jxPTZ1vP+YH6S8RHxpJG+8/93rF++fT+CEEBJROIETQkhE4QROCCERpeY08AXPU2tszHpf+kkCtNBWV2JOv6jha4naE65BmeKlUvNevSRWwjcjLEaO1PdnZsY12VtUuSl1/sVSczECwPS0m89zcMhGuPvBP/06KLe2urET7/vu0aB8x0eZdbCc6PcSOjqmeO9g9Liti5X+vKr1
az+XbT6z4fV858EncEIIiSicwAkhJKLUnITiyyTjE1YCmJvzJBTRRbVMqy01JYf9oxeVG6VvYucngghxOiwp681CJoX61DqBA+CaiCWVlFFqLkYAfhJMPPlUf1BONORP9FHtSTs2C4sq8Nq5X831+7JWgzkon8AJISSicAInhJCIUnMSin5jDQDz8yvnegSAeL1dfun41X5s4lpD91Nq3rUGmVfbRkkefpzv+Xm7XSdK5iigoWi5xq9Wrzwsk23W+67c8eMB4HWv2xmU0ykru93mxYmv9pjvUUaPBS2SpL1xpoOe6TFYTFCzaqS2ZyJCCIkwnMAJISSicAInhJCIEjkNXOekK0RbmzUl06aDc3OuV11KaZi+92FHhz3Heb3Ww7CYvIrrTdh+KVqL1WZ/yp5vaHjaqaa16e3bbaKLkWE336jOg9mYyK9H6nvnlv0cmzoPpo38V+z7CidBhKeXtrXaaIc33mgTfWzf5kZmpO69fhj1gkWPOT+i6MCAjVrZ1mbvGzVwQgghFWXVCVxEzheRh0TkWRE5KiJ35PZ3icj9IvJC7v/O1c5FCCGkfITRAjIA/tgY87iIJAE8JiL3A/gggAeMMZ8WkTsB3Ang4+VuoC8NHD8xEpR1APV33LjPqderJI9Mxpq29fePO/W0aVFLs9sd553XFpS3bLXL42pZfum+ydcvgBdcHsWZtmlTrSVlRjjjBXqKx2eDclOjlTKmZ9ygV3qpm2hTOQ294FNaKnnllVG13/WabVAekaJcZaVIt9mMCo41OurKP3HtfZmIB+WmAok+SGn4Qej091aXdVAzwPUa9pO5FMO6S5VrZNUncGPMGWPM47nyFIBnAewAcCuAQ7lqhwC8a70aSQgh5FzWpIGLSB+AKwA8AqDXGHMGyE7yAFaMlykiHxGRIyJyZHhoqLTWEkIICQg9gYtIK4B/BPAxY8zkavWXMcbcbYw5YIw5sLW7u5g2EkIIWYFQ9nAiEkd28v6qMebbud0DIrLNGHNGRLYBGCxXo/JpuwBw110PBeWU0kFHR11zthtuuCQo60S7C552Wq96oL3dTUbQ2aFNB63W6ZsbbhSF3gfofvHRmni+oPNrQbsxZ7zwA9PTtn9Ppq1mPZ/Krz+2tGizP1ez1vc4lcekMEsC5USbES56pmlLSmetq7PjoiaDVG4QfiiGoaEJdcyOhQYvdEKy1WrRxby7CvsODijPu6a1EsYKRQDcA+BZY8zfqkP3AjiYKx8EcLj8zSOEEJKPME/gVwP4XQC/FJEnc/v+FMCnAXxTRG4HcBLA76xPEwkhhKzEqhO4MebHyL86vL68zTkXf5mil0szM6mgfNVVfU690VFlztZkl7m+mVqjMnVr9szAEupYpWSTQvh9k49iEgv4ZluplDXFTKtjvmXW7Jytl0rpJBCu1KJlE20CGPe8XOfT1kzRqHP43rA6mUKigGdncbht10MhXl+34n5SXvzxqKNeZpS80tTkfofb21uDcn19OA/qsBKuT7mlyjBwyBFCSEThBE4IIRGl+qIyefjL/6997RdB+ZKLrVliotG1QtAectoZL97g/mYlW229rVvdYETVnrhB941evvl9FjaxgA5SNeclajh92lqU6ABgPn6OzGWalYwFAJ0dNrBQT4+NwuAn1dAJE1IpHWDKHbq71d9YjmBjTis8b05t1ZRI6EQftENZL/xUqwuZlQeaTrzibxfzfQ4rUwKVyYFa3TMUIYSQvHACJ4SQiMIJnBBCIkpVauBap91tupxj/+r33xSUn3v+dFBOeZ5+OiiZljDPUSnVwfTCgn80FBsVocz/vPb2KmS2FPa62tttdHTKOTam/sYFFSFQJ34GAKPUY/0eYvs218t1166tQVlHLZz3EjWcOGG1dx0hMJl033loU9FiNHA/Up2OlphO+d671kxRm62tRwLlKLGe3wP/3UomjwbemHCfSYsMRhng69rleNdUTvgETgghEYUTOCGERJSqX/OdsxQxdmnb
3W0TLkxNuQkDFtJ2SZxW0sCStxabmLDenK0ts84x7cWll8dhA9ycs8Qqc4Cbci/TdICo4SEv16XqtvZ2K1+cazpnK6bTttzZ6Uoo2gxQe19OTqWceqm0lbV0sP72NtfjrlQTPj8RwLgKjjYz6yatEOVy2dVlx2C1JPrYSNYzqchSgWQMelsn2PDvQTFymm6TbitQHqmynPAJnBBCIgoncEIIiShVL6H4NDbZ5fvuXdZCZX7OXeZOz9ilnZYDxifceqNjVnpp9gLhdHTaY9rKJWyAG38ZuVEBboplYUF7YrqWFzpo0wUX2ORLjY3uEDp1ajgoj09Y+ePMGTcXaXv72pebejmsA2D5x4rBj/k9n7LjZGbG7YumZutFGovFVixvVjYyJr2W9CYnXElPB7DSVkE9vW5u9bABrPJRCVlkLfAJnBBCIgoncEIIiSicwAkhJKJUpQZeyKNLmxYlk1aL9KPd6aD+2nNwcsrVwIeHrbb2k5+87Bx7/21XBuUtW61J4XomUqgk2lQw5mUniDfa7fY2qws2ev2eSllzwfm5MbXf1ZhT6p60Ihw6mlxHRzLvsWIwnnlpJmPH2ZIXCs+pW+MBCNc3qYgdIyOjromvTiqSbLTj0Y9SudlNO/kETgghEYUTOCGERJSqkVBCe3Td8tqgvGu3NSNsV3IKAMSU+dDsnF2KpV4cdur90/3PBWU/n9593z0alA8efL1tQ8gANzq4DVB9Jkl+nsFMxm77QYA6O23/as9JbcIFAO1tLUF5MGEDYs2nXIlC57fMl28TANJpncRBm+y5zx5+O5YJG2DJj0PlJ5bQaHWpxhWUsicV0R6WelzMewlGltT4qVcB1aolL+lGBbirkj+XEELIWuEETgghEaViEkrRHl33/jIoa4+umLeE1tutrXaZ8sRTZ516elnuWyLccMMlQVlbXnS0Nzv18nmWVZtk4pP2Ym/rvJf+UlQHbdJekHW+tUqDtUppabGySybjBqlKp9OqbM9xqt/1ctUyj75XUiDQcz45Dsi/zO/qdO/Volqi+7Go41W4ZN8o1jsmvQ4qNjZmLcRS3ljVeUlbmvNLaxvJegb2ykeNDT9CCNk8rDqBi0ijiPxCRJ4SkaMi8qnc/j0i8oiIvCAi3xCRhtXORQghpHyEeQJPAXiLMWY/gMsB3CAiVwH4DIDPGmP2AhgDcPv6NZMQQojPqhq4yQrDy2JUPPfPAHgLgH+Z238IwCcBfD7shQtF9NNakq8LFePRpaPEvfPm1zrHvvJVe63f+I3dzrHeHusjqNtRSH+NEqm0a7I3M2u3fW1Xm+kVirqn63V02P6bnHQ9YLXWOasSJix4UQF1RLqWFpUH07sH+fTHsBHyDn7gSufY0JA9n04IArh9kygxCmLUKfd7Hp2LNF8ZcD0sm9W48KNUricbGZkxH6E0cBGJiciTAAYB3A/gJQDjxpjlb1c/gB0ltYQQQsiaCDWBG2MWjTGXA9gJ4EoAl6xUbaXPishHROSIiBwZHhoqvqWEEEIc1rT+M8aMi8jDAK4C0CEi9bmn8J0ATuf5zN0A7gaAKw4cCCb5QkFwdvdZD8uDH7jKORbWo0uTTNolVm+vGwTpHTfa3yLj+dW1JqvbDLBUzJK3rX+C8zsiFkSbcWkTQz8X6fS0TZaRTNp6Cwule0AWE2BpetqVeAaHZoLyT3/2knNMBznT+TKbmhMgpaE9LLVsovcDQKMKVpdMWrPeSibVqESQuzBWKN0i0pErNwF4K4BnATwE4D25agcBHC5bqwghhKxKmCfwbQAOiUgM2Qn/m8aY+0TkGQBfF5G/APAEgHvWsZ2EEEI8wlihPA3gihX2H0NWDyeEEFIBKudKP+EGaN+/f2dQ7lMaeDkwSuw1S25UM52Ut67O1c9EKa2bxXQwLMX+tbqftLuznzR4dNTeh/p6636/4JnsaU1dJ1aur3fVP51zIV+EPP+YfocyODjl1Pv+D44iH/erCJb7Ltmetx5ZO1rrnlVm
rX48g3plRhivX7srvW8C6M9Hy/hhMwq9dyt3ZMYw0JWeEEIiCidwQgiJKBWTUPxkB44n3fHRFcsAgGtU+US4a+lEBaMj7lJ5eMSai23pck0MNyPaA1KXAUCU7aAf3dGXLPLheM/p4PxeNDmzZLeHhsbtfs+2UXvW9fTYZBF+e5qVCV/YCHnay/PwvU/nPdbgeVveeOO+vOckpZFRiTSmpm0Ey4HBaaeeni/qxI6FlpZJ94QhtcAf/vCFoKxljVtv2Z/3M+sdmTEMfAInhJCIwgmcEEIiSsUkFP/NbNgccnqpExYtoQwNuRLK1JRdpu3bt805phMStCYr72XnL730G/KwyzItDYyMTDjHdPKE7u4W51hYIxxtRTI8NJG3nrZWicXUyT0JpaPdRinWAYwKSTph+6K/fywoX7rvPOfYmdO27dddd6FzbNu29jVfqxooZHkR9vu33kxN2nb8+jkbeuPpp/qdeq2t9vv4Sr+9V01NxX1Pr7lmb1Au9p5WYizwCZwQQiIKJ3BCCIkonMAJISSiVI0ZIXavXM/X5jpubcp7LB8Laev1d/LksHPs/z5gNfXnnh90jo2p8ydbG1FtaN0urEnl/JyNnnfs2IBzbHbW9tOIMq8EgIkJ+7mGRBz5cM7/su3P2RnXA3bHzo6gPDpqTcTGx+edei0tdohOTdlzDwy6976+fu1R6KaVmdromOuJd+GFPbZNE17g/uM2cL/fT1GimPdJ5Ua/dwGAqSl7/33vWM3l++37qgsvtN6wjU3hMjuW431SNcAncEIIiSicwAkhJKJUTEJZWloquL1MW5trFuQsb/LILj4zM3pZ7i7Z3nj1nqC8kHED5ly4tzco67x71UIxS+C5OSsb+EvU4WG7nUq596Onx3qpbt1qy/79mZmx59dJG+q8JJsvn/A8bJfbN5tythNKrkkmrcemby7mnz8f2oxSm5BOz7jSjU5okZxzkz38/OcvB2XfSzNKOBJchZidce/30WesuaBOxLKly5U19imzz0svtYHwGhvDSSibBT6BE0JIROEETgghEaVi67+MFx96fMwGoVlQx5LJVqdeY6NdUmvLA38JvagDKaWsBUQ67UoDXV3W47C3173Wq19tl2ltbW5c4EpQDouckVW6AKoAAA3dSURBVGFr8fHUU67pypHHTgblei+3YFOT7fd33vxatd8dQiOjVoYZVAGIEgl3advb2xaUT52y0ogfD7yjw1r/vOoCaxkSL1K6mFZWDs+/YK1kZjwJpb1dXfdVvc6xliqU08JQLZYX+rt/6pQrpZ08afv20UfteLzuWlfu6e62VkzFWCDp+SG7vbhivZg3r/hB3ioNn8AJISSicAInhJCIwgmcEEIiSvVo4OPWE04nWUi2up5u23fYfJltbVa/9jXwJaVpjY9bXXZu3su5WK+Dwbs6bbWZiJ2jYRZhUjkwYCO3PfDgr51j5/Va80BfV/zQ770+KHdvtf0+65n9Lak2NSuvuPZ2t+3dPTain36v4XvmvfbSHUG5a4u9bkM83L3xtc1h9Q4grTx0pz1zth4VjfGSS9xIhe3tbqRGsjb0d/255844xw7f+6ugnFbvQ5562q137bUXB+ViNPAlTwMfG7Xv4PSY2bK1w6lHDZwQQkhZ4AROCCERpYKemK7X49y8Xc5qD7lUyl0CJ5PWvCuultHplBssaX7ees/poEW+dKOX71u2tDvHdAKBzYJOzPDmN+5xjk2ope0b3uBqMnV1dsk5MWllLT9I1eS0lUASCdt/yaQrT9WrJA7z6t4bd1ggk7HXDSubaHyzxPGxqRWP+QkiWpWcFq+yZXPU0f3+4EMvOcfSKndqnRojV7+hz6kXi6392VN/9+fmXclsYtKOfZ27JNnmjp9ElVmQ8gmcEEIiSugJXERiIvKEiNyX294jIo+IyAsi8g0Rqa0gBIQQUmHWsia9A8CzAJZd6D4D4LPGmK+LyH8DcDuAz4c/nbtW1kvnxUW7kUq7S5h+5bU3
M2tlkro6N2nj5IT1rJua1kt097o652JDg7tU1vGnCxGl+MHa+277dlcyuvmmfUF5etaTRqbsdkPc9vXs3MoebIArPSS8GOIZFThMx9R+5BfHnXpblOVJq4rJXkwOUACYUnJaWlm8NHlxpDs6rVdusV6fZGX093v//h3OsSFlJfSmN14QlPdcsNWp51s12XO741FLMtpi6syZEafe5KSdL2IxO1a7ezxNr8oI9QQuIjsB3ATgC7ltAfAWAN/KVTkE4F3r0UBCCCErE1ZC+RyAPwGw/EZpC4BxY8zyI0w/gB0rfVBEPiIiR0TkyPDQ0EpVCCGEFMGqE7iI3Axg0BjzmN69QtUV1xrGmLuNMQeMMQe2dncX2UxCCCE+YcS9qwHcIiLvANCIrAb+OQAdIlKfewrfCeD02i7t/gbElMmQKD077ZkRji9YszKty/q/KHVif0/SC7acaHT/ZO1tOevpvmcHrMnZ4cNPB+Vbb73MqdcHm9+z2vVw7XmqvVoBYEolYEh7yS3SadWfKqKjeB2v3yO0t1ubq/p6V2M+fcZ6hH7v+0fztvf7338mKOsEG4XQXnZp7x3KjLrH2pTVNxXUCSOK8fSrRsJGrFyPMZxRpoPaPFebBQPAW6+/KCh3qSQO5/W6HpF63KVS9l3VvJd8Y3DIvjMbHLTvWmY889f5lH0f0tlh26f1+mpk1SdwY8wnjDE7jTF9AN4L4EFjzG0AHgLwnly1gwAOr1srCSGEnEMpduAfB/BHIvIispr4PeVpEiGEkDCsyT7KGPMwgIdz5WMAriz6wt6ytLnZLlk7VDD9iUl3STQ3Z5c6RnlW+RKKXmLpY743n17a68QCAPB3f/+jFduu5RQAuOOj161YrxqJqUQNfl7JdmViuOCuMDE/r/pdve5oiLvPAG1ttj9377amXwsL7lL0u9/9lTq28vIaAG68cR/WijYPHB+fdo6lUhm/OgAgkXD/Dm2WKr5OFCG0bHL8hGs6l08W1JIgUB5JZSFj+31wYCwonzsP2PGj5RXfU1ZbAw8rmWTMu9/jY1YW1JJrykvsos+35LsDVzH0xCSEkIjCCZwQQiJKxVzM/KXT9u122dbWZr3gXnhhwKm3tKQCXal4zp4jprPs1ZdqbHSvq61Q7vvu486xfG/tD35g74r7o4bvebpnj7Xy6O52c0SePGlzFzY02N99ndsSAETJKy0tdgmsY0ADwGWX2Rjbg4M2FvN1113k1Nu5w1ofFFrKLzk5UK3sNjIylbdekxoLfl/4S/YokU82ueuuh/J+Rssp6yEJaplMx/n2479rCa1JWYz5ntbzc3YemJiw1iXaAxsAJpRssrDgyiabgeiOUkIIqXE4gRNCSEThBE4IIRGlajRwva1N3S6+eLtT73mliWv9TCcIAIB4/cqmXy0truncrIpoePnlbjiXH/7o+aCso/htFuKeSaXe9s35dCRAja8V5/NaTCZd/bFX5d+84Qab37Cnu9Wp5+cpzcfikj3/+JjOe+l6YtapRABbt9i/SbcH2DzJPHyT13z43sXlRkcB1Uk6jJfYJaYiXXapiJC+h+WQMh0cGbWa/9SUa/+qo17W19vxnfAiTDbo5CNqrOv91QifwAkhJKJwAieEkIhSlZHqtWlfq7uixqWvsZKKkz/P+ynK5zznB7E5ddoGVQq73Hz4hy842/v37wz1uSjR4C8xy5zUoKvTSlJaxmpqchM/hHWC1HKaTuKgPXcBIB6359++3XqKdnW1OPVimySAlZZG/PHteF/uXt+AbErhcgJEFfJ5HB62MolvRjgyYs0IZ+fsd3pxKX+imGY1hvde2OPUyyeb+FJLtcEncEIIiSicwAkhJKJwAieEkIhS3QIPitditTvx+IR14z5xfNSpd+jLPw/KfrQ2jdYF19vkajPiB3ibnFSJFZSlnx8lLqMiTi4pIdU/nzZNG5+w+qjvPt3Sot3n4yuWV6OSiRHWel0dWbCQi3y52+onD9fJM3TZv5GLi/ad
xdi4vfeZjKuBp1P2vmaW8ivpzvu0pDUh1smyAVcDjxJ8AieEkIjCCZwQQiJK1UsoYfGXtVoO0TKJL6FofG/L3X02Z+TBD1wVlLXJFVD9eTCrAT+3oDF2CaxNv3TuRABIpazUMjJizcoaE+6Sd2JiTpWthFLn2ZfqJXVYE7FCY6vac6VW6rpa+gKA+XmVt1LluS1kRqiTgKQ9aU2bJRa6p1o20TlVy2EeWA1SGp/ACSEkonACJ4SQiBJpCaVQvj8dvL7QUkfLJO+6ZX/eY1peoWRSZvQ62rhL7/5XhoNyp/LeHF10cx+eHbCWRinloZtIuEN81y57T+MN+b0tw44tTZRzpZabRU9CmZiw90vLK741UXphZVHFr6c3EwU8LLW1ia5XrGdxteQYXYZP4IQQElE4gRNCSEThBE4IIREl0hq41pI+9NZ/CPWZx498Yr2aQwrgR/sbG7OJaLVcOjLmRotMzFjzs4UFW56YdKMMzqW0d58tNjW64mlvb3uo9hYztr54z++GqlcLNDa5iVPSKasdz84u+NUDfK07DFPTNpGxb+JbbsKOi8P3PhWU13PO4RM4IYREFE7ghBASUcQPOrOuFxMZAjADYHi1ujXCVrAvlmFfWNgXFvZFlt3GmG5/54ZO4AAgIkeMMQc29KJVCvvCwr6wsC8s7IvCUEIhhJCIwgmcEEIiSiUm8LsrcM1qhX1hYV9Y2BcW9kUBNlwDJ4QQUh4ooRBCSEThBE4IIRFlQydwEblBRJ4TkRdF5M6NvHalEZHzReQhEXlWRI6KyB25/V0icr+IvJD7v7PSbd0oRCQmIk+IyH257T0i8kiuL74hIg2VbuNGICIdIvItEfl1bnz8Zq2OCxH5w9z341ci8jURaazVcRGGDZvARSQG4O8A3AhgH4D3ici+jbp+FZAB8MfGmEsAXAXg3+b+/jsBPGCM2Qvggdx2rXAHgGfV9mcAfDbXF2MAbq9Iqzae/wrgB8aYiwHsR7ZPam5ciMgOAB8FcMAYcymAGID3onbHxaps5BP4lQBeNMYcM8akAXwdwK0beP2KYow5Y4x5PFeeQvZLugPZPjiUq3YIwLsq08KNRUR2ArgJwBdy2wLgLQC+latSE30hIm0A3gzgHgAwxqSNMeOo0XGBbIC9JhGpB9AM4AxqcFyEZSMn8B0AXlHb/bl9NYeI9AG4AsAjAHqNMWeA7CQPoCf/JzcVnwPwJwCW09NuATBujFkOM1gr4+MCAEMA/kdOTvqCiLSgBseFMeYUgL8BcBLZiXsCwGOozXERio2cwGWFfTVnwygirQD+EcDHjDGTlW5PJRCRmwEMGmMe07tXqFoL46MewOsAfN4YcwWysYI2vVyyEjmd/1YAewBsB9CCrOTqUwvjIhQbOYH3Azhfbe8EcHoDr19xRCSO7OT9VWPMt3O7B0RkW+74NgCDlWrfBnI1gFtE5DiyUtpbkH0i78gtnYHaGR/9APqNMY/ktr+F7IRei+PirQBeNsYMGWMWAHwbwBtQm+MiFBs5gT8KYG/ujXIDsi8n7t3A61eUnMZ7D4BnjTF/qw7dC+BgrnwQwOGNbttGY4z5hDFmpzGmD9lx8KAx5jYADwF4T65arfTFWQCviMhFuV3XA3gGNTgukJVOrhKR5tz3Zbkvam5chGWjw8m+A9knrRiALxpj/nLDLl5hROSNAP4fgF/C6r5/iqwO/k0Au5AdwL9jjBmtSCMrgIhcC+DfGWNuFpELkH0i7wLwBID3G2NSlWzfRiAilyP7MrcBwDEAv4fsw1XNjQsR+RSAf4Gs1dYTAD6MrOZdc+MiDHSlJ4SQiEJPTEIIiSicwAkhJKJwAieEkIjCCZwQQiIKJ3BCCIkonMAJISSicAInhJCI8v8Bt30iq5Dkn2oAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "import pylab\n",
    "# Draw one sample from the validation generator, predict it, and display\n",
    "# the image with the predicted code as the title.\n",
    "X, y, sample_img = next(generator(height, width, 1, n_class))\n",
    "y_pred = model.predict(X)\n",
    "# y_pred is a list of 4 heads, each (1, n_class); argmax over the class axis\n",
    "# gives the predicted digit index for each character position.\n",
    "pred = np.argmax(y_pred, axis=-1)\n",
    "# Join the predicted digits directly. The original parsed str(pred)\n",
    "# character-by-character and collected matches into a variable named\n",
    "# 'list', shadowing the builtin — both fragile and misleading.\n",
    "pred_code = ''.join(str(d) for d in np.ravel(pred))\n",
    "plt.imshow(sample_img)\n",
    "plt.title(pred_code)\n",
    "pylab.show()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
