{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tensorflow import keras\n",
    "import matplotlib.pyplot as plt\n",
    "import numpy as np\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "data = keras.datasets.imdb\n",
    "max_word = 10000"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "引入预训练词向量"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "word_index = data.get_word_index()\n",
    "embeddings_index = {}\n",
    "f = open('./glove.6B.100d.txt', encoding=\"utf-8\")\n",
    "for line in f:\n",
    "    values = line.split()\n",
    "    word = values[0]\n",
    "    coefs = np.asarray(values[1:], dtype='float32')\n",
    "    embeddings_index[word] = coefs\n",
    "f.close()\n",
    "embedding_matrix = np.zeros((max_word, 100))\n",
    "for word, i in word_index.items():\n",
    "    idx = i + 3  # load_data() shifts word ids by index_from=3 (0=pad, 1=start, 2=unknown)\n",
    "    if idx >= max_word:\n",
    "        continue\n",
    "    embedding_vector = embeddings_index.get(word)  # look up this word's pretrained GloVe vector\n",
    "    if embedding_vector is not None:\n",
    "        embedding_matrix[idx] = embedding_vector"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "(x_train,y_train),(x_test,y_test)=data.load_data(num_words=max_word)\n",
    "x_train = keras.preprocessing.sequence.pad_sequences(x_train,300)\n",
    "x_test = keras.preprocessing.sequence.pad_sequences(x_test,300)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[   0,    0,    0, ...,   19,  178,   32],\n",
       "       [   0,    0,    0, ...,   16,  145,   95],\n",
       "       [   0,    0,    0, ...,    7,  129,  113],\n",
       "       ...,\n",
       "       [   0,    0,    0, ...,    4, 3586,    2],\n",
       "       [   0,    0,    0, ...,   12,    9,   23],\n",
       "       [   0,    0,    0, ...,  204,  131,    9]])"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_train"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "单独的词嵌入，查看是否归一化"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": true
   },
   "source": [
    "model = keras.Sequential()\n",
    "model.add(keras.layers.Embedding(max_word,100,input_length=300))\n",
    "w2v_output = model.predict(x_train)\n",
    "w2v_output"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "使用预训练词向量"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "model.add(keras.layers.Embedding(max_word,100,weights=[embedding_matrix],input_length=300))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "全连接"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": true
   },
   "source": [
    "model = keras.Sequential()\n",
    "model.add(keras.layers.Embedding(max_word,100,input_length=300))\n",
    "model.add(keras.layers.Flatten())\n",
    "model.add(keras.layers.Dense(16,activation='relu'))\n",
    "model.add(keras.layers.Dropout(0.5))\n",
    "model.add(keras.layers.Dense(16,activation='relu'))\n",
    "model.add(keras.layers.Dropout(0.5))\n",
    "model.add(keras.layers.Dense(16,activation='relu'))\n",
    "model.add(keras.layers.Dropout(0.5))\n",
    "model.add(keras.layers.Dense(1,activation='sigmoid'))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "普通CNN"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": true
   },
   "source": [
    "model = keras.Sequential()\n",
    "model.add(keras.layers.Embedding(max_word,100,input_length=300))\n",
    "#model.add(keras.layers.Embedding(max_word,100,embeddings_initializer=keras.initializers.Constant(embedding_matrix),input_length=300))\n",
    "model.add(keras.layers.Conv1D(128,3,activation='relu'))\n",
    "model.add(keras.layers.MaxPool1D())\n",
    "model.add(keras.layers.Conv1D(64,3,activation='relu'))\n",
    "model.add(keras.layers.MaxPool1D())\n",
    "model.add(keras.layers.Conv1D(32,3,activation='relu'))\n",
    "model.add(keras.layers.MaxPool1D())\n",
    "model.add(keras.layers.GlobalAveragePooling1D())\n",
    "model.add(keras.layers.Dropout(0.5))\n",
    "model.add(keras.layers.Dense(1,activation='sigmoid'))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "LSTM"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": true
   },
   "source": [
    "model = keras.Sequential()\n",
    "model.add(keras.layers.Embedding(max_word,100,input_length=300))\n",
    "model.add(keras.layers.LSTM(128))\n",
    "model.add(keras.layers.Dropout(0.5))\n",
    "model.add(keras.layers.Dense(1,activation='sigmoid'))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "textCNN"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "main_input = keras.Input(shape=(300,))\n",
    "#embedder = keras.layers.Embedding(max_word,100,input_length=300)\n",
    "embedder = keras.layers.Embedding(max_word,100,embeddings_initializer=keras.initializers.Constant(embedding_matrix),input_length=300,trainable=False)\n",
    "embed = embedder(main_input)\n",
    "cnn1 = keras.layers.Conv1D(32, 3, padding='same', strides=1, activation='relu')(embed)\n",
    "cnn1 = keras.layers.MaxPool1D()(cnn1)\n",
    "cnn2 = keras.layers.Conv1D(32, 4, padding='same',strides=1, activation='relu')(embed)\n",
    "cnn2 = keras.layers.MaxPool1D()(cnn2)\n",
    "cnn3 = keras.layers.Conv1D(32, 5, padding='same',strides=1, activation='relu')(embed)\n",
    "cnn3 = keras.layers.MaxPool1D()(cnn3)\n",
    "cnn = keras.layers.concatenate([cnn1,cnn2,cnn3],axis=-1)\n",
    "#cnn = keras.layers.Flatten()(cnn)\n",
    "cnn = keras.layers.GlobalAveragePooling1D()(cnn)\n",
    "drop =keras.layers.Dropout(0.5)(cnn)\n",
    "#bn = keras.layers.BatchNormalization()(cnn)\n",
    "main_output = keras.layers.Dense(1, activation='sigmoid')(drop)\n",
    "model = keras.Model(inputs=main_input, outputs=main_output)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "keras.utils.plot_model(model=model,show_shapes=True,show_layer_names=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "input_2 (InputLayer)            (None, 300)          0                                            \n",
      "__________________________________________________________________________________________________\n",
      "embedding_1 (Embedding)         (None, 300, 100)     1000000     input_2[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_3 (Conv1D)               (None, 300, 32)      9632        embedding_1[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_4 (Conv1D)               (None, 300, 32)      12832       embedding_1[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_5 (Conv1D)               (None, 300, 32)      16032       embedding_1[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "max_pooling1d_3 (MaxPooling1D)  (None, 150, 32)      0           conv1d_3[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "max_pooling1d_4 (MaxPooling1D)  (None, 150, 32)      0           conv1d_4[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "max_pooling1d_5 (MaxPooling1D)  (None, 150, 32)      0           conv1d_5[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_1 (Concatenate)     (None, 150, 96)      0           max_pooling1d_3[0][0]            \n",
      "                                                                 max_pooling1d_4[0][0]            \n",
      "                                                                 max_pooling1d_5[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "global_average_pooling1d_1 (Glo (None, 96)           0           concatenate_1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_1 (Dropout)             (None, 96)           0           global_average_pooling1d_1[0][0] \n",
      "__________________________________________________________________________________________________\n",
      "dense_1 (Dense)                 (None, 1)            97          dropout_1[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 1,038,593\n",
      "Trainable params: 38,593\n",
      "Non-trainable params: 1,000,000\n",
      "__________________________________________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "model.summary()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [],
   "source": [
    "model.compile(optimizer=keras.optimizers.RMSprop(learning_rate=0.0001),loss='binary_crossentropy',metrics=['acc'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 25000 samples, validate on 25000 samples\n",
      "Epoch 1/500\n",
      "25000/25000 [==============================] - 3s 128us/step - loss: 0.7112 - acc: 0.5048 - val_loss: 0.6896 - val_acc: 0.5572\n",
      "Epoch 2/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6975 - acc: 0.5144 - val_loss: 0.6871 - val_acc: 0.5840\n",
      "Epoch 3/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6900 - acc: 0.5360 - val_loss: 0.6853 - val_acc: 0.5756\n",
      "Epoch 4/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6870 - acc: 0.5456 - val_loss: 0.6827 - val_acc: 0.6006\n",
      "Epoch 5/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.6861 - acc: 0.5495 - val_loss: 0.6810 - val_acc: 0.6045\n",
      "Epoch 6/500\n",
      "25000/25000 [==============================] - 3s 104us/step - loss: 0.6817 - acc: 0.5704 - val_loss: 0.6789 - val_acc: 0.6034\n",
      "Epoch 7/500\n",
      "25000/25000 [==============================] - 3s 102us/step - loss: 0.6806 - acc: 0.5702 - val_loss: 0.6767 - val_acc: 0.6087\n",
      "Epoch 8/500\n",
      "25000/25000 [==============================] - 3s 102us/step - loss: 0.6783 - acc: 0.5766 - val_loss: 0.6747 - val_acc: 0.6114\n",
      "Epoch 9/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6764 - acc: 0.5845 - val_loss: 0.6726 - val_acc: 0.6144\n",
      "Epoch 10/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6752 - acc: 0.5813 - val_loss: 0.6708 - val_acc: 0.6171\n",
      "Epoch 11/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.6730 - acc: 0.5882 - val_loss: 0.6689 - val_acc: 0.6192\n",
      "Epoch 12/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.6713 - acc: 0.5924 - val_loss: 0.6671 - val_acc: 0.6213\n",
      "Epoch 13/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6702 - acc: 0.5947 - val_loss: 0.6656 - val_acc: 0.6241\n",
      "Epoch 14/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.6685 - acc: 0.5990 - val_loss: 0.6645 - val_acc: 0.6225\n",
      "Epoch 15/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.6676 - acc: 0.5975 - val_loss: 0.6661 - val_acc: 0.6096\n",
      "Epoch 16/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6656 - acc: 0.6030 - val_loss: 0.6613 - val_acc: 0.6294\n",
      "Epoch 17/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6662 - acc: 0.5997 - val_loss: 0.6602 - val_acc: 0.6316\n",
      "Epoch 18/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6633 - acc: 0.6066 - val_loss: 0.6589 - val_acc: 0.6316\n",
      "Epoch 19/500\n",
      "25000/25000 [==============================] - 3s 103us/step - loss: 0.6634 - acc: 0.6104 - val_loss: 0.6590 - val_acc: 0.6226\n",
      "Epoch 20/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6605 - acc: 0.6100 - val_loss: 0.6559 - val_acc: 0.6379\n",
      "Epoch 21/500\n",
      "25000/25000 [==============================] - 3s 102us/step - loss: 0.6595 - acc: 0.6102 - val_loss: 0.6554 - val_acc: 0.6342\n",
      "Epoch 22/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6600 - acc: 0.6116 - val_loss: 0.6538 - val_acc: 0.6388\n",
      "Epoch 23/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6580 - acc: 0.6139 - val_loss: 0.6541 - val_acc: 0.6355\n",
      "Epoch 24/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6563 - acc: 0.6163 - val_loss: 0.6517 - val_acc: 0.6395\n",
      "Epoch 25/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.6557 - acc: 0.6201 - val_loss: 0.6510 - val_acc: 0.6398\n",
      "Epoch 26/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6552 - acc: 0.6213 - val_loss: 0.6511 - val_acc: 0.6360\n",
      "Epoch 27/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6562 - acc: 0.6196 - val_loss: 0.6494 - val_acc: 0.6431\n",
      "Epoch 28/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6523 - acc: 0.6233 - val_loss: 0.6477 - val_acc: 0.6472\n",
      "Epoch 29/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.6522 - acc: 0.6255 - val_loss: 0.6469 - val_acc: 0.6488\n",
      "Epoch 30/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6513 - acc: 0.6244 - val_loss: 0.6460 - val_acc: 0.6467\n",
      "Epoch 31/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6497 - acc: 0.6254 - val_loss: 0.6448 - val_acc: 0.6484\n",
      "Epoch 32/500\n",
      "25000/25000 [==============================] - 3s 103us/step - loss: 0.6503 - acc: 0.6300 - val_loss: 0.6443 - val_acc: 0.6509\n",
      "Epoch 33/500\n",
      "25000/25000 [==============================] - 3s 103us/step - loss: 0.6511 - acc: 0.6270 - val_loss: 0.6438 - val_acc: 0.6497\n",
      "Epoch 34/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.6509 - acc: 0.6270 - val_loss: 0.6448 - val_acc: 0.6428\n",
      "Epoch 35/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.6496 - acc: 0.6274 - val_loss: 0.6421 - val_acc: 0.6539\n",
      "Epoch 36/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6469 - acc: 0.6336 - val_loss: 0.6410 - val_acc: 0.6552\n",
      "Epoch 37/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6470 - acc: 0.6307 - val_loss: 0.6403 - val_acc: 0.6549\n",
      "Epoch 38/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.6457 - acc: 0.6351 - val_loss: 0.6396 - val_acc: 0.6563\n",
      "Epoch 39/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.6463 - acc: 0.6337 - val_loss: 0.6401 - val_acc: 0.6484\n",
      "Epoch 40/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.6432 - acc: 0.6412 - val_loss: 0.6381 - val_acc: 0.6565\n",
      "Epoch 41/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.6424 - acc: 0.6386 - val_loss: 0.6372 - val_acc: 0.6580\n",
      "Epoch 42/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.6428 - acc: 0.6372 - val_loss: 0.6367 - val_acc: 0.6586\n",
      "Epoch 43/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.6422 - acc: 0.6368 - val_loss: 0.6361 - val_acc: 0.6568 loss: 0.6428 - acc: \n",
      "Epoch 44/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.6411 - acc: 0.6400 - val_loss: 0.6359 - val_acc: 0.6590\n",
      "Epoch 45/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.6426 - acc: 0.6374 - val_loss: 0.6346 - val_acc: 0.6589 loss: 0.6434 - acc\n",
      "Epoch 46/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6406 - acc: 0.6436 - val_loss: 0.6339 - val_acc: 0.6610\n",
      "Epoch 47/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6397 - acc: 0.6406 - val_loss: 0.6331 - val_acc: 0.6620\n",
      "Epoch 48/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.6392 - acc: 0.6426 - val_loss: 0.6324 - val_acc: 0.6622\n",
      "Epoch 49/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6380 - acc: 0.6441 - val_loss: 0.6319 - val_acc: 0.6601\n",
      "Epoch 50/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6381 - acc: 0.6444 - val_loss: 0.6314 - val_acc: 0.6622\n",
      "Epoch 51/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6368 - acc: 0.6479 - val_loss: 0.6305 - val_acc: 0.6652\n",
      "Epoch 52/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.6370 - acc: 0.6444 - val_loss: 0.6302 - val_acc: 0.6635\n",
      "Epoch 53/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6368 - acc: 0.6445 - val_loss: 0.6298 - val_acc: 0.6634\n",
      "Epoch 54/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.6354 - acc: 0.6465 - val_loss: 0.6289 - val_acc: 0.6659\n",
      "Epoch 55/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.6342 - acc: 0.6497 - val_loss: 0.6293 - val_acc: 0.6665\n",
      "Epoch 56/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6333 - acc: 0.6522 - val_loss: 0.6271 - val_acc: 0.6669\n",
      "Epoch 57/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6317 - acc: 0.6559 - val_loss: 0.6265 - val_acc: 0.6694\n",
      "Epoch 58/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6333 - acc: 0.6484 - val_loss: 0.6272 - val_acc: 0.6631\n",
      "Epoch 59/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6331 - acc: 0.6495 - val_loss: 0.6255 - val_acc: 0.6703\n",
      "Epoch 60/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6322 - acc: 0.6509 - val_loss: 0.6249 - val_acc: 0.6712loss: 0.6324 -\n",
      "Epoch 61/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6328 - acc: 0.6498 - val_loss: 0.6247 - val_acc: 0.6722\n",
      "Epoch 62/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6304 - acc: 0.6537 - val_loss: 0.6236 - val_acc: 0.6719\n",
      "Epoch 63/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6301 - acc: 0.6534 - val_loss: 0.6233 - val_acc: 0.6718\n",
      "Epoch 64/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6286 - acc: 0.6531 - val_loss: 0.6225 - val_acc: 0.6728\n",
      "Epoch 65/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6280 - acc: 0.6518 - val_loss: 0.6221 - val_acc: 0.6740\n",
      "Epoch 66/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6270 - acc: 0.6570 - val_loss: 0.6210 - val_acc: 0.6734\n",
      "Epoch 67/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6273 - acc: 0.6570 - val_loss: 0.6215 - val_acc: 0.6710\n",
      "Epoch 68/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6289 - acc: 0.6541 - val_loss: 0.6214 - val_acc: 0.6758\n",
      "Epoch 69/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6260 - acc: 0.6594 - val_loss: 0.6193 - val_acc: 0.6762\n",
      "Epoch 70/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6260 - acc: 0.6581 - val_loss: 0.6189 - val_acc: 0.6760\n",
      "Epoch 71/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6269 - acc: 0.6583 - val_loss: 0.6188 - val_acc: 0.6776\n",
      "Epoch 72/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6243 - acc: 0.6642 - val_loss: 0.6180 - val_acc: 0.6771\n",
      "Epoch 73/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6231 - acc: 0.6629 - val_loss: 0.6182 - val_acc: 0.6783 - acc: 0.6\n",
      "Epoch 74/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6221 - acc: 0.6624 - val_loss: 0.6172 - val_acc: 0.6785\n",
      "Epoch 75/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6230 - acc: 0.6609 - val_loss: 0.6161 - val_acc: 0.6803\n",
      "Epoch 76/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6238 - acc: 0.6615 - val_loss: 0.6156 - val_acc: 0.6814\n",
      "Epoch 77/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6247 - acc: 0.6602 - val_loss: 0.6155 - val_acc: 0.6802\n",
      "Epoch 78/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6215 - acc: 0.6639 - val_loss: 0.6173 - val_acc: 0.6728\n",
      "Epoch 79/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6213 - acc: 0.6642 - val_loss: 0.6153 - val_acc: 0.6814\n",
      "Epoch 80/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6199 - acc: 0.6648 - val_loss: 0.6155 - val_acc: 0.6806\n",
      "Epoch 81/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6215 - acc: 0.6640 - val_loss: 0.6134 - val_acc: 0.6812\n",
      "Epoch 82/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6199 - acc: 0.6672 - val_loss: 0.6135 - val_acc: 0.6786\n",
      "Epoch 83/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6188 - acc: 0.6670 - val_loss: 0.6119 - val_acc: 0.6847\n",
      "Epoch 84/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6194 - acc: 0.6642 - val_loss: 0.6112 - val_acc: 0.6848- loss: 0.\n",
      "Epoch 85/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6183 - acc: 0.6683 - val_loss: 0.6112 - val_acc: 0.6863\n",
      "Epoch 86/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6178 - acc: 0.6672 - val_loss: 0.6104 - val_acc: 0.6846ss: 0.6169 - acc\n",
      "Epoch 87/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6171 - acc: 0.6694 - val_loss: 0.6106 - val_acc: 0.6867\n",
      "Epoch 88/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6175 - acc: 0.6693 - val_loss: 0.6096 - val_acc: 0.6844\n",
      "Epoch 89/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6156 - acc: 0.6695 - val_loss: 0.6086 - val_acc: 0.6874\n",
      "Epoch 90/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6167 - acc: 0.6697 - val_loss: 0.6087 - val_acc: 0.6886\n",
      "Epoch 91/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6156 - acc: 0.6713 - val_loss: 0.6080 - val_acc: 0.6863\n",
      "Epoch 92/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6143 - acc: 0.6723 - val_loss: 0.6083 - val_acc: 0.6880\n",
      "Epoch 93/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6144 - acc: 0.6717 - val_loss: 0.6069 - val_acc: 0.6902\n",
      "Epoch 94/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6128 - acc: 0.6706 - val_loss: 0.6065 - val_acc: 0.6904\n",
      "Epoch 95/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6137 - acc: 0.6721 - val_loss: 0.6057 - val_acc: 0.6888\n",
      "Epoch 96/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6118 - acc: 0.6750 - val_loss: 0.6050 - val_acc: 0.6898\n",
      "Epoch 97/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6128 - acc: 0.6744 - val_loss: 0.6048 - val_acc: 0.6895\n",
      "Epoch 98/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6124 - acc: 0.6745 - val_loss: 0.6041 - val_acc: 0.6925\n",
      "Epoch 99/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6106 - acc: 0.6728 - val_loss: 0.6037 - val_acc: 0.6913\n",
      "Epoch 100/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6099 - acc: 0.6758 - val_loss: 0.6028 - val_acc: 0.6934\n",
      "Epoch 101/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6094 - acc: 0.6783 - val_loss: 0.6021 - val_acc: 0.6951\n",
      "Epoch 102/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6102 - acc: 0.6781 - val_loss: 0.6024 - val_acc: 0.6950\n",
      "Epoch 103/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6101 - acc: 0.6776 - val_loss: 0.6039 - val_acc: 0.6883\n",
      "Epoch 104/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6075 - acc: 0.6798 - val_loss: 0.6008 - val_acc: 0.6945\n",
      "Epoch 105/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6100 - acc: 0.6759 - val_loss: 0.6015 - val_acc: 0.6918\n",
      "Epoch 106/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6080 - acc: 0.6775 - val_loss: 0.6018 - val_acc: 0.6958\n",
      "Epoch 107/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6061 - acc: 0.6825 - val_loss: 0.5995 - val_acc: 0.6955\n",
      "Epoch 108/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6078 - acc: 0.6792 - val_loss: 0.6009 - val_acc: 0.6916\n",
      "Epoch 109/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6070 - acc: 0.6788 - val_loss: 0.6000 - val_acc: 0.6974\n",
      "Epoch 110/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6065 - acc: 0.6815 - val_loss: 0.5988 - val_acc: 0.6978\n",
      "Epoch 111/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6057 - acc: 0.6809 - val_loss: 0.5987 - val_acc: 0.6988\n",
      "Epoch 112/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6064 - acc: 0.6801 - val_loss: 0.5972 - val_acc: 0.6987\n",
      "Epoch 113/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6040 - acc: 0.6826 - val_loss: 0.5964 - val_acc: 0.6984\n",
      "Epoch 114/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6042 - acc: 0.6830 - val_loss: 0.5960 - val_acc: 0.6983\n",
      "Epoch 115/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6024 - acc: 0.6851 - val_loss: 0.5967 - val_acc: 0.6947\n",
      "Epoch 116/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6002 - acc: 0.6882 - val_loss: 0.5945 - val_acc: 0.6988\n",
      "Epoch 117/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6025 - acc: 0.6832 - val_loss: 0.5941 - val_acc: 0.7013\n",
      "Epoch 118/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6035 - acc: 0.6846 - val_loss: 0.5940 - val_acc: 0.7011\n",
      "Epoch 119/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.6014 - acc: 0.6865 - val_loss: 0.5936 - val_acc: 0.7024\n",
      "Epoch 120/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.6012 - acc: 0.6873 - val_loss: 0.5928 - val_acc: 0.7025\n",
      "Epoch 121/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.6006 - acc: 0.6868 - val_loss: 0.5924 - val_acc: 0.7021\n",
      "Epoch 122/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.6016 - acc: 0.6867 - val_loss: 0.5922 - val_acc: 0.7044\n",
      "Epoch 123/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5986 - acc: 0.6884 - val_loss: 0.5913 - val_acc: 0.7034\n",
      "Epoch 124/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5959 - acc: 0.6915 - val_loss: 0.5904 - val_acc: 0.7051\n",
      "Epoch 125/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5978 - acc: 0.6877 - val_loss: 0.5903 - val_acc: 0.7049\n",
      "Epoch 126/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5969 - acc: 0.6880 - val_loss: 0.5896 - val_acc: 0.7056\n",
      "Epoch 127/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.5970 - acc: 0.6894 - val_loss: 0.5893 - val_acc: 0.7059\n",
      "Epoch 128/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5965 - acc: 0.6911 - val_loss: 0.5892 - val_acc: 0.7051\n",
      "Epoch 129/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5936 - acc: 0.6910 - val_loss: 0.5884 - val_acc: 0.7077\n",
      "Epoch 130/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5974 - acc: 0.6882 - val_loss: 0.5889 - val_acc: 0.7087\n",
      "Epoch 131/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.5954 - acc: 0.6940 - val_loss: 0.5877 - val_acc: 0.7064\n",
      "Epoch 132/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.5925 - acc: 0.6960 - val_loss: 0.5865 - val_acc: 0.7077\n",
      "Epoch 133/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.5939 - acc: 0.6932 - val_loss: 0.5929 - val_acc: 0.7016\n",
      "Epoch 134/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.5941 - acc: 0.6950 - val_loss: 0.5864 - val_acc: 0.7103\n",
      "Epoch 135/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.5931 - acc: 0.6946 - val_loss: 0.5858 - val_acc: 0.7102\n",
      "Epoch 136/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.5915 - acc: 0.6946 - val_loss: 0.5849 - val_acc: 0.7104\n",
      "Epoch 137/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5913 - acc: 0.6934 - val_loss: 0.5844 - val_acc: 0.7106\n",
      "Epoch 138/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5917 - acc: 0.6965 - val_loss: 0.5841 - val_acc: 0.7112\n",
      "Epoch 139/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5923 - acc: 0.6952 - val_loss: 0.5840 - val_acc: 0.7116\n",
      "Epoch 140/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5901 - acc: 0.6987 - val_loss: 0.5835 - val_acc: 0.7118\n",
      "Epoch 141/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5912 - acc: 0.6944 - val_loss: 0.5829 - val_acc: 0.7124\n",
      "Epoch 142/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.5892 - acc: 0.6954 - val_loss: 0.5820 - val_acc: 0.7124\n",
      "Epoch 143/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.5917 - acc: 0.6952 - val_loss: 0.5824 - val_acc: 0.7131\n",
      "Epoch 144/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.5902 - acc: 0.6974 - val_loss: 0.5815 - val_acc: 0.7136\n",
      "Epoch 145/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.5863 - acc: 0.7022 - val_loss: 0.5845 - val_acc: 0.7112\n",
      "Epoch 146/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.5864 - acc: 0.6995 - val_loss: 0.5803 - val_acc: 0.7123\n",
      "Epoch 147/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.5893 - acc: 0.6955 - val_loss: 0.5801 - val_acc: 0.7158\n",
      "Epoch 148/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5866 - acc: 0.7014 - val_loss: 0.5796 - val_acc: 0.7147\n",
      "Epoch 149/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.5856 - acc: 0.7020 - val_loss: 0.5788 - val_acc: 0.7156\n",
      "Epoch 150/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.5866 - acc: 0.6990 - val_loss: 0.5786 - val_acc: 0.7158\n",
      "Epoch 151/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.5860 - acc: 0.7024 - val_loss: 0.5821 - val_acc: 0.7139\n",
      "Epoch 152/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.5871 - acc: 0.7018 - val_loss: 0.5779 - val_acc: 0.7173\n",
      "Epoch 153/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5853 - acc: 0.7038 - val_loss: 0.5773 - val_acc: 0.7170\n",
      "Epoch 154/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5839 - acc: 0.7067 - val_loss: 0.5776 - val_acc: 0.7167\n",
      "Epoch 155/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5827 - acc: 0.7024 - val_loss: 0.5772 - val_acc: 0.7179\n",
      "Epoch 156/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5832 - acc: 0.7037 - val_loss: 0.5757 - val_acc: 0.7174\n",
      "Epoch 157/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5829 - acc: 0.7034 - val_loss: 0.5749 - val_acc: 0.7194\n",
      "Epoch 158/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5835 - acc: 0.7048 - val_loss: 0.5748 - val_acc: 0.7194\n",
      "Epoch 159/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5806 - acc: 0.7052 - val_loss: 0.5739 - val_acc: 0.7193\n",
      "Epoch 160/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5811 - acc: 0.7077 - val_loss: 0.5740 - val_acc: 0.7188\n",
      "Epoch 161/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5813 - acc: 0.7076 - val_loss: 0.5734 - val_acc: 0.7210\n",
      "Epoch 162/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5825 - acc: 0.7095 - val_loss: 0.5752 - val_acc: 0.7164\n",
      "Epoch 163/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5810 - acc: 0.7060 - val_loss: 0.5729 - val_acc: 0.7215\n",
      "Epoch 164/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5811 - acc: 0.7051 - val_loss: 0.5731 - val_acc: 0.7214\n",
      "Epoch 165/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5805 - acc: 0.7049 - val_loss: 0.5714 - val_acc: 0.7226\n",
      "Epoch 166/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5796 - acc: 0.7079 - val_loss: 0.5709 - val_acc: 0.7226\n",
      "Epoch 167/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5779 - acc: 0.7062 - val_loss: 0.5704 - val_acc: 0.7223\n",
      "Epoch 168/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5809 - acc: 0.7024 - val_loss: 0.5713 - val_acc: 0.7202\n",
      "Epoch 169/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5792 - acc: 0.7054 - val_loss: 0.5710 - val_acc: 0.7206\n",
      "Epoch 170/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5786 - acc: 0.7105 - val_loss: 0.5701 - val_acc: 0.7218\n",
      "Epoch 171/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5774 - acc: 0.7113 - val_loss: 0.5693 - val_acc: 0.7237\n",
      "Epoch 172/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5753 - acc: 0.7122 - val_loss: 0.5690 - val_acc: 0.7253\n",
      "Epoch 173/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5765 - acc: 0.7105 - val_loss: 0.5684 - val_acc: 0.7232\n",
      "Epoch 174/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5752 - acc: 0.7079 - val_loss: 0.5675 - val_acc: 0.7258\n",
      "Epoch 175/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5750 - acc: 0.7110 - val_loss: 0.5669 - val_acc: 0.7274\n",
      "Epoch 176/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5732 - acc: 0.7123 - val_loss: 0.5669 - val_acc: 0.7270\n",
      "Epoch 177/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5738 - acc: 0.7125 - val_loss: 0.5689 - val_acc: 0.7266\n",
      "Epoch 178/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5722 - acc: 0.7106 - val_loss: 0.5657 - val_acc: 0.7270\n",
      "Epoch 179/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5734 - acc: 0.7130 - val_loss: 0.5654 - val_acc: 0.7288\n",
      "Epoch 180/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5724 - acc: 0.7138 - val_loss: 0.5646 - val_acc: 0.7279\n",
      "Epoch 181/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5734 - acc: 0.7102 - val_loss: 0.5652 - val_acc: 0.7256\n",
      "Epoch 182/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5717 - acc: 0.7131 - val_loss: 0.5642 - val_acc: 0.7278\n",
      "Epoch 183/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5747 - acc: 0.7128 - val_loss: 0.5654 - val_acc: 0.7246\n",
      "Epoch 184/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5722 - acc: 0.7134 - val_loss: 0.5640 - val_acc: 0.7264\n",
      "Epoch 185/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5705 - acc: 0.7122 - val_loss: 0.5634 - val_acc: 0.7265\n",
      "Epoch 186/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5694 - acc: 0.7134 - val_loss: 0.5617 - val_acc: 0.7304\n",
      "Epoch 187/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5674 - acc: 0.7160 - val_loss: 0.5613 - val_acc: 0.7316\n",
      "Epoch 188/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5689 - acc: 0.7142 - val_loss: 0.5612 - val_acc: 0.7314\n",
      "Epoch 189/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5688 - acc: 0.7164 - val_loss: 0.5623 - val_acc: 0.7316\n",
      "Epoch 190/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5694 - acc: 0.7147 - val_loss: 0.5605 - val_acc: 0.7317\n",
      "Epoch 191/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5678 - acc: 0.7185 - val_loss: 0.5603 - val_acc: 0.7311\n",
      "Epoch 192/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5667 - acc: 0.7172 - val_loss: 0.5593 - val_acc: 0.7331\n",
      "Epoch 193/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5684 - acc: 0.7158 - val_loss: 0.5592 - val_acc: 0.7338\n",
      "Epoch 194/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5659 - acc: 0.7171 - val_loss: 0.5587 - val_acc: 0.7319\n",
      "Epoch 195/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5677 - acc: 0.7186 - val_loss: 0.5586 - val_acc: 0.7330\n",
      "Epoch 196/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5669 - acc: 0.7174 - val_loss: 0.5584 - val_acc: 0.7315\n",
      "Epoch 197/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5678 - acc: 0.7153 - val_loss: 0.5581 - val_acc: 0.7352\n",
      "Epoch 198/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5665 - acc: 0.7167 - val_loss: 0.5575 - val_acc: 0.7347\n",
      "Epoch 199/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5645 - acc: 0.7177 - val_loss: 0.5569 - val_acc: 0.7329\n",
      "Epoch 200/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5636 - acc: 0.7235 - val_loss: 0.5580 - val_acc: 0.7290\n",
      "Epoch 201/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5654 - acc: 0.7177 - val_loss: 0.5560 - val_acc: 0.7358\n",
      "Epoch 202/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5648 - acc: 0.7227 - val_loss: 0.5563 - val_acc: 0.7376\n",
      "Epoch 203/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5626 - acc: 0.7229 - val_loss: 0.5547 - val_acc: 0.7359\n",
      "Epoch 204/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5601 - acc: 0.7234 - val_loss: 0.5539 - val_acc: 0.7365\n",
      "Epoch 205/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5625 - acc: 0.7208 - val_loss: 0.5546 - val_acc: 0.7383\n",
      "Epoch 206/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5620 - acc: 0.7254 - val_loss: 0.5535 - val_acc: 0.7372\n",
      "Epoch 207/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5613 - acc: 0.7226 - val_loss: 0.5584 - val_acc: 0.7350\n",
      "Epoch 208/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5610 - acc: 0.7223 - val_loss: 0.5546 - val_acc: 0.7390\n",
      "Epoch 209/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5618 - acc: 0.7209 - val_loss: 0.5525 - val_acc: 0.7380\n",
      "Epoch 210/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5612 - acc: 0.7224 - val_loss: 0.5520 - val_acc: 0.7382\n",
      "Epoch 211/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5581 - acc: 0.7262 - val_loss: 0.5522 - val_acc: 0.7403\n",
      "Epoch 212/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5589 - acc: 0.7238 - val_loss: 0.5514 - val_acc: 0.7403\n",
      "Epoch 213/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5603 - acc: 0.7223 - val_loss: 0.5520 - val_acc: 0.7408\n",
      "Epoch 214/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5583 - acc: 0.7243 - val_loss: 0.5503 - val_acc: 0.7395\n",
      "Epoch 215/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5569 - acc: 0.7236 - val_loss: 0.5508 - val_acc: 0.7360\n",
      "Epoch 216/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5569 - acc: 0.7262 - val_loss: 0.5509 - val_acc: 0.7357\n",
      "Epoch 217/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5554 - acc: 0.7287 - val_loss: 0.5483 - val_acc: 0.7410\n",
      "Epoch 218/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5574 - acc: 0.7266 - val_loss: 0.5492 - val_acc: 0.7426\n",
      "Epoch 219/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5569 - acc: 0.7244 - val_loss: 0.5488 - val_acc: 0.7422\n",
      "Epoch 220/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5577 - acc: 0.7255 - val_loss: 0.5487 - val_acc: 0.7402\n",
      "Epoch 221/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5573 - acc: 0.7275 - val_loss: 0.5482 - val_acc: 0.7413\n",
      "Epoch 222/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5559 - acc: 0.7267 - val_loss: 0.5472 - val_acc: 0.7423\n",
      "Epoch 223/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5543 - acc: 0.7286 - val_loss: 0.5484 - val_acc: 0.7371\n",
      "Epoch 224/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5551 - acc: 0.7288 - val_loss: 0.5466 - val_acc: 0.7438\n",
      "Epoch 225/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5535 - acc: 0.7275 - val_loss: 0.5470 - val_acc: 0.7440\n",
      "Epoch 226/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5548 - acc: 0.7286 - val_loss: 0.5457 - val_acc: 0.7441\n",
      "Epoch 227/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5509 - acc: 0.7293 - val_loss: 0.5448 - val_acc: 0.7444\n",
      "Epoch 228/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5498 - acc: 0.7303 - val_loss: 0.5444 - val_acc: 0.7444\n",
      "Epoch 229/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5531 - acc: 0.7301 - val_loss: 0.5486 - val_acc: 0.7361\n",
      "Epoch 230/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5527 - acc: 0.7293 - val_loss: 0.5440 - val_acc: 0.7460\n",
      "Epoch 231/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5500 - acc: 0.7331 - val_loss: 0.5432 - val_acc: 0.7459\n",
      "Epoch 232/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5498 - acc: 0.7305 - val_loss: 0.5451 - val_acc: 0.7453\n",
      "Epoch 233/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5517 - acc: 0.7296 - val_loss: 0.5433 - val_acc: 0.7466\n",
      "Epoch 234/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5518 - acc: 0.7300 - val_loss: 0.5428 - val_acc: 0.7482\n",
      "Epoch 235/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5479 - acc: 0.7320 - val_loss: 0.5419 - val_acc: 0.7483\n",
      "Epoch 236/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5476 - acc: 0.7340 - val_loss: 0.5413 - val_acc: 0.7485\n",
      "Epoch 237/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5498 - acc: 0.7320 - val_loss: 0.5414 - val_acc: 0.7486\n",
      "Epoch 238/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5486 - acc: 0.7329 - val_loss: 0.5411 - val_acc: 0.7464\n",
      "Epoch 239/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5483 - acc: 0.7358 - val_loss: 0.5402 - val_acc: 0.7493\n",
      "Epoch 240/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5472 - acc: 0.7349 - val_loss: 0.5400 - val_acc: 0.7471\n",
      "Epoch 241/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5472 - acc: 0.7362 - val_loss: 0.5400 - val_acc: 0.7496\n",
      "Epoch 242/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5488 - acc: 0.7335 - val_loss: 0.5396 - val_acc: 0.7496\n",
      "Epoch 243/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5465 - acc: 0.7364 - val_loss: 0.5391 - val_acc: 0.7479\n",
      "Epoch 244/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5457 - acc: 0.7353 - val_loss: 0.5383 - val_acc: 0.7496\n",
      "Epoch 245/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5441 - acc: 0.7365 - val_loss: 0.5374 - val_acc: 0.7480\n",
      "Epoch 246/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5463 - acc: 0.7332 - val_loss: 0.5379 - val_acc: 0.7517\n",
      "Epoch 247/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5427 - acc: 0.7381 - val_loss: 0.5371 - val_acc: 0.7518\n",
      "Epoch 248/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5429 - acc: 0.7382 - val_loss: 0.5364 - val_acc: 0.7514\n",
      "Epoch 249/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5438 - acc: 0.7355 - val_loss: 0.5363 - val_acc: 0.7521\n",
      "Epoch 250/500\n",
      "25000/25000 [==============================] - 3s 102us/step - loss: 0.5419 - acc: 0.7372 - val_loss: 0.5358 - val_acc: 0.7524\n",
      "Epoch 251/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.5453 - acc: 0.7366 - val_loss: 0.5365 - val_acc: 0.7518\n",
      "Epoch 252/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5413 - acc: 0.7404 - val_loss: 0.5351 - val_acc: 0.7534\n",
      "Epoch 253/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5425 - acc: 0.7390 - val_loss: 0.5347 - val_acc: 0.7522\n",
      "Epoch 254/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5392 - acc: 0.7399 - val_loss: 0.5356 - val_acc: 0.7469\n",
      "Epoch 255/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5421 - acc: 0.7386 - val_loss: 0.5342 - val_acc: 0.7543\n",
      "Epoch 256/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5421 - acc: 0.7373 - val_loss: 0.5343 - val_acc: 0.7535\n",
      "Epoch 257/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5422 - acc: 0.7355 - val_loss: 0.5338 - val_acc: 0.7540\n",
      "Epoch 258/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5414 - acc: 0.7403 - val_loss: 0.5331 - val_acc: 0.7549\n",
      "Epoch 259/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5409 - acc: 0.7372 - val_loss: 0.5328 - val_acc: 0.7547\n",
      "Epoch 260/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5400 - acc: 0.7395 - val_loss: 0.5325 - val_acc: 0.7551\n",
      "Epoch 261/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5414 - acc: 0.7405 - val_loss: 0.5324 - val_acc: 0.7535\n",
      "Epoch 262/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5374 - acc: 0.7406 - val_loss: 0.5316 - val_acc: 0.7558\n",
      "Epoch 263/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5392 - acc: 0.7392 - val_loss: 0.5319 - val_acc: 0.7552\n",
      "Epoch 264/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5385 - acc: 0.7400 - val_loss: 0.5312 - val_acc: 0.7559\n",
      "Epoch 265/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5387 - acc: 0.7438 - val_loss: 0.5305 - val_acc: 0.7557\n",
      "Epoch 266/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5380 - acc: 0.7420 - val_loss: 0.5304 - val_acc: 0.7566\n",
      "Epoch 267/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5354 - acc: 0.7450 - val_loss: 0.5295 - val_acc: 0.7562\n",
      "Epoch 268/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5404 - acc: 0.7410 - val_loss: 0.5301 - val_acc: 0.7556\n",
      "Epoch 269/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5383 - acc: 0.7402 - val_loss: 0.5311 - val_acc: 0.7521\n",
      "Epoch 270/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.5376 - acc: 0.7407 - val_loss: 0.5294 - val_acc: 0.7570\n",
      "Epoch 271/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5342 - acc: 0.7454 - val_loss: 0.5281 - val_acc: 0.7574\n",
      "Epoch 272/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5363 - acc: 0.7437 - val_loss: 0.5287 - val_acc: 0.7578\n",
      "Epoch 273/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5360 - acc: 0.7468 - val_loss: 0.5282 - val_acc: 0.7590\n",
      "Epoch 274/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5334 - acc: 0.7464 - val_loss: 0.5282 - val_acc: 0.7577\n",
      "Epoch 275/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5330 - acc: 0.7449 - val_loss: 0.5268 - val_acc: 0.7584\n",
      "Epoch 276/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5340 - acc: 0.7430 - val_loss: 0.5268 - val_acc: 0.7589\n",
      "Epoch 277/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5331 - acc: 0.7472 - val_loss: 0.5263 - val_acc: 0.7600\n",
      "Epoch 278/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5352 - acc: 0.7432 - val_loss: 0.5267 - val_acc: 0.7571\n",
      "Epoch 279/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5314 - acc: 0.7462 - val_loss: 0.5258 - val_acc: 0.7578\n",
      "Epoch 280/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5312 - acc: 0.7459 - val_loss: 0.5251 - val_acc: 0.7602\n",
      "Epoch 281/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5305 - acc: 0.7472 - val_loss: 0.5244 - val_acc: 0.7602\n",
      "Epoch 282/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.5321 - acc: 0.7451 - val_loss: 0.5242 - val_acc: 0.7600\n",
      "Epoch 283/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.5327 - acc: 0.7432 - val_loss: 0.5251 - val_acc: 0.7569\n",
      "Epoch 284/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.5290 - acc: 0.7469 - val_loss: 0.5247 - val_acc: 0.7602\n",
      "Epoch 285/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.5331 - acc: 0.7449 - val_loss: 0.5247 - val_acc: 0.7600\n",
      "Epoch 286/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.5292 - acc: 0.7485 - val_loss: 0.5229 - val_acc: 0.7606\n",
      "Epoch 287/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.5287 - acc: 0.7446 - val_loss: 0.5228 - val_acc: 0.7620\n",
      "Epoch 288/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.5309 - acc: 0.7464 - val_loss: 0.5227 - val_acc: 0.7624\n",
      "Epoch 289/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.5289 - acc: 0.7495 - val_loss: 0.5220 - val_acc: 0.7620\n",
      "Epoch 290/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.5285 - acc: 0.7501 - val_loss: 0.5228 - val_acc: 0.7614\n",
      "Epoch 291/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5269 - acc: 0.7502 - val_loss: 0.5213 - val_acc: 0.7619\n",
      "Epoch 292/500\n",
      "25000/25000 [==============================] - 2s 100us/step - loss: 0.5272 - acc: 0.7520 - val_loss: 0.5221 - val_acc: 0.7613\n",
      "Epoch 293/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5288 - acc: 0.7494 - val_loss: 0.5209 - val_acc: 0.7627\n",
      "Epoch 294/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5283 - acc: 0.7510 - val_loss: 0.5206 - val_acc: 0.7642\n",
      "Epoch 295/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5252 - acc: 0.7528 - val_loss: 0.5203 - val_acc: 0.7638\n",
      "Epoch 296/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5259 - acc: 0.7464 - val_loss: 0.5195 - val_acc: 0.7636\n",
      "Epoch 297/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5272 - acc: 0.7502 - val_loss: 0.5196 - val_acc: 0.7632\n",
      "Epoch 298/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5264 - acc: 0.7497 - val_loss: 0.5242 - val_acc: 0.7530\n",
      "Epoch 299/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5260 - acc: 0.7514 - val_loss: 0.5183 - val_acc: 0.7646\n",
      "Epoch 300/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5241 - acc: 0.7516 - val_loss: 0.5183 - val_acc: 0.7640\n",
      "Epoch 301/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5228 - acc: 0.7522 - val_loss: 0.5178 - val_acc: 0.7651\n",
      "Epoch 302/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5252 - acc: 0.7515 - val_loss: 0.5177 - val_acc: 0.7650\n",
      "Epoch 303/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5234 - acc: 0.7549 - val_loss: 0.5192 - val_acc: 0.7612\n",
      "Epoch 304/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5245 - acc: 0.7531 - val_loss: 0.5172 - val_acc: 0.7650\n",
      "Epoch 305/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5198 - acc: 0.7569 - val_loss: 0.5161 - val_acc: 0.7654\n",
      "Epoch 306/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5228 - acc: 0.7526 - val_loss: 0.5165 - val_acc: 0.7666\n",
      "Epoch 307/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5234 - acc: 0.7512 - val_loss: 0.5162 - val_acc: 0.7663\n",
      "Epoch 308/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5218 - acc: 0.7533 - val_loss: 0.5157 - val_acc: 0.7671\n",
      "Epoch 309/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5212 - acc: 0.7544 - val_loss: 0.5203 - val_acc: 0.7620\n",
      "Epoch 310/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5202 - acc: 0.7549 - val_loss: 0.5153 - val_acc: 0.7654\n",
      "Epoch 311/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5199 - acc: 0.7506 - val_loss: 0.5150 - val_acc: 0.7658\n",
      "Epoch 312/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5221 - acc: 0.7537 - val_loss: 0.5150 - val_acc: 0.7660\n",
      "Epoch 313/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5212 - acc: 0.7527 - val_loss: 0.5144 - val_acc: 0.7678\n",
      "Epoch 314/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5200 - acc: 0.7530 - val_loss: 0.5142 - val_acc: 0.7687\n",
      "Epoch 315/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5193 - acc: 0.7526 - val_loss: 0.5138 - val_acc: 0.7683\n",
      "Epoch 316/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5208 - acc: 0.7524 - val_loss: 0.5142 - val_acc: 0.7671\n",
      "Epoch 317/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5187 - acc: 0.7568 - val_loss: 0.5130 - val_acc: 0.7692\n",
      "Epoch 318/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5180 - acc: 0.7551 - val_loss: 0.5138 - val_acc: 0.7656\n",
      "Epoch 319/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5212 - acc: 0.7527 - val_loss: 0.5131 - val_acc: 0.7684\n",
      "Epoch 320/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5188 - acc: 0.7548 - val_loss: 0.5132 - val_acc: 0.7668\n",
      "Epoch 321/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5169 - acc: 0.7559 - val_loss: 0.5113 - val_acc: 0.7696\n",
      "Epoch 322/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5179 - acc: 0.7563 - val_loss: 0.5113 - val_acc: 0.7706\n",
      "Epoch 323/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5163 - acc: 0.7570 - val_loss: 0.5118 - val_acc: 0.7699\n",
      "Epoch 324/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5194 - acc: 0.7540 - val_loss: 0.5148 - val_acc: 0.7666\n",
      "Epoch 325/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5126 - acc: 0.7612 - val_loss: 0.5095 - val_acc: 0.7713\n",
      "Epoch 326/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5159 - acc: 0.7572 - val_loss: 0.5098 - val_acc: 0.7712\n",
      "Epoch 327/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5137 - acc: 0.7619 - val_loss: 0.5097 - val_acc: 0.7705\n",
      "Epoch 328/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5142 - acc: 0.7603 - val_loss: 0.5108 - val_acc: 0.7678\n",
      "Epoch 329/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5162 - acc: 0.7580 - val_loss: 0.5088 - val_acc: 0.7716\n",
      "Epoch 330/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5127 - acc: 0.7575 - val_loss: 0.5084 - val_acc: 0.7721\n",
      "Epoch 331/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5137 - acc: 0.7591 - val_loss: 0.5091 - val_acc: 0.7721\n",
      "Epoch 332/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5138 - acc: 0.7606 - val_loss: 0.5079 - val_acc: 0.7723\n",
      "Epoch 333/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5136 - acc: 0.7586 - val_loss: 0.5078 - val_acc: 0.7724\n",
      "Epoch 334/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5144 - acc: 0.7603 - val_loss: 0.5096 - val_acc: 0.7711\n",
      "Epoch 335/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5099 - acc: 0.7632 - val_loss: 0.5089 - val_acc: 0.7689\n",
      "Epoch 336/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5119 - acc: 0.7612 - val_loss: 0.5073 - val_acc: 0.7736\n",
      "Epoch 337/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5125 - acc: 0.7615 - val_loss: 0.5080 - val_acc: 0.7728\n",
      "Epoch 338/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5131 - acc: 0.7608 - val_loss: 0.5070 - val_acc: 0.7737\n",
      "Epoch 339/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5121 - acc: 0.7617 - val_loss: 0.5060 - val_acc: 0.7747\n",
      "Epoch 340/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5085 - acc: 0.7598 - val_loss: 0.5055 - val_acc: 0.7743\n",
      "Epoch 341/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5097 - acc: 0.7607 - val_loss: 0.5064 - val_acc: 0.7744\n",
      "Epoch 342/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5075 - acc: 0.7620 - val_loss: 0.5051 - val_acc: 0.7746\n",
      "Epoch 343/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5102 - acc: 0.7612 - val_loss: 0.5045 - val_acc: 0.7752\n",
      "Epoch 344/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5105 - acc: 0.7621 - val_loss: 0.5048 - val_acc: 0.7758\n",
      "Epoch 345/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5107 - acc: 0.7615 - val_loss: 0.5050 - val_acc: 0.7744\n",
      "Epoch 346/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5091 - acc: 0.7644 - val_loss: 0.5046 - val_acc: 0.7765\n",
      "Epoch 347/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5098 - acc: 0.7602 - val_loss: 0.5041 - val_acc: 0.7748\n",
      "Epoch 348/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5079 - acc: 0.7632 - val_loss: 0.5028 - val_acc: 0.7757\n",
      "Epoch 349/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5079 - acc: 0.7644 - val_loss: 0.5035 - val_acc: 0.7749\n",
      "Epoch 350/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5085 - acc: 0.7619 - val_loss: 0.5029 - val_acc: 0.7766\n",
      "Epoch 351/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5069 - acc: 0.7640 - val_loss: 0.5023 - val_acc: 0.7762\n",
      "Epoch 352/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5048 - acc: 0.7630 - val_loss: 0.5024 - val_acc: 0.7765\n",
      "Epoch 353/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5086 - acc: 0.7614 - val_loss: 0.5019 - val_acc: 0.7773\n",
      "Epoch 354/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5082 - acc: 0.7614 - val_loss: 0.5033 - val_acc: 0.7740\n",
      "Epoch 355/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5047 - acc: 0.7661 - val_loss: 0.5009 - val_acc: 0.7777\n",
      "Epoch 356/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5039 - acc: 0.7648 - val_loss: 0.5006 - val_acc: 0.7773.5023 - ac\n",
      "Epoch 357/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5050 - acc: 0.7644 - val_loss: 0.5005 - val_acc: 0.7778\n",
      "Epoch 358/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5077 - acc: 0.7630 - val_loss: 0.5007 - val_acc: 0.7776\n",
      "Epoch 359/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5053 - acc: 0.7654 - val_loss: 0.5003 - val_acc: 0.7790\n",
      "Epoch 360/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5056 - acc: 0.7640 - val_loss: 0.5002 - val_acc: 0.7784\n",
      "Epoch 361/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5072 - acc: 0.7640 - val_loss: 0.5000 - val_acc: 0.7780\n",
      "Epoch 362/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5037 - acc: 0.7642 - val_loss: 0.4988 - val_acc: 0.7785\n",
      "Epoch 363/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5030 - acc: 0.7664 - val_loss: 0.4998 - val_acc: 0.7768\n",
      "Epoch 364/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5089 - acc: 0.7626 - val_loss: 0.4995 - val_acc: 0.7797\n",
      "Epoch 365/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5025 - acc: 0.7671 - val_loss: 0.4999 - val_acc: 0.7792\n",
      "Epoch 366/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5027 - acc: 0.7676 - val_loss: 0.4981 - val_acc: 0.7788\n",
      "Epoch 367/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5070 - acc: 0.7619 - val_loss: 0.4987 - val_acc: 0.7803\n",
      "Epoch 368/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5045 - acc: 0.7650 - val_loss: 0.4993 - val_acc: 0.7782\n",
      "Epoch 369/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5031 - acc: 0.7683 - val_loss: 0.4982 - val_acc: 0.7784\n",
      "Epoch 370/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5044 - acc: 0.7648 - val_loss: 0.4985 - val_acc: 0.7784\n",
      "Epoch 371/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5039 - acc: 0.7630 - val_loss: 0.4977 - val_acc: 0.7804\n",
      "Epoch 372/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4990 - acc: 0.7693 - val_loss: 0.4966 - val_acc: 0.7801\n",
      "Epoch 373/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4982 - acc: 0.7689 - val_loss: 0.4965 - val_acc: 0.7788\n",
      "Epoch 374/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5018 - acc: 0.7694 - val_loss: 0.4963 - val_acc: 0.7800\n",
      "Epoch 375/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.5010 - acc: 0.7671 - val_loss: 0.4963 - val_acc: 0.7796\n",
      "Epoch 376/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5026 - acc: 0.7671 - val_loss: 0.4959 - val_acc: 0.7814\n",
      "Epoch 377/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5017 - acc: 0.7684 - val_loss: 0.4976 - val_acc: 0.7784\n",
      "Epoch 378/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4995 - acc: 0.7716 - val_loss: 0.4952 - val_acc: 0.7822\n",
      "Epoch 379/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4997 - acc: 0.7670 - val_loss: 0.4948 - val_acc: 0.7827\n",
      "Epoch 380/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4984 - acc: 0.7688 - val_loss: 0.4945 - val_acc: 0.7824\n",
      "Epoch 381/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4988 - acc: 0.7693 - val_loss: 0.4941 - val_acc: 0.78284998 - acc: 0.\n",
      "Epoch 382/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4978 - acc: 0.7707 - val_loss: 0.4939 - val_acc: 0.7829\n",
      "Epoch 383/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4987 - acc: 0.7701 - val_loss: 0.4949 - val_acc: 0.7816\n",
      "Epoch 384/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4969 - acc: 0.7735 - val_loss: 0.4937 - val_acc: 0.7830\n",
      "Epoch 385/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4952 - acc: 0.7720 - val_loss: 0.4933 - val_acc: 0.7831\n",
      "Epoch 386/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4939 - acc: 0.7728 - val_loss: 0.4922 - val_acc: 0.7838\n",
      "Epoch 387/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.5005 - acc: 0.7727 - val_loss: 0.4933 - val_acc: 0.7834\n",
      "Epoch 388/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4982 - acc: 0.7713 - val_loss: 0.4932 - val_acc: 0.7827\n",
      "Epoch 389/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4982 - acc: 0.7688 - val_loss: 0.4938 - val_acc: 0.7825\n",
      "Epoch 390/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4936 - acc: 0.7736 - val_loss: 0.4917 - val_acc: 0.7839\n",
      "Epoch 391/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4947 - acc: 0.7725 - val_loss: 0.4911 - val_acc: 0.7851\n",
      "Epoch 392/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4950 - acc: 0.7707 - val_loss: 0.4909 - val_acc: 0.7849\n",
      "Epoch 393/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4943 - acc: 0.7734 - val_loss: 0.4918 - val_acc: 0.7817\n",
      "Epoch 394/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4941 - acc: 0.7720 - val_loss: 0.4904 - val_acc: 0.7848\n",
      "Epoch 395/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4926 - acc: 0.7729 - val_loss: 0.4908 - val_acc: 0.7851\n",
      "Epoch 396/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4957 - acc: 0.7679 - val_loss: 0.4903 - val_acc: 0.7852\n",
      "Epoch 397/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4964 - acc: 0.7702 - val_loss: 0.4904 - val_acc: 0.7850\n",
      "Epoch 398/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4920 - acc: 0.7729 - val_loss: 0.4930 - val_acc: 0.7794\n",
      "Epoch 399/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4929 - acc: 0.7742 - val_loss: 0.4893 - val_acc: 0.7859\n",
      "Epoch 400/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4906 - acc: 0.7732 - val_loss: 0.4905 - val_acc: 0.7837\n",
      "Epoch 401/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4942 - acc: 0.7731 - val_loss: 0.4893 - val_acc: 0.7844\n",
      "Epoch 402/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4921 - acc: 0.7745 - val_loss: 0.4886 - val_acc: 0.7859\n",
      "Epoch 403/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4923 - acc: 0.7732 - val_loss: 0.4884 - val_acc: 0.7864\n",
      "Epoch 404/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4928 - acc: 0.7734 - val_loss: 0.4883 - val_acc: 0.7872\n",
      "Epoch 405/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4905 - acc: 0.7730 - val_loss: 0.4882 - val_acc: 0.7849\n",
      "Epoch 406/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4902 - acc: 0.7750 - val_loss: 0.4877 - val_acc: 0.7867\n",
      "Epoch 407/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4900 - acc: 0.7762 - val_loss: 0.4872 - val_acc: 0.7870\n",
      "Epoch 408/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4899 - acc: 0.7753 - val_loss: 0.4872 - val_acc: 0.7882\n",
      "Epoch 409/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4892 - acc: 0.7774 - val_loss: 0.4874 - val_acc: 0.7850\n",
      "Epoch 410/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4866 - acc: 0.7791 - val_loss: 0.4873 - val_acc: 0.7845\n",
      "Epoch 411/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4900 - acc: 0.7748 - val_loss: 0.4858 - val_acc: 0.7876\n",
      "Epoch 412/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4887 - acc: 0.7761 - val_loss: 0.4860 - val_acc: 0.7878\n",
      "Epoch 413/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4883 - acc: 0.7761 - val_loss: 0.4858 - val_acc: 0.7883\n",
      "Epoch 414/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4879 - acc: 0.7749 - val_loss: 0.4856 - val_acc: 0.7864\n",
      "Epoch 415/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4898 - acc: 0.7754 - val_loss: 0.4868 - val_acc: 0.78620s - loss: 0.4\n",
      "Epoch 416/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4892 - acc: 0.7730 - val_loss: 0.4853 - val_acc: 0.7885\n",
      "Epoch 417/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.4915 - acc: 0.7725 - val_loss: 0.4857 - val_acc: 0.7883\n",
      "Epoch 418/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.4883 - acc: 0.7734 - val_loss: 0.4850 - val_acc: 0.7886\n",
      "Epoch 419/500\n",
      "25000/25000 [==============================] - 3s 101us/step - loss: 0.4912 - acc: 0.7763 - val_loss: 0.4870 - val_acc: 0.7858\n",
      "Epoch 420/500\n",
      "25000/25000 [==============================] - 3s 100us/step - loss: 0.4858 - acc: 0.7770 - val_loss: 0.4840 - val_acc: 0.7894\n",
      "Epoch 421/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4854 - acc: 0.7768 - val_loss: 0.4837 - val_acc: 0.7894\n",
      "Epoch 422/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4860 - acc: 0.7803 - val_loss: 0.4836 - val_acc: 0.7892\n",
      "Epoch 423/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4848 - acc: 0.7806 - val_loss: 0.4837 - val_acc: 0.7879\n",
      "Epoch 424/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4842 - acc: 0.7809 - val_loss: 0.4832 - val_acc: 0.7898\n",
      "Epoch 425/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4861 - acc: 0.7778 - val_loss: 0.4841 - val_acc: 0.7865\n",
      "Epoch 426/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4889 - acc: 0.7760 - val_loss: 0.4831 - val_acc: 0.7900\n",
      "Epoch 427/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4873 - acc: 0.7774 - val_loss: 0.4838 - val_acc: 0.7884\n",
      "Epoch 428/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4865 - acc: 0.7790 - val_loss: 0.4830 - val_acc: 0.7902\n",
      "Epoch 429/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4889 - acc: 0.7740 - val_loss: 0.4826 - val_acc: 0.7901\n",
      "Epoch 430/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4845 - acc: 0.7818 - val_loss: 0.4834 - val_acc: 0.7867\n",
      "Epoch 431/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4843 - acc: 0.7799 - val_loss: 0.4822 - val_acc: 0.7896\n",
      "Epoch 432/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4865 - acc: 0.7775 - val_loss: 0.4816 - val_acc: 0.7920\n",
      "Epoch 433/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4857 - acc: 0.7786 - val_loss: 0.4817 - val_acc: 0.7918\n",
      "Epoch 434/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4831 - acc: 0.7800 - val_loss: 0.4818 - val_acc: 0.7899\n",
      "Epoch 435/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4843 - acc: 0.7828 - val_loss: 0.4808 - val_acc: 0.7922\n",
      "Epoch 436/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4821 - acc: 0.7820 - val_loss: 0.4801 - val_acc: 0.7922\n",
      "Epoch 437/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4820 - acc: 0.7812 - val_loss: 0.4818 - val_acc: 0.7895\n",
      "Epoch 438/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4811 - acc: 0.7806 - val_loss: 0.4797 - val_acc: 0.7922\n",
      "Epoch 439/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4823 - acc: 0.7830 - val_loss: 0.4798 - val_acc: 0.7920\n",
      "Epoch 440/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4830 - acc: 0.7809 - val_loss: 0.4839 - val_acc: 0.7825\n",
      "Epoch 441/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4838 - acc: 0.7788 - val_loss: 0.4801 - val_acc: 0.7917\n",
      "Epoch 442/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4792 - acc: 0.7848 - val_loss: 0.4787 - val_acc: 0.7920\n",
      "Epoch 443/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4781 - acc: 0.7815 - val_loss: 0.4788 - val_acc: 0.7914\n",
      "Epoch 444/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4824 - acc: 0.7785 - val_loss: 0.4788 - val_acc: 0.7919\n",
      "Epoch 445/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4837 - acc: 0.7796 - val_loss: 0.4784 - val_acc: 0.7933\n",
      "Epoch 446/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4851 - acc: 0.7768 - val_loss: 0.4804 - val_acc: 0.7918\n",
      "Epoch 447/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4784 - acc: 0.7844 - val_loss: 0.4809 - val_acc: 0.7860\n",
      "Epoch 448/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4819 - acc: 0.7817 - val_loss: 0.4779 - val_acc: 0.7923\n",
      "Epoch 449/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4815 - acc: 0.7787 - val_loss: 0.4775 - val_acc: 0.7936\n",
      "Epoch 450/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4766 - acc: 0.7862 - val_loss: 0.4783 - val_acc: 0.7924\n",
      "Epoch 451/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4800 - acc: 0.7808 - val_loss: 0.4768 - val_acc: 0.7943\n",
      "Epoch 452/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4782 - acc: 0.7839 - val_loss: 0.4766 - val_acc: 0.7946\n",
      "Epoch 453/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4789 - acc: 0.7848 - val_loss: 0.4779 - val_acc: 0.7930\n",
      "Epoch 454/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4777 - acc: 0.7830 - val_loss: 0.4765 - val_acc: 0.7944\n",
      "Epoch 455/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4794 - acc: 0.7851 - val_loss: 0.4773 - val_acc: 0.7916\n",
      "Epoch 456/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4761 - acc: 0.7846 - val_loss: 0.4768 - val_acc: 0.7934\n",
      "Epoch 457/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4760 - acc: 0.7825 - val_loss: 0.4758 - val_acc: 0.7931\n",
      "Epoch 458/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4786 - acc: 0.7818 - val_loss: 0.4758 - val_acc: 0.7946\n",
      "Epoch 459/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4761 - acc: 0.7856 - val_loss: 0.4773 - val_acc: 0.7889\n",
      "Epoch 460/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4796 - acc: 0.7832 - val_loss: 0.4751 - val_acc: 0.7944\n",
      "Epoch 461/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4739 - acc: 0.7865 - val_loss: 0.4749 - val_acc: 0.7946\n",
      "Epoch 462/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4793 - acc: 0.7814 - val_loss: 0.4748 - val_acc: 0.7953\n",
      "Epoch 463/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4749 - acc: 0.7854 - val_loss: 0.4737 - val_acc: 0.7953\n",
      "Epoch 464/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4781 - acc: 0.7830 - val_loss: 0.4744 - val_acc: 0.7943\n",
      "Epoch 465/500\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4756 - acc: 0.7841 - val_loss: 0.4742 - val_acc: 0.7958\n",
      "Epoch 466/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4800 - acc: 0.7782 - val_loss: 0.4758 - val_acc: 0.7948\n",
      "Epoch 467/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4755 - acc: 0.7829 - val_loss: 0.4746 - val_acc: 0.7922\n",
      "Epoch 468/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4762 - acc: 0.7827 - val_loss: 0.4736 - val_acc: 0.7953\n",
      "Epoch 469/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4752 - acc: 0.7840 - val_loss: 0.4731 - val_acc: 0.7958\n",
      "Epoch 470/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4756 - acc: 0.7840 - val_loss: 0.4732 - val_acc: 0.7955\n",
      "Epoch 471/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4766 - acc: 0.7814 - val_loss: 0.4736 - val_acc: 0.7947\n",
      "Epoch 472/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4779 - acc: 0.7831 - val_loss: 0.4734 - val_acc: 0.7953\n",
      "Epoch 473/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4752 - acc: 0.7844 - val_loss: 0.4725 - val_acc: 0.7963\n",
      "Epoch 474/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4723 - acc: 0.7879 - val_loss: 0.4722 - val_acc: 0.7947\n",
      "Epoch 475/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4718 - acc: 0.7886 - val_loss: 0.4719 - val_acc: 0.7942\n",
      "Epoch 476/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4719 - acc: 0.7853 - val_loss: 0.4712 - val_acc: 0.7958\n",
      "Epoch 477/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4747 - acc: 0.7832 - val_loss: 0.4717 - val_acc: 0.7954\n",
      "Epoch 478/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4720 - acc: 0.7897 - val_loss: 0.4707 - val_acc: 0.7963\n",
      "Epoch 479/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4761 - acc: 0.7824 - val_loss: 0.4712 - val_acc: 0.7972\n",
      "Epoch 480/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4745 - acc: 0.7868 - val_loss: 0.4716 - val_acc: 0.7980\n",
      "Epoch 481/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4718 - acc: 0.7853 - val_loss: 0.4711 - val_acc: 0.7952\n",
      "Epoch 482/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4724 - acc: 0.7870 - val_loss: 0.4731 - val_acc: 0.7950\n",
      "Epoch 483/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4722 - acc: 0.7876 - val_loss: 0.4703 - val_acc: 0.7968\n",
      "Epoch 484/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4704 - acc: 0.7872 - val_loss: 0.4715 - val_acc: 0.7958\n",
      "Epoch 485/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4711 - acc: 0.7869 - val_loss: 0.4698 - val_acc: 0.7979\n",
      "Epoch 486/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4710 - acc: 0.7859 - val_loss: 0.4705 - val_acc: 0.7951\n",
      "Epoch 487/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4716 - acc: 0.7860 - val_loss: 0.4693 - val_acc: 0.7974\n",
      "Epoch 488/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4682 - acc: 0.7926 - val_loss: 0.4689 - val_acc: 0.7973\n",
      "Epoch 489/500\n",
      "25000/25000 [==============================] - 2s 99us/step - loss: 0.4718 - acc: 0.7861 - val_loss: 0.4690 - val_acc: 0.7964\n",
      "Epoch 490/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4688 - acc: 0.7886 - val_loss: 0.4695 - val_acc: 0.7958 loss: 0.4715 - a\n",
      "Epoch 491/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4697 - acc: 0.7872 - val_loss: 0.4685 - val_acc: 0.7986\n",
      "Epoch 492/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4695 - acc: 0.7862 - val_loss: 0.4688 - val_acc: 0.7963\n",
      "Epoch 493/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4689 - acc: 0.7892 - val_loss: 0.4693 - val_acc: 0.7965\n",
      "Epoch 494/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4700 - acc: 0.7860 - val_loss: 0.4682 - val_acc: 0.7973\n",
      "Epoch 495/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4679 - acc: 0.7904 - val_loss: 0.4678 - val_acc: 0.7975\n",
      "Epoch 496/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4662 - acc: 0.7904 - val_loss: 0.4675 - val_acc: 0.8002\n",
      "Epoch 497/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4666 - acc: 0.7876 - val_loss: 0.4677 - val_acc: 0.7970\n",
      "Epoch 498/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4653 - acc: 0.7905 - val_loss: 0.4668 - val_acc: 0.7995\n",
      "Epoch 499/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4682 - acc: 0.7871 - val_loss: 0.4669 - val_acc: 0.7972\n",
      "Epoch 500/500\n",
      "25000/25000 [==============================] - 2s 98us/step - loss: 0.4637 - acc: 0.7923 - val_loss: 0.4665 - val_acc: 0.7998\n"
     ]
    }
   ],
   "source": [
    "history = model.fit(x_train,y_train,batch_size=216,epochs=500,validation_data=(x_test,y_test))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<tf.Tensor 'embedding/embedding_lookup/Identity_2:0' shape=(?, 300, 100) dtype=float32>"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.layers[1].output"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<matplotlib.legend.Legend at 0x20f7ed064e0>"
      ]
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX0AAAD8CAYAAACb4nSYAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3Xd4VVXWwOHfIpSICAIBRWpURIoImsGuKMpgA6yDjnVUxm+sWMYCY0HGMjMWVGRERdFRUdAoVgZBQB1UguLQhmIsxCgGEpQipK3vj3UjlxDISbg3t633ec6T3HPPOdlHw7o76+y9tqgqzjnnUkO9WDfAOedc3fGg75xzKcSDvnPOpRAP+s45l0I86DvnXArxoO+ccynEg75zzqUQD/rOOZdCPOg751wKqR/rBlSWkZGhnTp1inUznHMuocybN2+1qraq7ri4C/qdOnUiJycn1s1wzrmEIiLfBDnO0zvOOZdCPOg751wK8aDvnHMpJO5y+lUpKSkhLy+PTZs2xbopcSk9PZ127drRoEGDWDfFORfnEiLo5+Xlsdtuu9GpUydEJNbNiSuqypo1a8jLyyMzMzPWzXHOxbmESO9s2rSJli1besCvgojQsmVL/yvIORdIQgR9wAP+Dvh/G+dcUAmR3nHOuaRRUgIffAArV8LmzbBp05Ztzz1h6NCo/ngP+s45F22bNsG0afDKKzBlChQVVX3coYfGR9AXkQHAaCANeFJV7630fkdgPNAKKATOU9W80HsXAiNCh45S1QkRartzzsXGd9/BG29AYSH8/DP89JNt69aBKohs2YqL4cMPYf16aNYMBg6EM86AHj1gl10gPd22Ro0gLS3qTa826ItIGjAGOAHIA+aKyBRVXRx22D+AZ1V1gogcB9wDnC8iLYDbgSxAgXmhc7fzMRffBg8ezMqVK9m0aRPXXHMNQ4cO5d133+XWW2+lrKyMjIwMpk+fzvr167nqqqvIyclBRLj99ts544wzYt1851xVioshNxeWLbOtQQPo1w+6d7egHS4nBx58EF5+GUpLbV/DhhbMmzWDJk2gXj0L/BWbCJxzjgX6Y4+142MoSE+/D7BCVXMBRGQiMAgID/rdgGGh798HXgt9/1tgmqoWhs6dBgwAXqx1i6+9FubPr/XpVerVCx56qNrDxo8fT4sWLfjll1/4zW9+w6BBg7jsssuYPXs2mZmZFBYWAnDXXXfRrFkzFixYAEDR9v6Uc87VrVWrLHDPm2fb4sXw1VdQVrbtsW3awPHHwwknWE/84Yetx77bbnDVVXD55dCxo/XQE0iQoN8WWBn2Og84pNIxXwBnYCmg04DdRKTlds5tW+vWxtjDDz9MdnY2ACtXrmTcuHEcffTRv46Pb9GiBQDvvfceEydO/PW85s2b131jnUt2xcWwaBF06gRV/RtTheXL4b33YPp0+OQTS8uA9b733x8OOgiGDIH99rOtc2dLw7z3nuXg33kHnnvOzsnMtM7hxRdD06Z1dpuRFiToVzUeUCu9vgF4VEQuAmYD3wGlAc9FRIYCQwE6dOiw49YE6JFHw8yZM3nvvfeYM2cOjRs3pm/fvhx44IEsXbp0m2NV1YdROhcNZWU28uWFF2Dy5C0PRPfay3LkPXpYcP7sMwvcK0N9zo4doW9fOPhgyMqyv+53263qn9GiBfzhD7aVl1tmobDQUjN1kHOPtiBBPw9oH/a6HZAffoCq5gOnA4hIE+AMVf1JRPKAvpXOnVn5B6jqOGAcQFZW1jYfCvHgp59+onnz5jRu3Jj//e9/fPzxx2zevJlZs2bx1Vdf/ZreadGiBf379+fRRx/lodAHVFFRkff2nQtq6VJYuNB68iUl9rW4GFasgJdegvx82HVXGDwYTjzRXi9caNtjj9lImebNLS8/fLilaPbee9v8fBD16tlfA0kkSNCfC3QWkUysBz8EODf8ABHJAApVtRy4BRvJ
AzAVuFtEKiJe/9D7CWfAgAH885//pGfPnnTp0oVDDz2UVq1aMW7cOE4//XTKy8tp3bo106ZNY8SIEVxxxRX06NGDtLQ0br/9dk4//fRY34Jz8amsDD7+GF5/3bZly6o+rkEDC/LnngunnGKBv6pr5edbzz8JeuXRUG3QV9VSEbkSC+BpwHhVXSQiI4EcVZ2C9ebvERHF0jtXhM4tFJG7sA8OgJEVD3UTTaNGjXjnnXeqfO/EE0/c6nWTJk2YMMFHproUU1pqo2CWLLHAvcsu0Lo1tGq1ZVu71h6c5uZu+frBB1BQYEG9b1+4+mo4/HA7v2HDLdtuu9m+HUlLg/btd3xMigs0Tl9V3wberrTvtrDvJwOTt3PueLb0/J1zyaKsDN56CyZOtNTK0qWWhgmqUSN7CHv88TBoEAwYYMMeXVT5jFznXM388AM89RQ8/rg9KG3dGvr0sdRL167QrZuNhCkuhh9/tF58xdasmT1ozcy0kgP1Eqb8V9LwoO+c277ycsjL2zJxadYsePVVS+X062cTlQYOtNRMVVq3rtv2xqniYpsi8P339pn5ww/2n7ZxY3s0UbFlZNjnZjR50Hcu1anC6tUW1Jcv3xLgK16Hl+1u0WLLxKT99otdm2NE1QYUlZRYxYXFi7cMHFq40P6TVUzUrVBebscG0aePTSeIJg/6zqWaVatg7lybmVrx9ccft7xfvz7ss49NVOrff8vEpf32s1mqcTIHZeNGyxilpdnjgYYNt5Sv+eabrT+7Vqywz6569bbeSkrgl1/svV9+sa242B5XVN5KSqqeuAv2Wdijh1VaqOpZc8uWls3ac0/7T7jHHvafecMGu48NG2yr7jl1JHjQdy5ZlZZaT/2LL+C//7WvX3yx9azUbt0sF3/ggdCliwX2Tp0sItVhM7//3h4P5OXZVli4bdDduNGaHn5MELvvbp9fTZpYr7u01L6WlVlWqlkzC8bp6VsGDKWlbbs1aLD11rixTert0cOCeJx8FlbLg75zyWbtWhg92raKGav161uy+NhjoXdv+M1v7GuTJlFtSlmZfd589BHMmWO57PXrrVe7fr1tRUUWhMOJWJPDg256OrRta59JRx4J7dpZsC0rs7L0FVtpqY3arPjjJCMjcQJyXfCgHyVNmjRh/fr1sW6GSyZlZVYSYOlSi2bdu2+dDygstDIlo0dbud+BA+H0060X37VrrQuDlZVZDzs3F7791n5MUdGWrz//bJdu3Nia07ixBekvvrBAX/HPoCJgN29uQXnXXe0zp2VLe92unW3t21vv2wN1dHjQdy5elZVZJchZs2z78EOr2V6hXj1LyRx4oCWVn3vOnhiecQaMGGH1ZQIoL7fnuF9/bfOlvv56y/e5ufZ9ScnW54hYYG7RwuZMlZRY+qVi27zZPmcuuACOOMK2Dh08kMeDhAv6saqsfNNNN9GxY0f+9Kc/AXDHHXcgIsyePZuioiJKSkoYNWoUgwYNqvbnrV+/nkGDBlV53rPPPss//vEPRISePXvy3HPPsWrVKi6//HJyc3MBGDt2LIcffvjO3bSLT+Xllgt56SUrKLZqle3v0gV+9zubsdq9+5ZcfUV3euVKOPNMC/YHHGADcgrgyy/tIeaKFVu+LyzcEpwrHl5W1rKl9cp79bI/Fvbe27aOHe29Zs28ykGiSrigHytDhgzh2muv/TXov/zyy7z77rsMGzaMpk2bsnr1ag499FAGDhxYbYXN9PR0srOztzlv8eLF/PWvf+Wjjz4iIyPj1/r8V199NccccwzZ2dmUlZV52ijZFBdb4M7OhkmTrHZMejqcfPKWhTf23PPXw1WhqF1P8jqfQd6hoQeg35az8rt6rLzWUjB5eVuPtBSxnvY++1gwD0/FNG5sPfbMTHuvU6ftF6B0iS/hgn6MKivTu3dvfvzxR/Lz8ykoKKB58+a0adOGYcOGMXv2bOrVq8d3333HqlWr2DPsH2hVVJVbb711m/NmzJjBmWeeSUZGBrCl
Pv+MGTN49tlnAUhLS6OZT1VPfF9+CVOn2jZjhiW+GzWykTRnn03RkaeyLL8JK1bAN+NtCGLF9u231ksPV69ePfbay/LhBx1kVQ3at7cgv+++FtATbK0PFyUJF/Rj6cwzz2Ty5Mn88MMPDBkyhOeff56CggLmzZtHgwYN6NSpE5vCu1fbsb3zvA5/klO13vzIkfDFF5QjfNvuCBYddS8LWx7DkvL9WP51Q5ZdbTn2cK1aWWqle3f7XGjffuuHn3vuWaejLF0C81+TGhgyZAiXXXYZq1evZtasWbz88su0bt2aBg0a8P777/PNN98Eus5PP/1U5Xn9+vXjtNNOY9iwYbRs2fLX+vz9+vVj7NixXHvttZSVlbFhwwaaJvDKPammvExZ+cx0Ft/9GotzG7G46QgWdTySRatbsz6vnq1YgU3a6dLFcugVww333deCfePGsb0Hlzw86NdA9+7dWbduHW3btqVNmzb8/ve/59RTTyUrK4tevXqx//77B7rO9s7r3r07w4cP55hjjiEtLY3evXvzzDPPMHr0aIYOHcpTTz1FWloaY8eO5bDDDovmrbqdtPR/Svaz63jj1WK+WN6YDeXHA8cD0Dpd6ZYpXDzQeu49etjX3XePbZtdahDV+FqoKisrS3Nycrbat2TJErpGuwpRgvP/RnUnPx/efddGr+y6K+wqG9m1cCXlX3/Lv2ek8dqCfViyoSMAWczliN0W0O20LnS7+BC6HlCfli1jfAMuKYnIPFXNqu447+k7F4CqFcJ6+KEyJk0WSsvCSwI3BroAXUijlGOafMafDvmAQScW077vPnDYeTa337k4ECjoi8gAYDS2ctaTqnpvpfc7ABOA3UPH3Kyqb4tIJ2AJULF6+Meqenlkmh7/FixYwPnnn7/VvkaNGvFJtMvouYjZuBFefUV5+J4NzF3ShKaygav0Sf6w22R22a89G9t3YcNendnQOpOSPdvTZ/BetNijD9An1k13rkrVBn0RSQPGACdgj5zmisgUVV0cdtgI4GVVHSsi3bBVtjqF3vtSVYNNDdyBRBzZcsABBzA/0jPJqhBvKbpE98MP8Oab8MYbyrR3y/iluD77s5Ix9f/JBQPX0uQPZ0P/WduvIe9cHAvS0+8DrFDVXAARmQgMAsKDvgIVw0maAfmRbGR6ejpr1qyhZcuWCRf4o01VWbNmDenp6bFuSkJStVIDn30Gn39uQ+Yr/hDr2GgVlxa/zOD9/0ffG35DvbPuAh815RJckKDfFlgZ9joPOKTSMXcA/xaRq4BdqRimYDJF5HPgZ2CEqn5Q+QeIyFBgKECHDh22aUC7du3Iy8ujoKAgQHNTT3p6Ou3atYt1M+JecbGt2T1/vm2ff27bzz/b+/Xrw0G9y7mr32wGzr6BAxqtQB75O1zyqC/r55JGkKBfVde6cj7hHOAZVb1fRA4DnhORHsD3QAdVXSMiBwOviUh3Vf15q4upjgPGgY3eqfzDGjRoQGZmZoCmOrfFpk3w3nswZQp8+qmtclRROGyXXaBnT/j9720Ga+9um+mR9y6N7rwV5i62OjcPvblV+QPnkkGQoJ8HtA973Y5t0zeXAAMAVHWOiKQDGar6I7A5tH+eiHwJ7Afk4FwU/PQTvPUWvPYavPOOVTdo2hQOP9xmsvbqZUUpO3eGtJJNVgZh0iS4bopVqOzY0S5w0kmxvhXnoiJI0J8LdBaRTOA7YAhwbqVjvgX6Ac+ISFcgHSgQkVZAoaqWicjeQGcgN2KtdymvqMgqDs+ebdWHP/vMKhLvuaf14k87zeqV/TpicvNmC/R/nQSvv26BvnlzOPtsOOssOO44f0Drklq1QV9VS0XkSmAqNhxzvKouEpGRQI6qTgGuB54QkWFY6uciVVURORoYKSKlQBlwuaoGXOTMuaqVlcHzz1vxvfnz7WFsw4ZwyCFw881WnPKQQ8LS8Js3wxv/th79669bEr95cytF/LvfeaB3KSUhZuQ6BxbcX3/dSsYvWrSl1vsx
x0CfPlaN+FcbN9q02VdfhTfe2BLoBw+2Xv1xx/mEKZdUfEauSyrvvw+33GLDKbt0sU77GWdUWolJFV55BSZOtIT+xo224seZZ9rBxx/vgd6lPA/6Li5t3AgzZ1rsfucdKz/frh08+SRceGEVZYTXroU//MFKF7dpAxddZIH+6KO95rBzYfxfg4sb69fbKoGTJtlD2U2bbGjlscfCjTdasK9yDtr8+dab/+YbeOABuOYaH1fv3HZ40HcxN28ejBsHL7xggX/ffeGPf7Qhlsccs51AD5bOeeopuPJKyMiwTwpfO9i5HfKg72Lixx/h5Zdh/HibFbvLLvZ89bLLLG7vsNpGeTksWGC9+mefhRNOsOE8rVrVWfudS1Qe9F2dWbfORt88/zxMm2ZDL3v1gjFj4Nxzq1lEJDfXptdOn24Fclavtk+GO+6w4TxpaXV1G84lNA/6Luq+/BLuust69r/8Ah06WI7+97+3VaN2KD/f0jfZ2fZ6r70s73P88dCvH7RtG/X2O5dMPOi7qMnPt2D/5JM29+mCC+C88yx9U+1z1vJyeOIJ+POfrVLanXda/qdLl2pyP865HfGg7yKusBDuuw8eecQKnA0dahmYNm0CXmDpUkvuf/CBTaJ6/HF7uuuc22ke9F1EvfuupW2KiixPf+edsM8+AU9WhQcftFlYu+5qT3kvush79s5FkAd9FxGqcM891qM/4ACbQduzZw0usGEDXHqpzaYdNAj++U8va+xcFHjQdztt3TqbOJWdDeecY6n4XXetwQW++spq4ixYYJ8cN93kvXvnosSDvtspS5da+eJly2zY/LXX1jBeT5sGQ4bYg9u334YBA6LWVuecB31XS+vX24Pae+6xGbPTplm5hEAKC60I/tSplsbp3t3+TAic/HfO1ZYHfVcjv/wCY8fCvfdCQYHVrn/sMRt7v0Pvv28VMGfPtjQOQKNGNobzscdqmA9yztVWoKpUIjJARJaKyAoRubmK9zuIyPsi8rmI/FdETgp775bQeUtF5LeRbLyrOyUlNnN2n33g+uttycE5c+DNNwME/HHjbCLVM8/Yw9lRoyz4r10LEyZ4wHeuDlXb0xeRNGAMcAK2Xu5cEZmiqovDDhsBvKyqY0WkG/A20Cn0/RCgO7AX8J6I7KeqZZG+ERc9a9ZYEcuZM+Goo2yAzdFHBzz5oYdg2DD7k2DSJCuy45yLmSA9/T7AClXNVdViYCIwqNIxCjQNfd+MLQunDwImqupmVf0KWBG6nksQS5bY0oNz5linfNasGgT8e+6xgH/GGbaClQd852IuSNBvC6wMe50X2hfuDuA8EcnDevlX1eBcF6fefRcOPdSGZL7/vpVRCDQyRxX+8he49VabqTVxoq9Y5VycCBL0q/pnXnlh3XOAZ1S1HXAS8JyI1At4LiIyVERyRCSnoKAgQJNcNKnC6NGWkcnMhLlz4bDDAp5cUmJJ/1GjbLLVhAm+cpVzcSRI0M8D2oe9bseW9E2FS4CXAVR1DpAOZAQ8F1Udp6pZqprVymuix1RZmRW1vPZaGDjQRlZW+6C2wpw5kJVlpRSuuspq5njJY+fiSpCgPxfoLCKZItIQezA7pdIx3wL9AESkKxb0C0LHDRGRRiKSCXQGPo1U411kbdoEv/udjaC88UYbYdmkSYATi4psqavDD7cx+NnZ9qeCL1noXNyp9u9uVS0VkSuBqUAaMF5VF4nISCBHVacA1wNPiMgwLH1zkaoqsEhEXgYWA6XAFT5yJz799JNVQpg5E+6/H667LsBJqrbG4bBhFuyvu84qrAX6pHDOxYJYbI4fWVlZmpOTE+tmpJQffrB1SRYuhKeftvlSgTzwgOXv+/SxVE6vXlFtp3Nu+0RknqpmVXecP2FLcV9+Cf37W+B/440alL6ZMgVuuMEG8E+c6Ll75xKEB/0Ulp9va5Rs2GDLzh5ySMAT58+3YvlZWTY6xwO+cwnDg36K+vlnOOkkS8XPmgUHHRTwxPx8OOUUaNHC
Vjlv3Diq7XTORZYH/RRUUmJZmYUL4a23ahDwN260BU7WroWPPqrB+ofOuXjhQT/FqNrys9Om2WqEvw1aAq+8HM4/H+bNsx7+gQdGtZ3OuejwgdQp5vbbLQ1/551w8cUBT1q8GI4/3urn3H8/nHpqVNvonIseD/op5PHH4a674JJLrDROtdavt6ULDzzQHt4+/rhN1XXOJSwP+ilAFUaOhMsvt/H4Y8dWUzhNFSZPhq5d4W9/s0prS5fC0KG+dq1zCc6DfpLbtMlS8bffbrE7OxsaNNjBCarwf/8HZ50FLVvaA9unngKvieRcUvCgn8QKCmzBquefh7/+1RauatSompOGD7c0zg03QE6O1dNxziUNH72TpBYvtuH0338PL79sHfdqPfCALXwydKildTyV41zS8aCfhD77zHr4jRrZxKs+QdYqmzDB6uiceaaV2fSA71xS8qCfZBYtslo6TZtawO/UKcBJb7xhQ3r69YN//cvLKjiXxDynn0SWL7fh9A0bwvTpAQP+Bx/A2WfbtNzs7ABJf+dcIvOefpL45hvrqJeWWg9/330DnJSXB6efDh07wttvw267Rb2dzrnY8qCfBPLzLeCvW2fVMrt1C3BSSYktk7Vpk5VVyMiIejudc7EXKL0jIgNEZKmIrBCRm6t4/0ERmR/alonI2rD3ysLeq7zMottJq1bBCSfY13fegd69A554883wn//Ak09Cly5RbaNzLn5U29MXkTRgDHACttD5XBGZoqqLK45R1WFhx18FhIeeX1TVl1SKgpUrrYf/3XdWLfPQQwOe+OqrNjzzqqust++cSxlBevp9gBWqmquqxcBEYNAOjj8HeDESjXPbt2IFHHWU9fD//W/o2zfgicuXW6W1Qw6Bf/wjmk10zsWhIEG/LbAy7HVeaN82RKQjkAnMCNudLiI5IvKxiAzeznlDQ8fkFBQUBGx66lq0CI4+2uqhvf8+HHFEwBN/+cXG4devbzO2GjaMajudc/EnyIPcqmbpbG819SHAZFUtC9vXQVXzRWRvYIaILFDVL7e6mOo4YBzYwugB2pSy5s2zGvgNG9oone7dA55YWmozbRcssJE6HTpEtZ3OufgUpKefB7QPe90OyN/OsUOolNpR1fzQ11xgJlvn+10NfPqprWnbpAnMnl2DgL9una149a9/WW3lwKufO+eSTZCgPxfoLCKZItIQC+zbjMIRkS5Ac2BO2L7mItIo9H0GcASwuPK5rnoLF1qszsiw+VSBxuGDjcU/6iiYOtVqKg8fHtV2OufiW7XpHVUtFZErgalAGjBeVReJyEggR1UrPgDOASaqanh6pivwuIiUYx8w94aP+nHBfPWVlVZIT7dlDtu3r/4cAD7/3KqurVtnw3sCr43onEtWsnWMjr2srCzNycmJdTPixvffw5FHQlGRpXR69Ah44ltv2XDMFi3s+wMOiGo7nXOxJSLzVDWruuO89k4cKyqyzvmqVfbsNXDAX7YMTjvNJl19/LEHfOfcr7wMQ5zasAFOPtlWKazRxCuAP//ZCqe9/TbssUfU2uicSzwe9ONQeTmcdx588glMmmSVMwN7/32rpXP33R7wnXPb8KAfh0aOhNdeg4cesiKYgZWVwXXXWdXMYcOqP945l3I86MeZ7Gy480648EK4+uoanvzsszB/Prz4og31cc65Snz0ThxZtMhy99262WzbGsXt9ethv/2sl/+f//hyh86lmKCjd7ynHycKC23SbJMmVgSzxh31v//dxne+8ooHfOfcdnnQjwOlpXDOOVYqeeZMaFtlObsdyMuzoD9kCBx2WDSa6JxLEh7048CIEVYe+cknaxmzb73Vhvzcc0/E2+acSy4+OSvG3noL7rsPLr8cLrmkFheYMweee85G6wRaCd05l8r8QW4M5eVBr15WS2fOnFrk8TdvtvURN2ywimy+sLlzKcsf5Ma5ijz+5s3w0ku1HGF5992wZIktjusB3zkXgAf9GLntNvjwQ3j+eRtpWWML
F1oO/7zzvD6+cy4wz+nHwNSpFq8vvRTOPbcWFygrs5ObNYMHH4x4+5xzyct7+nUsPx/OP98qZo4eXcuLPPKIFeZ54QVbVcU55wIK1NMXkQEislREVojIzVW8/6CIzA9ty0Rkbdh7F4rI8tB2YSQbn2gqCqlt2GDrkjduXIuLfPWVrX518sk2Lt8552qg2p6+iKQBY4ATsPVy54rIlPAVsFR1WNjxVxFaB1dEWgC3A1nYYurzQucWRfQuEsSYMVYE84knoGvXWlxAFf74R0hLs6UPfeatc66GgvT0+wArVDVXVYuBicCgHRx/DlsWR/8tME1VC0OBfhqQkk8dly+Hm26CE0+s5Xj80lKroDltGtx7bw3WTHTOuS2C5PTbAivDXucBh1R1oIh0BDKBGTs4t6ZFBhJeWRlcdJGta/LEE7XooK9da6mcqVOt9Obll0ejmc65FBAk6FcVorY3o2sIMFlVy2pyrogMBYYCdOjQIUCTEsuDD1rhy2efrUVdnWXLYOBAyM2FcePgssui0kbnXGoIkt7JA8JzCe2A/O0cO4QtqZ3A56rqOFXNUtWsVq1aBWhS4liyxGrrDBpkD3Fr5N//hkMOgTVr4L33POA753ZakKA/F+gsIpki0hAL7FMqHyQiXYDmwJyw3VOB/iLSXESaA/1D+1JCaakthtKkCTz+eA3TOv/6lz0AaN8e5s6Fo4+OWjudc6mj2vSOqpaKyJVYsE4DxqvqIhEZCeSoasUHwDnARA0r5qOqhSJyF/bBATBSVQsjewvx629/s3g9cWINl6udNQv+8Ac45hiYMsU+NZxzLgK84FqUrFlj+ftTT7XFzQNbvtyWz2rd2qqw7b571NronEseQQuueRmGKJkwwYqp3XZbDU4qKoJTTrE80JtvesB3zkWcl2GIAlUbaHPYYXDAAQFPKimBM8+0GbfTp8M++0S1jc651ORBPwpmz4alS+GZZwKeoAp/+hPMmGF/Ihx1VDSb55xLYZ7eiYLHH7fMzNlnBzzh0UdtrcThw+GCC6LaNudcavOgH2GrV8Mrr1glzV12CXDCDz/YGrcnnggjR0a9fc651OZBP8ImTIDiYquLFshtt8GmTVZnuZ7/73DORZdHmQiqeIB7xBHQvXuAE/6ELAKeAAAR3ElEQVT7X3jqKbjySujcOertc845D/oRNHOmlcoZOjTAwapw/fW2+tVf/hLtpjnnHOCjdyJq3Dho3hzOOivAwW+/bfV0Ro+GFi2i3jbnnAPv6UdMQYE9wL3gggAPcEtK4IYbbEX0//u/Ommfc86B9/Qj5plnLJYHSu2MGwf/+x+8/jo0aBDtpjnn3K886O+kdetgwQIbm3/kkdCtWzUnrF0Lt98Oxx5rhXmcc64OedCvofXr4eGHIScHvvjC1jYBK5dz//0BLjBqFBQWwgMP+Bq3zrk650G/BsrK4Nxz4Y03LB1/8MFw8cVw4IHQuze0a1fNBT77DB56yMom9+pVJ212zrlwHvRrYMQIC/iPPGJD62ukuNg+IVq1gr//PSrtc8656njQD+hf/4J777WZtldcUYsL3HefTcZ67TUb1+mcczEQaMimiAwQkaUiskJEbt7OMWeLyGIRWSQiL4TtLxOR+aFtm2UWE8Enn8Cll0LfvtbLr3EqfuFCuOsuGDLEFst1zrkYqbanLyJpwBjgBGyh87kiMkVVF4cd0xm4BThCVYtEpHXYJX5R1YRNYOflweDBtgrW5Mm1GGFZWmo5/GbN7Amwc87FUJD0Th9gharmAojIRGAQsDjsmMuAMapaBKCqP0a6obGwcaN1zDdssMmzLVvW4iIPPrhlodxWrSLeRuecq4kg6Z22wMqw13mhfeH2A/YTkY9E5GMRGRD2XrqI5IT2D67qB4jI0NAxOQUFBTW6gWgaMQI+/xxefDFgAbXKli61ujqDB9eguL5zzkVPkJ5+VRnsyqup1wc6A32BdsAHItJDVdcCHVQ1X0T2BmaIyAJV/XKri6mOA8aB
LYxew3uIimXLLH9/6aVw8sm1uEB5uZ28yy7w2GM+Jt85FxeC9PTzgPZhr9sB+VUc87qqlqjqV8BS7EMAVc0Pfc0FZgK9d7LNdeLGGy1e33VXLS/w9NPw4Yc2Y6tNm4i2zTnnaitI0J8LdBaRTBFpCAwBKo/CeQ04FkBEMrB0T66INBeRRmH7j2DrZwFxafp0mDLFVi/cY49aXGD1avjzn60uw0UXRbp5zjlXa9Wmd1S1VESuBKYCacB4VV0kIiOBHFWdEnqvv4gsBsqAG1V1jYgcDjwuIuXYB8y94aN+4lFZGQwbBp06wTXX1PIiN98MP/8MY8f6aljOubgSaHKWqr4NvF1p321h3ytwXWgLP+Y/wAE738y689RTVkBt0iRIT6/FBT76yC5y443Qo0fE2+eccztDLF7Hj6ysLM3JyYnJz/7pJ1u1cP/9YdasWjx7LSmxgjxr18LixdCkSVTa6ZxzlYnIPFXNqu44L8MQ5u67LR3/4IO1HGzzyCP2Z8Krr3rAd87FJU84h+TmWgHMCy6wznqN5eVZnfyTT7Zx+c45F4c86AMffwwnngj161tvv1aGDbOnwLUqzuOcc3UjpYP+5s1w661wxBGwaZOtVb7XXrW40PTpVphn+HDIzIx4O51zLlJSNqc/f76lchYsgEsusYWsmjatxYXKyuD6622M5/XXR7qZzjkXUSkZ9J94Av70J8jIgDffrGWZhQoTJti6iS+9VMsxns45V3dSbshmaSnsuSd07WrrmdSqcmaF9ett3cROnWx8vufynXMx4kM2t+PDD2HNGpttu1MBH2zZw++/tyGaHvCdcwkg5R7kZmdDo0YwYED1x+5QXp4F/d/9Dg49NCJtc865aEupoK9qKZ3+/SMwd2rECCuffO+9EWmbc87VhZQK+p99Bt9+C6edFoELTZhgOaJOnSLRNOecqxMpFfSzs63o5amn7sRFVG1oZkaGDfJ3zrkEklIPcrOz4eijLV7X2tSpMHMmjBlji50751wCSZme/rJlVvhyp1M7Dzxg03YvvTQi7XLOuboUKOiLyAARWSoiK0Tk5u0cc7aILBaRRSLyQtj+C0VkeWi7MFINr6nsbPu6U7XQFi2CadPgyiuhYcOItMs55+pStekdEUkDxgAnYGvhzhWRKeErYIlIZ+AW4AhVLRKR1qH9LYDbgSxsMfV5oXOLIn8rO/bqq1Y9s0OHnbjI6NE263bo0Ii1yznn6lKQnn4fYIWq5qpqMTARGFTpmMuAMRXBXFV/DO3/LTBNVQtD700DdnaEfI199x18+ulOpnZWr4bnnrOCPTs9q8s552IjSNBvC6wMe50X2hduP2A/EflIRD4WkQE1ODfqXnvNvu5U0B83zkpx1nrhXOeci70go3eqqi9QuWBPfaAz0BdoB3wgIj0CnouIDAWGAnTYqfxL1bKzrURO1661vEBxsY3W6d8funWLaNucc64uBenp5wHtw163A/KrOOZ1VS1R1a+ApdiHQJBzUdVxqpqlqlmtWrWqSfurVVhoIyxPO20nyuNMngz5+XDttZFsmnPO1bkgQX8u0FlEMkWkITAEmFLpmNeAYwFEJANL9+QCU4H+ItJcRJoD/UP76sybb1rJ+1qndlRt0dwuXeC3v41o25xzrq5Vm95R1VIRuRIL1mnAeFVdJCIjgRxVncKW4L4YKANuVNU1ACJyF/bBATBSVQujcSPbk51tw+p/85taXmDOHMjJgcces+m8zjmXwJK6nv5338E++9gIy4cfruVFzj7bxubn5cGuu0akXc45F2lB6+knddf1nnsstXPddbW8wDffwCuv2KeGB3znXBJI2qC/cqUti3jxxTtRCPOWWyylc8UVkWyac87FTNIWXLv7bnsGO3x4LS/w4ou2jRq1k9N4nXMufiRlT/+bb+Cpp+CSS6Bjx1pcYOVKWzn9sMPgppsi3j7nnIuVpAz6d99tY/JrVe6+vNxyQiUl8OyzUD9p/xhyzqWgpItoX38N48fb
s9f27as9fFuPPALTp1vZhX33jXTznHMuppKupz9qFKSl2TPYGlu0yNI5p5zi9fKdc0kpqYJ+bi4884z18tu1q+HJxcVw3nnQtCk8+eRO1Gxwzrn4lVTpnVGjoEEDuLnKZV6qcd99MH++leTcY4+It8055+JB0vT0c3Ptuesf/2hlF2pk82bL5Z96KgyqvFSAc84lj6Tp6XfoAE8/DccfX4uTJ02CggK46qqIt8s55+JJ0gT9+vXh/PNrefKYMVZwv1+/iLbJOefiTdKkd2rts8/g449tMpZX0XTOJTmPcmPGQOPGcOGFsW6Jc85FXWoH/cJCeOEFG6q5++6xbo1zzkVdagf9p5+2xc69iqZzLkUECvoiMkBElorIChHZZhS8iFwkIgUiMj+0XRr2XlnY/srLLMZOebmthnXUUdCzZ6xb45xzdaLa0TsikgaMAU7AFjqfKyJTVHVxpUNfUtUrq7jEL6raa+ebGmHvvmuD++++O9Ytcc65OhOkp98HWKGquapaDEwEEn8G05gxsOeeO7FiunPOJZ4gQb8tsDLsdV5oX2VniMh/RWSyiITXt0wXkRwR+VhEBlf1A0RkaOiYnIKCguCtr63cXHjnHSvS07Bh9H+ec87FiSBBv6rKY5VXU38D6KSqPYH3gAlh73UILdZ7LvCQiOyzzcVUx6lqlqpmtWrVKmDTd8LYsTYmf+jQ6P8s55yLI0GCfh4Q3nNvB+SHH6Cqa1R1c+jlE8DBYe/lh77mAjOB3jvR3p2Xn28PcM86C9pW9QeLc84lryBBfy7QWUQyRaQhMATYahSOiLQJezkQWBLa31xEGoW+zwCOACo/AK5bf/mLrYo1alRMm+Gcc7FQ7egdVS0VkSuBqUAaMF5VF4nISCBHVacAV4vIQKAUKAQuCp3eFXhcRMqxD5h7qxj1U3fmz7ex+dddB/tsk2VyzrmkJ6qV0/OxlZWVpTk5OZG/sKqV4PziC1ixwmfgOueSiojMCz0/3aGkqbJZrTffhBkzrG6+B3znXIpKjTIMJSVw443QpYutsuKccykqNXr6jz8OS5fClCm2nqJzzqWo5O/pFxXBHXfAccfBKafEujXOORdTyR/0//pXK6F8//0gVc0zc8651JHcQf+77+zB7UUXQa/4q/nmnHN1LbmD/t//DmVlNiHLOedcEgf9H36wB7jnnw+ZmbFujXPOxYXkDfr33w/FxXDrrbFuiXPOxY3kDPqrV1slzXPOgc6dY90a55yLG8kZ9B98EDZuhOHDY90S55yLK8kX9AsLbcTOmWdC166xbo1zzsWV5Av6Dz8M69bBiBGxbolzzsWd5Ar6P/8Mo0fD4MHQs2esW+Occ3EnuYL+o4/C2rXey3fOue0IFPRFZICILBWRFSJycxXvXyQiBSIyP7RdGvbehSKyPLRdGMnGb2X9enjgATjpJDj44OqPd865FFRtlU0RSQPGACdg6+XOFZEpVayA9ZKqXlnp3BbA7UAWtpj6vNC5RRFpfbiff4Zjj4Xrr4/4pZ1zLlkEKa3cB1gRWtgcEZkIDCLYWre/BaapamHo3GnAAODF2jV3B/baCyZNivhlnXMumQRJ77QFVoa9zgvtq+wMEfmviEwWkfY1PNc551wdCBL0q6pHXHlh3TeATqraE3gPmFCDcxGRoSKSIyI5BQUFAZrknHOuNoIE/TygfdjrdkB++AGqukZVN4dePgEcHPTc0PnjVDVLVbNatWoVtO3OOedqKEjQnwt0FpFMEWkIDAGmhB8gIm3CXg4EloS+nwr0F5HmItIc6B/a55xzLgaqfZCrqqUiciUWrNOA8aq6SERGAjmqOgW4WkQGAqVAIXBR6NxCEbkL++AAGFnxUNc551zdE9VtUuwxlZWVpTk5ObFuhnPOJRQRmaeqWdUdl1wzcp1zzu2QB33nnEshcZfeEZEC4JuduEQGsDpCzYm1ZLoXSK77SaZ7Ab+feBb0XjqqarXDH+Mu6O8sEckJktdKBMl0L5Bc95NM
9wJ+P/Es0vfi6R3nnEshHvSdcy6FJGPQHxfrBkRQMt0LJNf9JNO9gN9PPIvovSRdTt8559z2JWNP3znn3HYkTdCvbnWveCci40XkRxFZGLavhYhMC606Ni1UvyjuiUh7EXlfRJaIyCIRuSa0P1HvJ11EPhWRL0L3c2dof6aIfBK6n5dCtakSgoikicjnIvJm6HUi38vXIrIgtGpfTmhfQv6uAYjI7qES9f8L/Rs6LJL3kxRBP2x1rxOBbsA5ItIttq2qsWewBWbC3QxMV9XOwPTQ60RQClyvql2BQ4ErQv8/EvV+NgPHqeqBQC9ggIgcCtwHPBi6nyLgkhi2saauYUthREjsewE4VlV7hQ1tTNTfNYDRwLuquj9wIPb/KXL3o6oJvwGHAVPDXt8C3BLrdtXiPjoBC8NeLwXahL5vAyyNdRtreV+vY8ttJvz9AI2Bz4BDsAkz9UP7t/odjOcNK3E+HTgOeBNb9yIh7yXU3q+BjEr7EvJ3DWgKfEXoeWs07icpevok7wpde6jq9wChr61j3J4aE5FOQG/gExL4fkLpkPnAj8A04EtgraqWhg5JpN+5h4A/A+Wh1y1J3HsBW5jp3yIyT0SGhvYl6u/a3kAB8HQo/fakiOxKBO8nWYJ+oBW6XN0SkSbAK8C1qvpzrNuzM1S1TFV7Yb3kPkDXqg6r21bVnIicAvyoqvPCd1dxaNzfS5gjVPUgLL17hYgcHesG7YT6wEHAWFXtDWwgwqmpZAn6gVboSkCrKhaoCX39McbtCUxEGmAB/3lVfTW0O2Hvp4KqrgVmYs8qdheRijUpEuV37ghgoIh8DUzEUjwPkZj3AoCq5oe+/ghkYx/Kifq7lgfkqeonodeTsQ+BiN1PsgT9alf3SlBTgAtD31+I5cbjnogI8BSwRFUfCHsrUe+nlYjsHvp+F+B47OHa+8CZocMS4n5U9RZVbaeqnbB/JzNU9fck4L0AiMiuIrJbxffY6nwLSdDfNVX9AVgpIl1Cu/oBi4nk/cT6wUUEH4CcBCzDcq3DY92eWrT/ReB7oAT7tL8Ey7VOB5aHvraIdTsD3suRWHrgv8D80HZSAt9PT+Dz0P0sBG4L7d8b+BRYAUwCGsW6rTW8r77Am4l8L6F2fxHaFlX820/U37VQ23sBOaHft9eA5pG8H5+R65xzKSRZ0jvOOecC8KDvnHMpxIO+c86lEA/6zjmXQjzoO+dcCvGg75xzKcSDvnPOpRAP+s45l0L+H1l4n9p3EuhAAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Metric key is 'acc' in older Keras/TF1 and 'accuracy' in TF2 —\n",
    "# fall back so the plot is not silently empty on newer versions.\n",
    "acc = history.history.get('acc') or history.history.get('accuracy')\n",
    "val_acc = history.history.get('val_acc') or history.history.get('val_accuracy')\n",
    "plt.plot(history.epoch, acc, c=\"r\", label=\"acc\")\n",
    "plt.plot(history.epoch, val_acc, c=\"b\", label=\"val_acc\")\n",
    "plt.legend()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<matplotlib.legend.Legend at 0x20f7fad5d30>"
      ]
     },
     "execution_count": 13,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAD8CAYAAACMwORRAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3XmcjvX+x/HX11jLlj1mNERkHZlUEpGyFNKGsuTXcipbKcppOaVU5+ikTWlfFZIiihaKSjI0si9ZB2WJlqMJM9/fH58bQ5Yxc89cc9/zfj4e12Pc93W5fa5zps9853t9vp+v894jIiLRpUDQAYiISPgpuYuIRCEldxGRKKTkLiIShZTcRUSikJK7iEgUUnIXEYlCSu4iIlFIyV1EJAoVDOofLleunI+Pjw/qnxcRiUjz5s3b5r0vf6zrAkvu8fHxJCUlBfXPi4hEJOfcusxcp2kZEZEopOQuIhKFlNxFRKJQYHPuIpI/7dmzh5SUFFJTU4MOJU8rWrQosbGxFCpUKEt/X8ldRHJVSkoKJUqUID4+Hudc0OHkSd57tm/fTkpKCtWqVcvSZ2haRkRyVWpqKmXLllViPwrnHGXLls3WbzdK7iKS65TYjy27/xtFXnKfOxeGDAFtDygickSRl9y/+w4efdS+iohkQfHixYMOIcdFXnLv2RNKlIBnngk6EhGRPCvyknuJEnDttTB2LPz8c9DRiEgE894zaNAg6tWrR/369Rk7diwAmzdvpnnz5iQkJFCvXj1mzZpFWloa11577f5rR4wYEXD0RxeZpZB9+sDTT8OLL8I99wQdjYhk1a23QnJyeD8zIQGeeCJTl06YMIHk5GQWLFjAtm3bOPPMM2nevDlvv/02bdq04e677yYtLY1du3aRnJzMxo0bWbRoEQA7d+4Mb9xhFnkjd4BateCii+C552DPnqCjEZEI9dVXX9GtWzdiYmKoWLEiLVq0YO7cuZx55pm8+uqr3H///SxcuJASJUpQvXp1Vq9eTb9+/Zg6dSolS5YMOvyjisyRO0C/ftChA3zwAVx5ZdDRiEhWZHKEnVP8EarumjdvzsyZM5kyZQo9evRg0KBB9OzZkwULFjBt2jRGjhzJuHHjeOWVV3I54syLzJE7QLt2UK2aHqyKSJY1b96csWPHkpaWxtatW5k5cyZNmjRh3bp1VKhQgRtuuIHrrruO+fPns23bNtLT07n88st58MEHmT9/ftDhH1WmRu7OubbAk0AM8JL3/tFDzo8AWoZengBU8N6XDmegfxMTA7fcAoMGwQ8/QIMGOfrPiUj06dy5M7Nnz6Zhw4Y45/jPf/5DpUqVeP311xk+fDiFChWiePHivPHGG2zcuJHevXuTnp4OwCOPPBJw9EfnjvRryf4LnIsBVgAXAinAXKCb937JEa7vBzTy3v/f0T43MTHRZ3uzjl9+gdhY6N4dXnghe58lIrli6dKlnH766UGHEREO97+Vc26e9z7xWH83M9MyTYBV3vvV3vvdwBig01Gu7wa8k4nPzb4yZeCaa+CttyzRi4gIkLnkXgXYkOF1Sui9v3HOnQJUA6ZnP7RM6tsX/vwTXn011/5JEZG8LjPJ/XDda440l9MVGO+9TzvsBzl3o3MuyTmXtHXr1szGeHQNG8J558HIkZB22H9WRCTfyUxyTwHiMryOBTYd4dquHGVKxnv/gvc+0XufWL78MTfvzrzbboM1ayzJL18evs8VEYlQmUnuc4GazrlqzrnCWAKfdOhFzrlawEnA7PCGmAmXXgpvvgnLltnqtMce0yheRPK1YyZ37/1eoC8wDVgKjPPeL3bODXXOdcxwaTdgjD9W+U02/fYb/P77IW86ZxUzS5ZA27ZWHtmsGSxdmpOhiIjkWZlaxOS9/8h7f5r3/lTv/bDQe/d57ydluOZ+7/1dORXoPq++CuXKWQ5/9llISclwslIlmDAB3nkHVq6ERo3g
7rvtJ4KISD4ScStUzz8f+veH1autf1hcHDRuDA88ACtWYKP4rl1h8WK44gp4+GGoUcN+EqgPjYgcp6P1fl+7di316tXLxWgyL+KSe8OGMHy4JfKlS+Hf/4ZixSy516oFZ51lDSO3uIpW/z53LtStaz8J6tWzXjTaxUlEolzkNg4Date2Y/Bg2LQJxoyx56r9+1sBTZs2cP31iVwybTqFpk22Czt3tuma226DLl2gcOGgb0Mk3wqi4++dd97JKaecwi233ALA/fffj3OOmTNnsmPHDvbs2cNDDz1Ep05HW6v5d6mpqdx8880kJSVRsGBBHn/8cVq2bMnixYvp3bs3u3fvJj09nffee4/KlStz1VVXkZKSQlpaGvfeey9dunTJzm3/TcSN3I+kcmUYOBC+/x4WLjzQcuayy+CUeMd9czuwfspCeOkl+Osv29HplFPgwQchXDX3IpLnde3adf+mHADjxo2jd+/evP/++8yfP58ZM2Zw++23H7Fj5JGMHDkSgIULF/LOO+/Qq1cvUlNTGTVqFAMGDCA5OZmkpCRiY2OZOnUqlStXZsGCBSxatIi2bduG9R4Ba3kZxNG4cWOf0/bs8X7SJO8vvth757wvUMD+/M3X6d5Pm+Z9u3beg/dFinjfr5/327fneEwi+d2SJUuCDsHXrl3bb9y40ScnJ/umTZv63bt3+z59+vj69ev7hg0b+qJFi/rNmzd7770/8cQTj/g5a9as8XXr1vXee3/ppZf6zz//fP+5Zs2a+QULFvjRo0f7OnXq+EcffdSvWLHCe+/98uXLfXx8vB88eLCfOXPmET//cP9bAUk+Ezk2akbuh1OwoLV8nzzZ1jgNGQJJSdD0XEev0Rex+eWPrHyyRw9b4XraabYBiGrkRaLaFVdcwfjx4xk7dixdu3Zl9OjRbN26lXnz5pGcnEzFihVJTU09rs/0RxjpX3311UyaNIlixYrRpk0bpk+fzmmnnca8efOoX78+Q4YMYejQoeG4rYNEdXLP6JRT4KGHYNUq+Oc/bX6+Vi14bMrp7B75os3n1K9vbYTPOAO+/DLokEUkh3Tt2pUxY8Ywfvx4rrjiCn799VcqVKhAoUKFmDFjBuvWrTvuz2zevDmjR48GYMWKFaxfv55atWqxevVqqlevTv/+/enYsSM//PADmzZt4oQTTqB79+7ccccdOdIbPt8k932KF4dhw6xSskULm5tv0ACmb2sA06fDu+/Czp1Wc9mzpzUlE5GoUrduXX7//XeqVKnCySefzDXXXENSUhKJiYmMHj2a2rVrH/dn3nLLLaSlpVG/fn26dOnCa6+9RpEiRRg7diz16tUjISGBZcuW0bNnTxYuXEiTJk1ISEhg2LBh3JMDe0Efs597TglLP/cw+OgjGDDARvR9+lhp5YkF/rT6+GHD4MwzYeJEWyAlItmmfu6Zl9P93KNa+/ZWVXPrrTbtnpAAs5OLWRXNhAmwaJEVzy9cGHSoIiKZlu+TO9giqBEjYMYMW8TarJk9fP2r3aUwaxbs3QvnngtTpwYdqogEYOHChSQkJBx0nHXWWUGHdVQRvYgp3M4/30bxAwfCo49aLv/ggzM4Zc4cK7u5+GJ46imbvxGRLPPe49zhtorIm+rXr09yuFdbHUN2p8w1cj9EyZK2zmniRCufTEyEL3+MtRH8xRfbzk933gmhTXJF5PgULVqU7du3Zzt5RTPvPdu3b6do0aJZ/gyN3I+gY0eYMwc6dYLWreGpp4pz04T3cf37wX/+Axs3wiuvqH2ByHGKjY0lJSWFsO3GFqWKFi1KbGxslv++kvtR1KplCf6aa6z8PTk5hqefGknhKlXgnntgyxZ47z0oUSLoUEUiRqFChahWrVrQYUQ9TcscQ6lSNkXzz3/CCy9A6wsdv/e/2xrLT59uxfI//RR0mCIiB1Fyz4SYGCt5f+cdmD0bLrkEdl11LXz4ofUebtrUCuVFRPIIJffj0LWrtYj/6ivbtjW1ZTurn/z9d6uf
/OGHoEMUEQGU3I9bly72HPXTT+HKK2F3wzNh5kzrUtaihQ3tRUQCpuSeBb16wahR1m3ymmtgb83TbThfrpyV1nz2WdAhikg+p+SeRf/4h61qHT8eeveG9KrxVgtfo4bVw7//ftAhikg+puSeDbfeag9a33rLdu3zFSvBF1/Yjt1XXAHjxgUdoojkU6pzz6YhQ2DbNhvFn3wy3HXXSTYh364ddO9uS15zYgstEZGj0Mg9m5yDxx6Dq6+2RP/aa8CJJ1qZZL16tonrV18FHaaI5DNK7mFQoICtabrwQrj+epgyBVv9NHUqxMVZYXwuNx0SkfxNyT1MChe2TgQJCVYi+e23QIUKNkVTsiRcdJEteBIRyQVK7mFUooSN2itXtoKZ5cuBqlUtwYOVSW7YEGiMIpI/KLmHWcWKMG2atSy4+GJ72EqtWvbmr7/am7/9FnSYIhLllNxzwKmnwgcfQEqKPU/96y+gUSMril+yxPoY7N0bdJgiEsWU3HNI06b2kHXWLLjhBvAee+L67LPw8cdWJK/NCkQkh6jOPQd162bNIu+7D047zVrAc+ON9mD1v/+1N/v3DzpMEYlCSu457J57LJffe691JujaFfj3v+HHH21Za/XqViopIhJGmZqWcc61dc4td86tcs7ddYRrrnLOLXHOLXbOvR3eMCOXc7Yna7NmcO21oRLJmBjrWdCokWX7778POkwRiTLHTO7OuRhgJNAOqAN0c87VOeSamsAQ4FzvfV3g1hyINWIVKWJ9xKpUsQesmzZhq1gnTYIyZWwuft68oMMUkSiSmZF7E2CV93619343MAbodMg1NwAjvfc7ALz3W8IbZuQrV84qaH77DS6/PFRBU7mybfZRvDi0agVffx10mCISJTKT3KsAGVfepITey+g04DTn3NfOuW+dc+qUdRj168Prr9vUTN++oWKZU0+1kpqKFW0V6/TpQYcpIlEgM8ndHea9Q2v4CgI1gfOBbsBLzrnSf/sg5250ziU555K2bt16vLFGhcsvt822X3oJnn8+9GZcnO3mVL06tG8fak4jIpJ1mUnuKUBchtexwKbDXDPRe7/He78GWI4l+4N471/w3id67xPLly+f1Zgj3tChlsP79bNBOwCVQr3g69aFzp1hwoQgQxSRCJeZ5D4XqOmcq+acKwx0BSYdcs0HQEsA51w5bJpmdTgDjSYxMTB6NFSrZnt6pKSETpQta9MyiYlWJP/tt4HGKSKR65jJ3Xu/F+gLTAOWAuO894udc0Odcx1Dl00DtjvnlgAzgEHe++05FXQ0KF0aJk6EP/+ESy+FXbtCJ0qVsl7wsbF2Qo3GRCQLnA9oCXxiYqJPSkoK5N/OSz78EDp1sjbBY8ZYXTxgPWjOPttWPs2aZaWTIpLvOefmee8Tj3WdessErEMHePRR2271wQcznKhTx7J9cjL06gXp6YHFKCKRR8k9Dxg0CHr2hH/9C959N8OJ9u1h+HDbBWTo0MDiE5HIo+SeBzhnZZHnnGOD9PnzM5wcOND6FjzwwCGZX0TkyJTc84iiRa1FQbly0LEjbN4cOuEcjBoF555rSV5b9YlIJii55yEVK1q7mR07rNQ9NTV0okgRGDvWvvboAXv2BBqniOR9Su55TEICvPkmzJkDffpk2M+jShWbu/nuOxg2LNAYRSTvU3LPgy67zPrAv/IKPPdchhNXXmkj94cesuwvInIESu551AMP2B4eAwZY25n9nn7aFjh17w5//BFYfCKStym551EFCth+Hqeeai0K9i9ULVUK3njDdnIaODDQGEUk71Jyz8NKlbIe8Kmp9oD1zz9DJ5o3t+L4F1+0J7AiIodQcs/jate2JmPz5sFNN2V4wDp0KDRsCNdfD+vXBxqjiOQ9Su4RoEMHm4N/4w14+eXQm0WKwDvv2JZO7dvDzp2BxigieYuSe4S4+27barVfP1i4MPTm6adb3/fly20XkN27A41RRPIOJfcIERNj9e+lS1tF5P5CmQsusG2d
pk+HG27IMG8jIvmZknsEqVgR3n4bVq6Em2/OkMd79YL777d5mwceCDJEEckjlNwjTMuW1j3yrbfg1VcznLjvvgMNxl57LaDoRCSvUHKPQHffDa1aQd++sGhR6M19rSVbt7bpmYNWPolIfqPkHoH27cFasiRcdRX8/nvoROHCMH68bc7apQv89FOgcYpIcJTcI1SlSjb/vmKF7aWdlhY6UaqUbe7x669w9dWwd2+gcYpIMJTcI1irVtZqZsoUW7C6X/361nFsxgyboBeRfKdg0AFI9tx8MyxbBiNG2GrWG28MnejVC776Ch5+GJo2hYsvDjROEcldGrlHgf/+F9q1s/7vn3+e4cRTT1mD+B49YO3aoMITkQAouUeBggVhzBioVcs6SC5fHjpRrJg9YE1Ls5VPf/0VaJwiknuU3KNEyZIweTIUKmR94H/5JXTi1FOt7j0pCfr3DzJEEclFSu5RJD7eWgSvW2dT7unpoROdO8Odd8ILL9ghIlFPyT3KNG1qc/CTJ9vX/YYNgzZtbOXT7NmBxSciuUPJPQr17Wtz70OGWMEMYCuf3n4b4uKsg+SmTYHGKCI5S8k9CjlnjSL3LVTdujV0okwZm7f57TfL/moRLBK1lNyjVKlS8O67sH277aW9fwVr/frWcWz2bD1gFYliSu5RLCHBVrB+8omtZdrvyivtAevzz9vqJxGJOkruUe76623k/q9/waefZjgxbJhV0QwcCIMHZyitEZFooOQe5ZyzNjN161oHyf0LnGJibN7mlltg+HBrMpaaGmisIhI+mUruzrm2zrnlzrlVzrm7DnP+WufcVudccui4PvyhSlYVLw4ffmgLnDp0yLDAKSYGnnnGkvvYsbZJ6/6TIhLJjpncnXMxwEigHVAH6Oacq3OYS8d67xNCx0thjlOyKT7e9tJet86m3PfsCZ1wDu64w/oXfPedFcqvWRNkqCISBpkZuTcBVnnvV3vvdwNjgE45G5bkhGbNbIHq9OlWKHPQXtpdusBnn8GWLTaC378Dt4hEoswk9yrAhgyvU0LvHepy59wPzrnxzrm4sEQnYderlxXKjBoFI0cecvK886wOfvVquPXWQOITkfDITHJ3h3nPH/L6QyDee98A+Ax4/bAf5NyNzrkk51zS1v0rayS3PfwwdOoEAwbAxx8fcrJ5c8v+L78M778fSHwikn2ZSe4pQMaReCxw0Np17/127/2+frIvAo0P90He+xe894ne+8Ty5ctnJV4JgwIF4K23oEED60Qwa9YhFzzwAJxxhtVRqk2BSETKTHKfC9R0zlVzzhUGugKTMl7gnDs5w8uOwNLwhSg5oXhxmDYNqla1TZrmzs1wsnBh24H7zz/h2mtVAy8SgY6Z3L33e4G+wDQsaY/z3i92zg11znUMXdbfObfYObcA6A9cm1MBS/hUqGA7N5UrZw0jf/ghw8nateHxx23l09NPBxajiGSN8/7Q6fPckZiY6JOSkgL5t+Vga9bYs9Q9e2yK5rTTQie8h44dLcHPnWt9aUQkUM65ed77xGNdpxWqQrVqB/ZeveCCDNutOmcPVkuVsuL4jRuDClFEjpOSuwC2/+qnn8L//getWsGGfcWvFSpYm4JNm+Ccc2DJkkDjFJHMUXKX/Ro0sIes27dDy5YZBurNm8PMmTZvc+65GXYAEZG8SsldDnLmmdYieMsWS/D7KyETEuCbb2wk37q19TIQkTxLyV3+5qyzYOpU2LzZpmh++il0olo1+PpraNTIdnL62xJXEckrlNzlsJo2tdWrKSmW4H/+OXSiXDl7+tqhg23W+sEHgcYpIoen5C5H1KwZTJlinSRbt4adO0MnTjgBxo2DxERb5LR6dZBhishhKLnLUbVoARMn2iYfl16aYT+PIkUswTtnu4D89ddRP0dEcpeSuxxT69bw+uvw5ZfQs2eGbgTVqtmJefPg9tsDjVFEDqbkLpnSrRs89piVvA8cmKEXfMeOttnHyJG2m5OI5AkFgw5AIsfA
gfaA9YknIC4uw2D94YetTPL6662SZn//AhEJikbukmnOwX//a1Psd9wBb78dOlGokG3TV6SIlUjuf/IqIkFRcpfjUqCATbO3aGGFMh9+GDoRF2dtgpcutQVP334bZJgi+Z6Suxy3okWtgiYhwTb7mDw5dKJNG2tN4Jy1mXzsMfWCFwmIkrtkSalS1qagYUNL8B99FDpx1lnw/ff2oHXQILjkEtCWiiK5Tsldsqx0aUvw9epB587WsmD/ifHjrYLm889tiL9gQaCxiuQ3Su6SLSedZK2C69SxRU6ffBI64RzccgvMmWMT9e3awfr1gcYqkp8ouUu2lSkDn31mO/N16mSD9v0SEqxJzf/+B+3bq5JGJJcouUtYlC1rCf6MM2zTpqFDMyx0qlfPWgQvX24T9Lt3BxqrSH6g5C5hs69hZI8e8K9/wdVXw59/hk5ecIFt2Td9ui12CmjvXpH8QitUJayKFrU6+Lp1YcgQ+PFH6wpcuTLWmGbtWsv81arBAw8EHa5I1NLIXcLOObjzTkvqS5ZAkybWWwyAe++F3r1t3ubxxzWCF8khSu6SYzp2tJYzBQvamqaxY7HM//zzVjt5++1w3XVqFyySA5TcJUc1aADffQeNG0PXrnDPPZAeU8hKau67D1591XoZ7N+sVUTCQcldclyFCvag9brrYNgwK5j5Y1cBm3N/7z1YtMh2dZozJ+hQRaKGkrvkisKF4cUX4cknYdIk26N1zRrgsstg9mx7Etu8OTzzDKSlBR2uSMRTcpdc4xz0729tCjZssNbvEyYA9evD3LnQsiX062ej+Fmzgg5XJKIpuUuuu/BCmD/f9vS4/HLL56knlrWVrGPHwvbtNoq/+mrYuDHocEUikpK7BKJaNesOfNttNhPTtCms+jG02fbSpVYyOWEC1KoFTz0VdLgiEUfJXQJTuLCVuk+caGubzjgjtLvTiSdaHfzSpVZJM2CA/QQQkUxTcpfAdewIyck29X7NNbYZ944d2PB+4kS7oH9/251bRDJFyV3yhKpV4csv4aGHrAS+fn0rn6RgQduftWlT6N4dZswIOlSRiJCp5O6ca+ucW+6cW+Wcu+so113hnPPOucTwhSj5RcGCcPfdVhlZvDi0bm1z8n9SzOona9SwnsLJyUGHKpLnHTO5O+digJFAO6AO0M05V+cw15UA+gNaiSLZkpho1TT9+sETT9hc/IwFZWDaNNvlqV07WL066DBF8rTMjNybAKu896u997uBMUCnw1z3IPAfIDWM8Uk+dcIJViTzySfWeqZVK+gxJJafR39mb1xwgaZoRI4iM8m9CrAhw+uU0Hv7OecaAXHe+8lhjE2ECy+ExYutJ83YsVCrw2k8e/180lxBy/jdu8PPPwcdpkiek5nk7g7z3v4+rc65AsAI4PZjfpBzNzrnkpxzSVu3bs18lJKvFSsGDz4ICxdaA7I+w+M5p8xyFt34FIwbZ7Xwzz6rtgUiGWQmuacAcRlexwIZW/iVAOoBXzjn1gJnA5MO91DVe/+C9z7Re59Yvnz5rEct+VKtWraV3+jRsGZdARq/1o9H+m5kb6MzoU8fOPtsSEoKOkyRPCEzyX0uUNM5V805VxjoCkzad9J7/6v3vpz3Pt57Hw98C3T03uu/Mgk756wrwZIlVjjzzxHlOeePT1j8n8mQkmI7g/TpEyqUF8m/jpncvfd7gb7ANGApMM57v9g5N9Q51zGnAxQ5nPLlbUZm3DhYu9Zxxj0X88iNa0i9+TYYNcqG+W+8oZ2eJN9yPqBv/sTERJ+kX6ElDLZsscH6+PFQpQoM6b6B62Z0p+h3M20LqMcesxG9SBRwzs3z3h9zLZFWqErEq1DBOhN89pl1LOj77zhqbPyCkVd/TeqS1XDWWQd6HIjkE0ruEjUuuABmzrQkHx/v6Pt2U2oUWc+T7afxv5nzrIH8lVdabaVIlFNyl6jinCX5WbPg00+hWvUC3PrRRZwSs4EHW3zGjqlzrHHN8OFBhyqSo5Tc
JSo5Z71pZs2y4+xzCnDflxdQ1a9jUM0P2DJ4OLz0UtBhiuQYJXeJes2aweTJsGABdOjoeHxVB2oWXMPjNy5l97sTgw5PJEcouUu+0aCBbQayeLGjacsi3O7/S4MutZk6fGHQoYmEnZK75Du1a8NH0wry4ejfSCtYlHaD69Ohxa+sXBl0ZCLho+Qu+ZJzcMnVJVm02PHvUsP4YlYMdep4+vUDtT2SaKDkLvlakZpVGTz7MlaVSuT6vaN4bmQap1bdzcP372bXrqCjE8k6JXeR00+n4qLPee7B7SyKbUfL1I+5+4HCnFb+Fx7vv4YFyZ709KCDFDk+aj8gkpH38NVXzHz0GwZPbcWc9DMBOOnEvzivZUHObxVD27Zw+ukBxyn5VmbbDyi5ixzJH3+w7skP+HLkIr7cXJMvY1rxY1o1AK66CoYOtf5kIrlJvWVEsqt4cU65uzs9Nz7Cy5+ewqr2A9hAHPfGPMyU93dTt67n+uthw4Zjf5RIblNyFzmWfctdJ00iduUMhl7xA6v3xNLvhJd58/U0atTwDBgAq1YFHajIAUruIsejRg0YM4YKX7zLiGpPs3JvNXqU/YiRIz01a9rPgHHjYPfuoAOV/E7JXSQrWrSAefOoOvIuXkrtwXpflYfO/pBVK9Lo0gViY2HQIGt5oP1CJAhK7iJZVbAg3HILrFxJ5Zs7cfd3l/LjH5X4+JYPaXZuOiNGQEKCVdbcd586DUvuUnIXya6yZeGZZyA5mZhGDWj7bEcmrGzA5jFf8txzULkyPPQQ1Ktnx7BhsGZN0EFLtFNyFwmX+vVtp5AJE2DXLspfeT43vdKE6b1eZ9PqVJ55Bk46Ce65B6pXh3PPhWefhW3bgg5copGSu0g4OQedO8OSJfD00/D773DttVRKjKXPhruY9eZa1q6Fhx+GX3+1vV9PPhnatIGRI2H9+qBvQKKFFjGJ5CTvYcYMy9wTJ0J6OrRrB//4B75de35YUpC334b332d/V8qEBNvy9ZJLoHFjKKAhmGSgFaoieU1KCjz/PLz8MmzebCU1111nR1wcy5fDpEl2fP21/VwoX95G9W3bwkUX2WvJ35TcRfKqPXtsa6jnn4dPPrGpnB494LHHoFw5wNoOT5sGU6fa123b7LKysbqFAAAMy0lEQVTEROjQwY6GDe09yV+U3EUiwZo1Vmnz1FNQqhSMGAHdux+UtdPTYf58+PhjmDIFvvvORvVxcTZ106EDnH8+FCsW3G1I7lFyF4kkCxfCP/4Bs2fDBRfAqFG2GvYwfv7ZkvyHH9rAf9cuKFrU1lW1bWtHrVoa1UcrJXeRSJOeblM1d91l/Qu6d7cVUDVq2FG9umXxDFJT4csvbfpm6lRYtszer1oVzjoLzjjDjkaNNF8fLZTcRSLVpk1wxx022f7LLwfed87KZ557zibfD2PtWvtrn31mUzmrVx84FxcHTZpA06ZwzjmW9IsUydlbkfBTcheJBr/8Yu0mV62CFSvgxRdtXubOO62nwTGy844dkJxsiT4pCb791n4AABQubAm+VSu4+GIb6cfE5PwtSfYouYtEo5074bbb4LXXoE4d+3rmmcf1ET/9ZFP7s2dbyeWcOZCWZl0U2ra1RN+yJVSqlCN3INmk5C4SzT7+GG64werlb73V/ly7dpY+ascOezA7ZYp97L52CJUrH5izb9zY+uLExUGhQmG8DzluSu4i0e7XX+H2221RFNjD18sus6NRoyyVy6Sl2fTN7Nkwb55N5yxbxv4NwgsUsLVX8fF21KwJ550HZ5+t+fvcouQukl+kpMAHH1jDspkzLUPHx8Pgwbb6tXDhbH38//5nfemXLoV162zOft+RkmI190WL2kPa88+H5s2th1rZstm/Nfm7sCZ351xb4EkgBnjJe//oIedvAvoAacAfwI3e+yVH+0wld5EcsG2bFcC//LJNqMfHw/33wzXXWP/5MNuxA2bNgi++sCM5+cDmJOXL22OB008/cNSqZSN/9cvJurAl
d+dcDLACuBBIAeYC3TImb+dcSe/9b6E/dwRu8d63PdrnKrmL5CDvrSbynntsfqV2bUvyrVrZkDqHsuuOHfaAdskSO5Yuta87dx645oQTLMnXqgWnnnpgiic+3urzs/mLRtQLZ3I/B7jfe98m9HoIgPf+kSNc3w3o6b1vd7TPVXIXyQXe25TNvfce2AqqUCErhalc2Y5mzaBLF6hSJcdC+PlnWL7c5u+XLTvw5/XrbRZpH+cs4TdubMe+B7onnZQjoUWkcCb3K4C23vvrQ697AGd57/secl0fYCBQGGjlvV95mM+6EbgRoGrVqo3XrVuXydsRkWxJS7OSmFWrrMJm0yY71q+3TOucTZhfc409kM2lbLp3L2zceGAOf80a68Qwb57N7+9ToYKFVLr0gaNSJXtunJhov5jklxr9cCb3K4E2hyT3Jt77fke4/urQ9b2O9rkauYvkEStWwDvvwOjR1lS+cGHo1cu6VJYsGVhY27bB999bol+zxoqDdu48cKSk2MNesKmeRo3siI+3ef19R+XK0VW+GeS0TAFgh/e+1NE+V8ldJI/x3jLpq69a47K4OHj9detIlgelpdkvHfPmWflmUhL88AP88cfB1zlno/y4uIOPChWgTJkDx0kn2Q+UpUsPTB8tW2bn/u//oFOnvFHuGc7kXhB7oHoBsBF7oHq1935xhmtq7puGcc51AP51rH9cyV0kD/vmG+jZ05rTDBxoO3wf0rQsL/IefvvNRvUpKbBhgx0Z/7xhw4ER/9HExtpD35UrbfaqXDn7heaGG+z9oIS7FLI98ARWCvmK936Yc24okOS9n+ScexJoDewBdgB9Myb/w1FyF8nj/vjDauWfew7q1oXHH7d6xpNPzpGyytzivU3rbN9urXv2ff3lFxu9165tybtECbs+LQ0+/RReesl2Sty710o8y5Sxa/YdpUvbFFCVKgeOypXDP9rXIiYRCY+pU21eYvNmex0TY1mralXLYPvmNPYdJ58MrVvnjTmMMPv5Z5up+vpr2/t83/Hbb1YGmpr6979zwgm2D8u+o3Rp6N8f2rfPWgyZTe6R++NXRHJH27Y2+fzNNzY/sWGDfV2/3p547thhR8aaxvLlbf7ipptsgjtKVKxov8wczr7fCDZuPHBs2nTgAfCvv9rxyy+H/yEQbhq5i0j2eW/TODt2WD39qFG2UrZAAXsSefPNNt9RvLgdETytEzSN3EUk9zh3YPK5alVo184K10eNsh70EyYcfH2RInZtixY25XPRRUr4YaaRu4jkrD//tFYIW7fa6H7fsW0bTJpkXytXhmuvhd69j7h3rBg9UBWRvG/3bpg8GV55xZrJp6fbk8ZhwyAhIejo8qTMJnf1ZhOR4BQubO0OJk+2B7RDh1oz+UaNoFs3KzKXLNHIXUTylp07rfXBiBHw11/Wk/7yy20l0po1BxrRFC0KDz5oO4XkI5qWEZHI9tNPNj3z/POwZ4+9l3ErqJUrrfa+d2945BGrU8wHNC0jIpGtUiV4+mnrZPn55/Djj1Ygvm4dfPmlNZYZPBjeesuWlD71lC0fFUAjdxGJdMuWwYAB1tL41FOtRUK5cgeOSpWgTRv7GgVU5y4i+UPt2tYiYeJEq6vfuNE2fd269cBS0AIFrJa+Rw+49FLrCRDlNHIXkejkPezaZZ0tx4yx6Zv1622FbOfO1hPn0O5hjRvDnXfm6TJMPVAVEckoPd12837zTRg/3l6XLWtJvmxZWzH76afWCaxdOxgyBM47L+io/0bJXUTkeO3cCc8+a2WY27bZ/rI9e0K1atZWoWrVwPvaK7mLiGTVrl3w8stWb79+/cHnKlSwIybGjgIF7ChTxjphduqUoxu6KrmLiGRXWtqB9sb7jnXrbFSfnm7n931dvtwWV9WoAbffbts2FSsW9pCU3EVEclNamnW/HD4c5s61nvY33QRnnHFg49by5a2DZjZoEZOISG6KiYErr4Q5c+CLL6BJE2uP0LkzJCbaCtpixWxk
P2ZMjoejOncRkXByzvrUt2hh0zdr1x68O3dKio3gc5iSu4hITtm3SjbxmLMoYadpGRGRKKTkLiIShZTcRUSikJK7iEgUUnIXEYlCSu4iIlFIyV1EJAopuYuIRKHAess457YC67L418sB28IYTtCi6X6i6V5A95OXRdO9QObv5xTv/TGXuAaW3LPDOZeUmcY5kSKa7iea7gV0P3lZNN0LhP9+NC0jIhKFlNxFRKJQpCb3F4IOIMyi6X6i6V5A95OXRdO9QJjvJyLn3EVE5OgideQuIiJHEXHJ3TnX1jm33Dm3yjl3V9DxHC/n3CvOuS3OuUUZ3ivjnPvUObcy9PWkIGPMLOdcnHNuhnNuqXNusXNuQOj9SL2fos6575xzC0L380Do/WrOuTmh+xnrnCscdKyZ5ZyLcc5975ybHHodyfey1jm30DmX7JxLCr0Xqd9rpZ1z451zy0L//ZwT7nuJqOTunIsBRgLtgDpAN+dcnWCjOm6vAW0Pee8u4HPvfU3g89DrSLAXuN17fzpwNtAn9P9HpN7PX0Ar731DIAFo65w7G/g3MCJ0PzuA6wKM8XgNAJZmeB3J9wLQ0nufkKFkMFK/154EpnrvawMNsf+Pwnsv3vuIOYBzgGkZXg8BhgQdVxbuIx5YlOH1cuDk0J9PBpYHHWMW72sicGE03A9wAjAfOAtbWFIw9P5B34N5+QBiQ0miFTAZcJF6L6F41wLlDnkv4r7XgJLAGkLPPHPqXiJq5A5UATZkeJ0Sei/SVfTebwYIfa0QcDzHzTkXDzQC5hDB9xOaxkgGtgCfAj8CO733e0OXRNL33BPAYCA99LoskXsvAB74xDk3zzl3Y+i9SPxeqw5sBV4NTZm95Jw7kTDfS6Qld3eY91TuEzDnXHHgPeBW7/1vQceTHd77NO99AjbqbQKcfrjLcjeq4+ecuwTY4r2fl/Htw1ya5+8lg3O992dg07J9nHPNgw4oiwoCZwDPee8bAf8jB6aTIi25pwBxGV7HApsCiiWcfnbOnQwQ+rol4HgyzTlXCEvso733E0JvR+z97OO93wl8gT1LKO2c27eZfKR8z50LdHTOrQXGYFMzTxCZ9wKA935T6OsW4H3sh28kfq+lACne+zmh1+OxZB/We4m05D4XqBl64l8Y6ApMCjimcJgE9Ar9uRc2d53nOecc8DKw1Hv/eIZTkXo/5Z1zpUN/Lga0xh50zQCuCF0WEffjvR/ivY/13sdj/51M995fQwTeC4Bz7kTnXIl9fwYuAhYRgd9r3vufgA3OuVqhty4AlhDuewn64UIWHka0B1Zgc6F3Bx1PFuJ/B9gM7MF+gl+HzYV+DqwMfS0TdJyZvJdm2K/1PwDJoaN9BN9PA+D70P0sAu4LvV8d+A5YBbwLFAk61uO8r/OByZF8L6G4F4SOxfv+24/g77UEICn0vfYBcFK470UrVEVEolCkTcuIiEgmKLmLiEQhJXcRkSik5C4iEoWU3EVEopCSu4hIFFJyFxGJQkruIiJR6P8BL93X0abeyOIAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Training vs. validation loss over epochs.\n",
    "epochs = history.epoch\n",
    "plt.plot(epochs, history.history.get('loss'), c=\"r\", label=\"loss\")\n",
    "plt.plot(epochs, history.history.get('val_loss'), c=\"b\", label=\"val_loss\")\n",
    "plt.legend()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "25000/25000 [==============================] - 2s 66us/step\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "[0.2860933108711243, 0.88392]"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Returns [loss, accuracy] on the held-out test set.\n",
    "model.evaluate(x_test, y_test)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "构造一个全部由同一词汇索引(37)组成的序列,测试模型对该输入的预测结果"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[0.6524368]], dtype=float32)"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Build one input sequence (batch of 1, length 300) where every\n",
    "# position is the same word index 37, then run a prediction on it.\n",
    "# np.full replaces the zeros/empty/fill/assign four-step version.\n",
    "x_try = np.full((1, 300), 37.0)\n",
    "model.predict(x_try)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.8"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
