{
 "metadata": {
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.9"
  },
  "orig_nbformat": 2,
  "kernelspec": {
   "name": "python3",
   "display_name": "Python 3.7.9 64-bit ('keras': conda)"
  },
  "interpreter": {
   "hash": "c48318aa3a00788a0c5588bddd5ec4d2d9555ba3ec179d00474fe581f353d319"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2,
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "output_type": "stream",
     "name": "stderr",
     "text": [
      "D:\\anaconda3\\envs\\keras\\lib\\site-packages\\tensorflow\\python\\framework\\dtypes.py:526: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n  _np_qint8 = np.dtype([(\"qint8\", np.int8, 1)])\nD:\\anaconda3\\envs\\keras\\lib\\site-packages\\tensorflow\\python\\framework\\dtypes.py:527: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n  _np_quint8 = np.dtype([(\"quint8\", np.uint8, 1)])\nD:\\anaconda3\\envs\\keras\\lib\\site-packages\\tensorflow\\python\\framework\\dtypes.py:528: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n  _np_qint16 = np.dtype([(\"qint16\", np.int16, 1)])\nD:\\anaconda3\\envs\\keras\\lib\\site-packages\\tensorflow\\python\\framework\\dtypes.py:529: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n  _np_quint16 = np.dtype([(\"quint16\", np.uint16, 1)])\nD:\\anaconda3\\envs\\keras\\lib\\site-packages\\tensorflow\\python\\framework\\dtypes.py:530: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n  _np_qint32 = np.dtype([(\"qint32\", np.int32, 1)])\nD:\\anaconda3\\envs\\keras\\lib\\site-packages\\tensorflow\\python\\framework\\dtypes.py:535: FutureWarning: Passing (type, 1) or '1type' as a synonym of type is deprecated; in a future version of numpy, it will be understood as (type, (1,)) / '(1,)type'.\n  np_resource = np.dtype([(\"resource\", np.ubyte, 1)])\n"
     ]
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "import cv2\n",
    "from PIL import Image\n",
    "from tensorflow import keras\n",
    "import numpy as np\n",
    "import argparse\n",
    "import math\n",
    "import os"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "True\n"
     ]
    }
   ],
   "source": [
    "print(tf.test.is_gpu_available())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 数据读取 高分辨率图片\n",
    "data_y_256 = []\n",
    "data_x_128 = []\n",
    "path = '../256train_dir'\n",
    "for fileName in os.listdir(path):\n",
    "    img = cv2.imread(path + '/' + fileName)\n",
    "    data_y_256.append(img)\n",
    "    data_x_128.append(cv2.resize(img,[128,128]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "def generator_model():\n",
    "    model = keras.models.Sequential()\n",
    "    model.add(keras.layers.Dense(input_dim=100, units=1024,activation='tanh'))\n",
    "    model.add(keras.layers.Dense(128*7*7,activation='tanh'))\n",
    "    model.add(keras.layers.Reshape((7, 7, 128), input_shape=(128*7*7,)))\n",
    "    model.add(tf.keras.layers.UpSampling2D(size=(2, 2)))\n",
    "    model.add(tf.keras.layers.Conv2D(64, (5, 5), padding='same',activation='tanh'))\n",
    "    model.add(tf.keras.layers.UpSampling2D(size=(2, 2)))\n",
    "    model.add(tf.keras.layers.Conv2D(1, (5, 5), padding='same',activation='tanh'))\n",
    "\n",
    "    # plot_model(model, show_shapes=True, to_file='wgan/keras-wgan-generator_model.png')\n",
    "    model.summary()\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [],
   "source": [
    "def discriminator_model():\n",
    "    model = keras.models.Sequential()\n",
    "    model.add(tf.keras.layers.Conv2D(input_shape=[28, 28, 1],filters=64,strides=2,kernel_size=3,padding='same'))\n",
    "    model.add(tf.keras.layers.LeakyReLU(alpha=0.2))\n",
    "    #model.add(BatchNormalization())\n",
    "\n",
    "    model.add(tf.keras.layers.Conv2D(filters=32,strides=2,kernel_size=3,padding='same'))\n",
    "    model.add(tf.keras.layers.LeakyReLU(alpha=0.2))\n",
    "    #model.add(BatchNormalization())\n",
    "\n",
    "    model.add(tf.keras.layers.Conv2D(filters=16,strides=2,kernel_size=3,padding='same'))\n",
    "    model.add(tf.keras.layers.LeakyReLU(alpha=0.2))\n",
    "    #model.add(BatchNormalization())\n",
    "\n",
    "    model.add(tf.keras.layers.Conv2D(filters=1,kernel_size=3,padding='same'))\n",
    "\n",
    "    model.add(tf.keras.layers.GlobalAveragePooling2D())\n",
    "\n",
    "    # plot_model(model, show_shapes=True, to_file='wgan/keras-wgan-discriminator_model.png')\n",
    "    model.summary()\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "def generator_containing_discriminator(g, d):\n",
    "    model = keras.models.Sequential()\n",
    "    model.add(g)\n",
    "    d.trainable = False\n",
    "    model.add(d)\n",
    "    # plot_model(model, show_shapes=True, to_file='gan/keras-gan-gan_model.png')\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [],
   "source": [
    "#定义w距离为损失函数\n",
    "def wasserstein(y_true, y_pred):\n",
    "    return keras.backend.mean(y_true * y_pred)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "tags": [
     "outputPrepend"
    ]
   },
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "00 batch 124 g_loss : -0.326947\n",
      "epoch 40/100 batch 125 d_loss : 0.000085\n",
      "epoch 40/100 batch 125 g_loss : -0.281952\n",
      "epoch 40/100 batch 126 d_loss : -0.000473\n",
      "epoch 40/100 batch 126 g_loss : -0.244175\n",
      "epoch 40/100 batch 127 d_loss : -0.001841\n",
      "epoch 40/100 batch 127 g_loss : -0.301928\n",
      "epoch 40/100 batch 128 d_loss : -0.003975\n",
      "epoch 40/100 batch 128 g_loss : -0.318811\n",
      "epoch 40/100 batch 129 d_loss : -0.000276\n",
      "epoch 40/100 batch 129 g_loss : -0.298224\n",
      "epoch 40/100 batch 130 d_loss : -0.002579\n",
      "epoch 40/100 batch 130 g_loss : -0.323940\n",
      "epoch 40/100 batch 131 d_loss : -0.001817\n",
      "epoch 40/100 batch 131 g_loss : -0.334603\n",
      "epoch 40/100 batch 132 d_loss : -0.001694\n",
      "epoch 40/100 batch 132 g_loss : -0.303155\n",
      "epoch 40/100 batch 133 d_loss : -0.003316\n",
      "epoch 40/100 batch 133 g_loss : -0.316749\n",
      "epoch 40/100 batch 134 d_loss : -0.002713\n",
      "epoch 40/100 batch 134 g_loss : -0.293121\n",
      "epoch 40/100 batch 135 d_loss : -0.001261\n",
      "epoch 40/100 batch 135 g_loss : -0.290137\n",
      "epoch 40/100 batch 136 d_loss : -0.000910\n",
      "epoch 40/100 batch 136 g_loss : -0.331261\n",
      "epoch 40/100 batch 137 d_loss : -0.002647\n",
      "epoch 40/100 batch 137 g_loss : -0.378567\n",
      "epoch 40/100 batch 138 d_loss : -0.000460\n",
      "epoch 40/100 batch 138 g_loss : -0.302252\n",
      "epoch 40/100 batch 139 d_loss : -0.003461\n",
      "epoch 40/100 batch 139 g_loss : -0.310896\n",
      "epoch 40/100 batch 140 d_loss : -0.002751\n",
      "epoch 40/100 batch 140 g_loss : -0.400540\n",
      "epoch 40/100 batch 141 d_loss : 0.003089\n",
      "epoch 40/100 batch 141 g_loss : -0.309198\n",
      "epoch 40/100 batch 142 d_loss : -0.002573\n",
      "epoch 40/100 batch 142 g_loss : -0.269456\n",
      "epoch 40/100 batch 143 d_loss : -0.001996\n",
      "epoch 40/100 batch 143 g_loss : -0.219680\n",
      "epoch 40/100 batch 144 d_loss : -0.000464\n",
      "epoch 40/100 batch 144 g_loss : -0.249109\n",
      "epoch 40/100 batch 145 d_loss : -0.001866\n",
      "epoch 40/100 batch 145 g_loss : -0.264272\n",
      "epoch 40/100 batch 146 d_loss : -0.001528\n",
      "epoch 40/100 batch 146 g_loss : -0.324452\n",
      "epoch 40/100 batch 147 d_loss : -0.002803\n",
      "epoch 40/100 batch 147 g_loss : -0.343459\n",
      "epoch 40/100 batch 148 d_loss : -0.002225\n",
      "epoch 40/100 batch 148 g_loss : -0.357682\n",
      "epoch 40/100 batch 149 d_loss : -0.001895\n",
      "epoch 40/100 batch 149 g_loss : -0.341126\n",
      "epoch 40/100 batch 150 d_loss : -0.002576\n",
      "epoch 40/100 batch 150 g_loss : -0.319864\n",
      "epoch 40/100 batch 151 d_loss : -0.002321\n",
      "epoch 40/100 batch 151 g_loss : -0.293166\n",
      "epoch 40/100 batch 152 d_loss : -0.001800\n",
      "epoch 40/100 batch 152 g_loss : -0.273713\n",
      "epoch 40/100 batch 153 d_loss : 0.000297\n",
      "epoch 40/100 batch 153 g_loss : -0.256711\n",
      "epoch 40/100 batch 154 d_loss : -0.000183\n",
      "epoch 40/100 batch 154 g_loss : -0.283342\n",
      "epoch 40/100 batch 155 d_loss : -0.000825\n",
      "epoch 40/100 batch 155 g_loss : -0.362584\n",
      "epoch 40/100 batch 156 d_loss : -0.000587\n",
      "epoch 40/100 batch 156 g_loss : -0.354849\n",
      "epoch 40/100 batch 157 d_loss : -0.002764\n",
      "epoch 40/100 batch 157 g_loss : -0.320517\n",
      "epoch 40/100 batch 158 d_loss : -0.001444\n",
      "epoch 40/100 batch 158 g_loss : -0.281450\n",
      "epoch 40/100 batch 159 d_loss : -0.002967\n",
      "epoch 40/100 batch 159 g_loss : -0.313935\n",
      "epoch 40/100 batch 160 d_loss : -0.003153\n",
      "epoch 40/100 batch 160 g_loss : -0.324987\n",
      "epoch 40/100 batch 161 d_loss : -0.001928\n",
      "epoch 40/100 batch 161 g_loss : -0.306265\n",
      "epoch 40/100 batch 162 d_loss : -0.001886\n",
      "epoch 40/100 batch 162 g_loss : -0.335190\n",
      "epoch 40/100 batch 163 d_loss : -0.001522\n",
      "epoch 40/100 batch 163 g_loss : -0.340563\n",
      "epoch 40/100 batch 164 d_loss : -0.000681\n",
      "epoch 40/100 batch 164 g_loss : -0.261625\n",
      "epoch 40/100 batch 165 d_loss : -0.001525\n",
      "epoch 40/100 batch 165 g_loss : -0.279950\n",
      "epoch 40/100 batch 166 d_loss : -0.001517\n",
      "epoch 40/100 batch 166 g_loss : -0.258984\n",
      "epoch 40/100 batch 167 d_loss : -0.001967\n",
      "epoch 40/100 batch 167 g_loss : -0.302226\n",
      "epoch 40/100 batch 168 d_loss : -0.001734\n",
      "epoch 40/100 batch 168 g_loss : -0.315079\n",
      "epoch 40/100 batch 169 d_loss : -0.002743\n",
      "epoch 40/100 batch 169 g_loss : -0.304789\n",
      "epoch 40/100 batch 170 d_loss : -0.002550\n",
      "epoch 40/100 batch 170 g_loss : -0.284086\n",
      "epoch 40/100 batch 171 d_loss : -0.001907\n",
      "epoch 40/100 batch 171 g_loss : -0.358318\n",
      "epoch 40/100 batch 172 d_loss : -0.002487\n",
      "epoch 40/100 batch 172 g_loss : -0.331627\n",
      "epoch 40/100 batch 173 d_loss : -0.002676\n",
      "epoch 40/100 batch 173 g_loss : -0.314291\n",
      "epoch 40/100 batch 174 d_loss : -0.002873\n",
      "epoch 40/100 batch 174 g_loss : -0.315660\n",
      "epoch 40/100 batch 175 d_loss : -0.002168\n",
      "epoch 40/100 batch 175 g_loss : -0.321951\n",
      "epoch 40/100 batch 176 d_loss : -0.001749\n",
      "epoch 40/100 batch 176 g_loss : -0.286942\n",
      "epoch 40/100 batch 177 d_loss : -0.002523\n",
      "epoch 40/100 batch 177 g_loss : -0.343641\n",
      "epoch 40/100 batch 178 d_loss : -0.002002\n",
      "epoch 40/100 batch 178 g_loss : -0.343507\n",
      "epoch 40/100 batch 179 d_loss : -0.001442\n",
      "epoch 40/100 batch 179 g_loss : -0.374040\n",
      "epoch 40/100 batch 180 d_loss : -0.003328\n",
      "epoch 40/100 batch 180 g_loss : -0.362135\n",
      "epoch 40/100 batch 181 d_loss : -0.002074\n",
      "epoch 40/100 batch 181 g_loss : -0.284683\n",
      "epoch 40/100 batch 182 d_loss : -0.003522\n",
      "epoch 40/100 batch 182 g_loss : -0.229298\n",
      "epoch 40/100 batch 183 d_loss : -0.001540\n",
      "epoch 40/100 batch 183 g_loss : -0.255836\n",
      "epoch 40/100 batch 184 d_loss : -0.000194\n",
      "epoch 40/100 batch 184 g_loss : -0.339322\n",
      "epoch 40/100 batch 185 d_loss : 0.000183\n",
      "epoch 40/100 batch 185 g_loss : -0.327075\n",
      "epoch 40/100 batch 186 d_loss : -0.001468\n",
      "epoch 40/100 batch 186 g_loss : -0.267384\n",
      "epoch 40/100 batch 187 d_loss : -0.001937\n",
      "epoch 40/100 batch 187 g_loss : -0.257756\n",
      "epoch 40/100 batch 188 d_loss : -0.001162\n",
      "epoch 40/100 batch 188 g_loss : -0.244086\n",
      "epoch 40/100 batch 189 d_loss : -0.000008\n",
      "epoch 40/100 batch 189 g_loss : -0.297122\n",
      "epoch 40/100 batch 190 d_loss : -0.000932\n",
      "epoch 40/100 batch 190 g_loss : -0.313940\n",
      "epoch 40/100 batch 191 d_loss : -0.001678\n",
      "epoch 40/100 batch 191 g_loss : -0.382134\n",
      "epoch 40/100 batch 192 d_loss : -0.000199\n",
      "epoch 40/100 batch 192 g_loss : -0.357204\n",
      "epoch 40/100 batch 193 d_loss : -0.000917\n",
      "epoch 40/100 batch 193 g_loss : -0.302742\n",
      "epoch 40/100 batch 194 d_loss : -0.002776\n",
      "epoch 40/100 batch 194 g_loss : -0.328874\n",
      "epoch 40/100 batch 195 d_loss : -0.002642\n",
      "epoch 40/100 batch 195 g_loss : -0.341166\n",
      "epoch 40/100 batch 196 d_loss : -0.002247\n",
      "epoch 40/100 batch 196 g_loss : -0.334782\n",
      "epoch 40/100 batch 197 d_loss : -0.002605\n",
      "epoch 40/100 batch 197 g_loss : -0.340219\n",
      "epoch 40/100 batch 198 d_loss : -0.001825\n",
      "epoch 40/100 batch 198 g_loss : -0.320605\n",
      "epoch 40/100 batch 199 d_loss : -0.002121\n",
      "epoch 40/100 batch 199 g_loss : -0.326142\n",
      "epoch 40/100 batch 200 d_loss : -0.002007\n",
      "epoch 40/100 batch 200 g_loss : -0.265349\n",
      "epoch 40/100 batch 201 d_loss : -0.003166\n",
      "epoch 40/100 batch 201 g_loss : -0.301774\n",
      "epoch 40/100 batch 202 d_loss : -0.002147\n",
      "epoch 40/100 batch 202 g_loss : -0.352381\n",
      "epoch 40/100 batch 203 d_loss : -0.002164\n",
      "epoch 40/100 batch 203 g_loss : -0.394497\n",
      "epoch 40/100 batch 204 d_loss : -0.001415\n",
      "epoch 40/100 batch 204 g_loss : -0.346444\n",
      "epoch 40/100 batch 205 d_loss : -0.003625\n",
      "epoch 40/100 batch 205 g_loss : -0.314213\n",
      "epoch 40/100 batch 206 d_loss : -0.002410\n",
      "epoch 40/100 batch 206 g_loss : -0.314295\n",
      "epoch 40/100 batch 207 d_loss : -0.002543\n",
      "epoch 40/100 batch 207 g_loss : -0.277802\n",
      "epoch 40/100 batch 208 d_loss : -0.001627\n",
      "epoch 40/100 batch 208 g_loss : -0.326673\n",
      "epoch 40/100 batch 209 d_loss : -0.003318\n",
      "epoch 40/100 batch 209 g_loss : -0.365637\n",
      "epoch 40/100 batch 210 d_loss : -0.002858\n",
      "epoch 40/100 batch 210 g_loss : -0.353195\n",
      "epoch 40/100 batch 211 d_loss : -0.001867\n",
      "epoch 40/100 batch 211 g_loss : -0.353210\n",
      "epoch 40/100 batch 212 d_loss : -0.001511\n",
      "epoch 40/100 batch 212 g_loss : -0.276784\n",
      "epoch 40/100 batch 213 d_loss : -0.001756\n",
      "epoch 40/100 batch 213 g_loss : -0.271154\n",
      "epoch 40/100 batch 214 d_loss : -0.001171\n",
      "epoch 40/100 batch 214 g_loss : -0.317273\n",
      "epoch 40/100 batch 215 d_loss : -0.002303\n",
      "epoch 40/100 batch 215 g_loss : -0.331576\n",
      "epoch 40/100 batch 216 d_loss : -0.001996\n",
      "epoch 40/100 batch 216 g_loss : -0.333795\n",
      "epoch 40/100 batch 217 d_loss : 0.000079\n",
      "epoch 40/100 batch 217 g_loss : -0.251170\n",
      "epoch 40/100 batch 218 d_loss : -0.002903\n",
      "epoch 40/100 batch 218 g_loss : -0.277253\n",
      "epoch 40/100 batch 219 d_loss : -0.000693\n",
      "epoch 40/100 batch 219 g_loss : -0.342133\n",
      "epoch 40/100 batch 220 d_loss : -0.001816\n",
      "epoch 40/100 batch 220 g_loss : -0.388832\n",
      "epoch 40/100 batch 221 d_loss : -0.000231\n",
      "epoch 40/100 batch 221 g_loss : -0.325519\n",
      "epoch 40/100 batch 222 d_loss : -0.002393\n",
      "epoch 40/100 batch 222 g_loss : -0.337046\n",
      "epoch 40/100 batch 223 d_loss : -0.001758\n",
      "epoch 40/100 batch 223 g_loss : -0.307629\n",
      "epoch 40/100 batch 224 d_loss : -0.002717\n",
      "epoch 40/100 batch 224 g_loss : -0.267708\n",
      "epoch 40/100 batch 225 d_loss : -0.002127\n",
      "epoch 40/100 batch 225 g_loss : -0.333864\n",
      "epoch 40/100 batch 226 d_loss : -0.002735\n",
      "epoch 40/100 batch 226 g_loss : -0.360067\n",
      "epoch 40/100 batch 227 d_loss : -0.003173\n",
      "epoch 40/100 batch 227 g_loss : -0.335170\n",
      "epoch 40/100 batch 228 d_loss : -0.001863\n",
      "epoch 40/100 batch 228 g_loss : -0.330307\n",
      "epoch 40/100 batch 229 d_loss : -0.004221\n",
      "epoch 40/100 batch 229 g_loss : -0.339426\n",
      "epoch 40/100 batch 230 d_loss : -0.003860\n",
      "epoch 40/100 batch 230 g_loss : -0.298987\n",
      "epoch 40/100 batch 231 d_loss : -0.003084\n",
      "epoch 40/100 batch 231 g_loss : -0.224923\n",
      "epoch 40/100 batch 232 d_loss : -0.004520\n",
      "epoch 40/100 batch 232 g_loss : -0.192397\n",
      "epoch 40/100 batch 233 d_loss : 0.000403\n",
      "epoch 40/100 batch 233 g_loss : -0.260901\n",
      "Epoch is %d/100 40\n",
      "Number of batches 234\n",
      "epoch 41/100 batch 0 d_loss : -0.001112\n",
      "epoch 41/100 batch 0 g_loss : -0.331190\n",
      "epoch 41/100 batch 1 d_loss : -0.002589\n",
      "epoch 41/100 batch 1 g_loss : -0.378760\n",
      "epoch 41/100 batch 2 d_loss : -0.002019\n",
      "epoch 41/100 batch 2 g_loss : -0.366484\n",
      "epoch 41/100 batch 3 d_loss : -0.002518\n",
      "epoch 41/100 batch 3 g_loss : -0.318458\n",
      "epoch 41/100 batch 4 d_loss : -0.000536\n",
      "epoch 41/100 batch 4 g_loss : -0.279499\n",
      "epoch 41/100 batch 5 d_loss : -0.002587\n",
      "epoch 41/100 batch 5 g_loss : -0.244321\n",
      "epoch 41/100 batch 6 d_loss : -0.001112\n",
      "epoch 41/100 batch 6 g_loss : -0.302249\n",
      "epoch 41/100 batch 7 d_loss : -0.000614\n",
      "epoch 41/100 batch 7 g_loss : -0.285898\n",
      "epoch 41/100 batch 8 d_loss : -0.002252\n",
      "epoch 41/100 batch 8 g_loss : -0.256028\n",
      "epoch 41/100 batch 9 d_loss : -0.003009\n",
      "epoch 41/100 batch 9 g_loss : -0.270058\n",
      "epoch 41/100 batch 10 d_loss : -0.002317\n",
      "epoch 41/100 batch 10 g_loss : -0.291969\n",
      "epoch 41/100 batch 11 d_loss : -0.003117\n",
      "epoch 41/100 batch 11 g_loss : -0.316631\n",
      "epoch 41/100 batch 12 d_loss : -0.003501\n",
      "epoch 41/100 batch 12 g_loss : -0.355155\n",
      "epoch 41/100 batch 13 d_loss : -0.003244\n",
      "epoch 41/100 batch 13 g_loss : -0.338514\n",
      "epoch 41/100 batch 14 d_loss : -0.000494\n",
      "epoch 41/100 batch 14 g_loss : -0.269900\n",
      "epoch 41/100 batch 15 d_loss : -0.002439\n",
      "epoch 41/100 batch 15 g_loss : -0.282283\n",
      "epoch 41/100 batch 16 d_loss : -0.002863\n",
      "epoch 41/100 batch 16 g_loss : -0.305670\n",
      "epoch 41/100 batch 17 d_loss : -0.003321\n",
      "epoch 41/100 batch 17 g_loss : -0.330340\n",
      "epoch 41/100 batch 18 d_loss : -0.000877\n",
      "epoch 41/100 batch 18 g_loss : -0.250357\n",
      "epoch 41/100 batch 19 d_loss : -0.002304\n",
      "epoch 41/100 batch 19 g_loss : -0.267816\n",
      "epoch 41/100 batch 20 d_loss : -0.002444\n",
      "epoch 41/100 batch 20 g_loss : -0.274169\n",
      "epoch 41/100 batch 21 d_loss : -0.002101\n",
      "epoch 41/100 batch 21 g_loss : -0.273691\n",
      "epoch 41/100 batch 22 d_loss : -0.001924\n",
      "epoch 41/100 batch 22 g_loss : -0.329404\n",
      "epoch 41/100 batch 23 d_loss : -0.001142\n",
      "epoch 41/100 batch 23 g_loss : -0.259603\n",
      "epoch 41/100 batch 24 d_loss : -0.002409\n",
      "epoch 41/100 batch 24 g_loss : -0.245682\n",
      "epoch 41/100 batch 25 d_loss : -0.001861\n",
      "epoch 41/100 batch 25 g_loss : -0.298878\n",
      "epoch 41/100 batch 26 d_loss : -0.000699\n",
      "epoch 41/100 batch 26 g_loss : -0.383132\n",
      "epoch 41/100 batch 27 d_loss : -0.002746\n",
      "epoch 41/100 batch 27 g_loss : -0.367528\n",
      "epoch 41/100 batch 28 d_loss : -0.000996\n",
      "epoch 41/100 batch 28 g_loss : -0.385943\n",
      "epoch 41/100 batch 29 d_loss : -0.001347\n",
      "epoch 41/100 batch 29 g_loss : -0.348460\n",
      "epoch 41/100 batch 30 d_loss : -0.001266\n",
      "epoch 41/100 batch 30 g_loss : -0.284597\n",
      "epoch 41/100 batch 31 d_loss : -0.003036\n",
      "epoch 41/100 batch 31 g_loss : -0.256799\n",
      "epoch 41/100 batch 32 d_loss : -0.002140\n",
      "epoch 41/100 batch 32 g_loss : -0.266822\n",
      "epoch 41/100 batch 33 d_loss : -0.002293\n",
      "epoch 41/100 batch 33 g_loss : -0.212899\n",
      "epoch 41/100 batch 34 d_loss : -0.000732\n",
      "epoch 41/100 batch 34 g_loss : -0.272711\n",
      "epoch 41/100 batch 35 d_loss : -0.000444\n",
      "epoch 41/100 batch 35 g_loss : -0.330273\n",
      "epoch 41/100 batch 36 d_loss : -0.001248\n",
      "epoch 41/100 batch 36 g_loss : -0.344664\n",
      "epoch 41/100 batch 37 d_loss : -0.000055\n",
      "epoch 41/100 batch 37 g_loss : -0.311770\n",
      "epoch 41/100 batch 38 d_loss : -0.002559\n",
      "epoch 41/100 batch 38 g_loss : -0.292674\n",
      "epoch 41/100 batch 39 d_loss : -0.001850\n",
      "epoch 41/100 batch 39 g_loss : -0.257547\n",
      "epoch 41/100 batch 40 d_loss : -0.002658\n",
      "epoch 41/100 batch 40 g_loss : -0.196436\n",
      "epoch 41/100 batch 41 d_loss : 0.002294\n",
      "epoch 41/100 batch 41 g_loss : -0.287257\n",
      "epoch 41/100 batch 42 d_loss : -0.003668\n",
      "epoch 41/100 batch 42 g_loss : -0.316819\n",
      "epoch 41/100 batch 43 d_loss : -0.001762\n",
      "epoch 41/100 batch 43 g_loss : -0.335556\n",
      "epoch 41/100 batch 44 d_loss : -0.002621\n",
      "epoch 41/100 batch 44 g_loss : -0.364610\n",
      "epoch 41/100 batch 45 d_loss : -0.001464\n",
      "epoch 41/100 batch 45 g_loss : -0.367559\n",
      "epoch 41/100 batch 46 d_loss : -0.001029\n",
      "epoch 41/100 batch 46 g_loss : -0.331393\n",
      "epoch 41/100 batch 47 d_loss : -0.001969\n",
      "epoch 41/100 batch 47 g_loss : -0.280770\n",
      "epoch 41/100 batch 48 d_loss : -0.002542\n",
      "epoch 41/100 batch 48 g_loss : -0.262145\n",
      "epoch 41/100 batch 49 d_loss : -0.002558\n",
      "epoch 41/100 batch 49 g_loss : -0.280050\n",
      "epoch 41/100 batch 50 d_loss : 0.000280\n",
      "epoch 41/100 batch 50 g_loss : -0.310205\n",
      "epoch 41/100 batch 51 d_loss : -0.001134\n",
      "epoch 41/100 batch 51 g_loss : -0.288705\n",
      "epoch 41/100 batch 52 d_loss : -0.003146\n",
      "epoch 41/100 batch 52 g_loss : -0.269190\n",
      "epoch 41/100 batch 53 d_loss : 0.000378\n",
      "epoch 41/100 batch 53 g_loss : -0.352563\n",
      "epoch 41/100 batch 54 d_loss : -0.002106\n",
      "epoch 41/100 batch 54 g_loss : -0.349439\n",
      "epoch 41/100 batch 55 d_loss : 0.001171\n",
      "epoch 41/100 batch 55 g_loss : -0.274851\n",
      "epoch 41/100 batch 56 d_loss : -0.001132\n",
      "epoch 41/100 batch 56 g_loss : -0.311746\n",
      "epoch 41/100 batch 57 d_loss : -0.000853\n",
      "epoch 41/100 batch 57 g_loss : -0.319749\n",
      "epoch 41/100 batch 58 d_loss : -0.002611\n",
      "epoch 41/100 batch 58 g_loss : -0.337588\n",
      "epoch 41/100 batch 59 d_loss : -0.003419\n",
      "epoch 41/100 batch 59 g_loss : -0.370546\n",
      "epoch 41/100 batch 60 d_loss : -0.003471\n",
      "epoch 41/100 batch 60 g_loss : -0.359055\n",
      "epoch 41/100 batch 61 d_loss : -0.003092\n",
      "epoch 41/100 batch 61 g_loss : -0.323548\n",
      "epoch 41/100 batch 62 d_loss : -0.000593\n",
      "epoch 41/100 batch 62 g_loss : -0.269624\n",
      "epoch 41/100 batch 63 d_loss : -0.001349\n",
      "epoch 41/100 batch 63 g_loss : -0.275489\n",
      "epoch 41/100 batch 64 d_loss : -0.001375\n",
      "epoch 41/100 batch 64 g_loss : -0.328081\n",
      "epoch 41/100 batch 65 d_loss : -0.002855\n",
      "epoch 41/100 batch 65 g_loss : -0.357312\n",
      "epoch 41/100 batch 66 d_loss : -0.001613\n",
      "epoch 41/100 batch 66 g_loss : -0.334416\n",
      "epoch 41/100 batch 67 d_loss : -0.001767\n",
      "epoch 41/100 batch 67 g_loss : -0.288942\n",
      "epoch 41/100 batch 68 d_loss : -0.001271\n",
      "epoch 41/100 batch 68 g_loss : -0.348281\n",
      "epoch 41/100 batch 69 d_loss : -0.001292\n",
      "epoch 41/100 batch 69 g_loss : -0.372649\n",
      "epoch 41/100 batch 70 d_loss : -0.001238\n",
      "epoch 41/100 batch 70 g_loss : -0.307381\n",
      "epoch 41/100 batch 71 d_loss : -0.001597\n",
      "epoch 41/100 batch 71 g_loss : -0.301305\n",
      "epoch 41/100 batch 72 d_loss : -0.002130\n",
      "epoch 41/100 batch 72 g_loss : -0.336078\n",
      "epoch 41/100 batch 73 d_loss : -0.002908\n",
      "epoch 41/100 batch 73 g_loss : -0.356956\n",
      "epoch 41/100 batch 74 d_loss : -0.003580\n",
      "epoch 41/100 batch 74 g_loss : -0.353602\n",
      "epoch 41/100 batch 75 d_loss : -0.001233\n",
      "epoch 41/100 batch 75 g_loss : -0.261158\n",
      "epoch 41/100 batch 76 d_loss : -0.001992\n",
      "epoch 41/100 batch 76 g_loss : -0.226031\n",
      "epoch 41/100 batch 77 d_loss : -0.004136\n",
      "epoch 41/100 batch 77 g_loss : -0.227924\n",
      "epoch 41/100 batch 78 d_loss : -0.001056\n",
      "epoch 41/100 batch 78 g_loss : -0.295295\n",
      "epoch 41/100 batch 79 d_loss : -0.002558\n",
      "epoch 41/100 batch 79 g_loss : -0.358149\n",
      "epoch 41/100 batch 80 d_loss : -0.000873\n",
      "epoch 41/100 batch 80 g_loss : -0.356435\n",
      "epoch 41/100 batch 81 d_loss : -0.001556\n",
      "epoch 41/100 batch 81 g_loss : -0.349104\n",
      "epoch 41/100 batch 82 d_loss : -0.003941\n",
      "epoch 41/100 batch 82 g_loss : -0.350192\n",
      "epoch 41/100 batch 83 d_loss : -0.001145\n",
      "epoch 41/100 batch 83 g_loss : -0.299676\n",
      "epoch 41/100 batch 84 d_loss : -0.004096\n",
      "epoch 41/100 batch 84 g_loss : -0.336434\n",
      "epoch 41/100 batch 85 d_loss : -0.000935\n",
      "epoch 41/100 batch 85 g_loss : -0.256268\n",
      "epoch 41/100 batch 86 d_loss : -0.004201\n",
      "epoch 41/100 batch 86 g_loss : -0.289222\n",
      "epoch 41/100 batch 87 d_loss : -0.001619\n",
      "epoch 41/100 batch 87 g_loss : -0.334153\n",
      "epoch 41/100 batch 88 d_loss : 0.000350\n",
      "epoch 41/100 batch 88 g_loss : -0.287479\n",
      "epoch 41/100 batch 89 d_loss : -0.000919\n",
      "epoch 41/100 batch 89 g_loss : -0.267814\n",
      "epoch 41/100 batch 90 d_loss : -0.001343\n",
      "epoch 41/100 batch 90 g_loss : -0.295093\n",
      "epoch 41/100 batch 91 d_loss : -0.002956\n",
      "epoch 41/100 batch 91 g_loss : -0.316835\n",
      "epoch 41/100 batch 92 d_loss : -0.002309\n",
      "epoch 41/100 batch 92 g_loss : -0.349925\n",
      "epoch 41/100 batch 93 d_loss : -0.001559\n",
      "epoch 41/100 batch 93 g_loss : -0.338933\n",
      "epoch 41/100 batch 94 d_loss : -0.001634\n",
      "epoch 41/100 batch 94 g_loss : -0.328436\n",
      "epoch 41/100 batch 95 d_loss : -0.002774\n",
      "epoch 41/100 batch 95 g_loss : -0.339662\n",
      "epoch 41/100 batch 96 d_loss : -0.001105\n",
      "epoch 41/100 batch 96 g_loss : -0.303435\n",
      "epoch 41/100 batch 97 d_loss : -0.002949\n",
      "epoch 41/100 batch 97 g_loss : -0.372458\n",
      "epoch 41/100 batch 98 d_loss : -0.002608\n",
      "epoch 41/100 batch 98 g_loss : -0.313096\n",
      "epoch 41/100 batch 99 d_loss : -0.002894\n",
      "epoch 41/100 batch 99 g_loss : -0.250246\n",
      "epoch 41/100 batch 100 d_loss : -0.000554\n",
      "epoch 41/100 batch 100 g_loss : -0.322700\n",
      "epoch 41/100 batch 101 d_loss : -0.002865\n",
      "epoch 41/100 batch 101 g_loss : -0.362285\n",
      "epoch 41/100 batch 102 d_loss : -0.002882\n",
      "epoch 41/100 batch 102 g_loss : -0.370143\n",
      "epoch 41/100 batch 103 d_loss : -0.002092\n",
      "epoch 41/100 batch 103 g_loss : -0.381169\n",
      "epoch 41/100 batch 104 d_loss : -0.001319\n",
      "epoch 41/100 batch 104 g_loss : -0.317365\n",
      "epoch 41/100 batch 105 d_loss : -0.002516\n",
      "epoch 41/100 batch 105 g_loss : -0.300873\n",
      "epoch 41/100 batch 106 d_loss : -0.002074\n",
      "epoch 41/100 batch 106 g_loss : -0.284928\n",
      "epoch 41/100 batch 107 d_loss : -0.002712\n",
      "epoch 41/100 batch 107 g_loss : -0.322320\n",
      "epoch 41/100 batch 108 d_loss : -0.001706\n",
      "epoch 41/100 batch 108 g_loss : -0.372603\n",
      "epoch 41/100 batch 109 d_loss : -0.000256\n",
      "epoch 41/100 batch 109 g_loss : -0.284510\n",
      "epoch 41/100 batch 110 d_loss : -0.002035\n",
      "epoch 41/100 batch 110 g_loss : -0.255024\n",
      "epoch 41/100 batch 111 d_loss : -0.000943\n",
      "epoch 41/100 batch 111 g_loss : -0.292626\n",
      "epoch 41/100 batch 112 d_loss : -0.001044\n",
      "epoch 41/100 batch 112 g_loss : -0.315243\n",
      "epoch 41/100 batch 113 d_loss : -0.001482\n",
      "epoch 41/100 batch 113 g_loss : -0.356512\n",
      "epoch 41/100 batch 114 d_loss : 0.000587\n",
      "epoch 41/100 batch 114 g_loss : -0.300554\n",
      "epoch 41/100 batch 115 d_loss : -0.003399\n",
      "epoch 41/100 batch 115 g_loss : -0.307603\n",
      "epoch 41/100 batch 116 d_loss : -0.000922\n",
      "epoch 41/100 batch 116 g_loss : -0.374576\n",
      "epoch 41/100 batch 117 d_loss : -0.000754\n",
      "epoch 41/100 batch 117 g_loss : -0.375398\n",
      "epoch 41/100 batch 118 d_loss : -0.002224\n",
      "epoch 41/100 batch 118 g_loss : -0.337254\n",
      "epoch 41/100 batch 119 d_loss : -0.000555\n",
      "epoch 41/100 batch 119 g_loss : -0.353095\n",
      "epoch 41/100 batch 120 d_loss : -0.002282\n",
      "epoch 41/100 batch 120 g_loss : -0.363698\n",
      "epoch 41/100 batch 121 d_loss : -0.002394\n",
      "epoch 41/100 batch 121 g_loss : -0.341576\n",
      "epoch 41/100 batch 122 d_loss : -0.003036\n",
      "epoch 41/100 batch 122 g_loss : -0.339404\n",
      "epoch 41/100 batch 123 d_loss : -0.002908\n",
      "epoch 41/100 batch 123 g_loss : -0.387476\n",
      "epoch 41/100 batch 124 d_loss : -0.003334\n",
      "epoch 41/100 batch 124 g_loss : -0.346112\n",
      "epoch 41/100 batch 125 d_loss : 0.000611\n",
      "epoch 41/100 batch 125 g_loss : -0.304574\n",
      "epoch 41/100 batch 126 d_loss : -0.000753\n",
      "epoch 41/100 batch 126 g_loss : -0.247488\n",
      "epoch 41/100 batch 127 d_loss : -0.000621\n",
      "epoch 41/100 batch 127 g_loss : -0.322275\n",
      "epoch 41/100 batch 128 d_loss : -0.003549\n",
      "epoch 41/100 batch 128 g_loss : -0.321017\n",
      "epoch 41/100 batch 129 d_loss : -0.000689\n",
      "epoch 41/100 batch 129 g_loss : -0.270705\n",
      "epoch 41/100 batch 130 d_loss : -0.002756\n",
      "epoch 41/100 batch 130 g_loss : -0.314265\n"
     ]
    },
    {
     "output_type": "error",
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
      "\u001b[1;32m<ipython-input-27-bc49066e8d3e>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m()\u001b[0m\n\u001b[0;32m     36\u001b[0m         \u001b[0mnoise\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mrandom\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0muniform\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m-\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;36m1\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0msize\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mBATCH_SIZE\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;36m100\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m     37\u001b[0m         \u001b[0mimage_batch\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mX_train\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mindex\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0mBATCH_SIZE\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mindex\u001b[0m\u001b[1;33m+\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0mBATCH_SIZE\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 38\u001b[1;33m         \u001b[0mgenerated_images\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mg\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mpredict\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mnoise\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mverbose\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m     39\u001b[0m         \u001b[1;31m# if index % 100 == 0:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m     40\u001b[0m             \u001b[1;31m# image = combine_images(generated_images)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\anaconda3\\envs\\keras\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py\u001b[0m in \u001b[0;36mpredict\u001b[1;34m(self, x, batch_size, verbose, steps, max_queue_size, workers, use_multiprocessing)\u001b[0m\n\u001b[0;32m   1111\u001b[0m     \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1112\u001b[0m       return training_arrays.predict_loop(\n\u001b[1;32m-> 1113\u001b[1;33m           self, x, batch_size=batch_size, verbose=verbose, steps=steps)\n\u001b[0m\u001b[0;32m   1114\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1115\u001b[0m   \u001b[1;32mdef\u001b[0m \u001b[0mreset_metrics\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\anaconda3\\envs\\keras\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training_arrays.py\u001b[0m in \u001b[0;36mmodel_iteration\u001b[1;34m(model, inputs, targets, sample_weights, batch_size, epochs, verbose, callbacks, val_inputs, val_targets, val_sample_weights, shuffle, initial_epoch, steps_per_epoch, validation_steps, mode, validation_in_fit, **kwargs)\u001b[0m\n\u001b[0;32m    327\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    328\u001b[0m         \u001b[1;31m# Get outputs.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 329\u001b[1;33m         \u001b[0mbatch_outs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mf\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mins_batch\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    330\u001b[0m         \u001b[1;32mif\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mbatch_outs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mlist\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    331\u001b[0m           \u001b[0mbatch_outs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[0mbatch_outs\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\anaconda3\\envs\\keras\\lib\\site-packages\\tensorflow\\python\\keras\\backend.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, inputs)\u001b[0m\n\u001b[0;32m   3074\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   3075\u001b[0m     fetched = self._callable_fn(*array_vals,\n\u001b[1;32m-> 3076\u001b[1;33m                                 run_metadata=self.run_metadata)\n\u001b[0m\u001b[0;32m   3077\u001b[0m     \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_call_fetch_callbacks\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mfetched\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;33m-\u001b[0m\u001b[0mlen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_fetches\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   3078\u001b[0m     return nest.pack_sequence_as(self._outputs_structure,\n",
      "\u001b[1;32mD:\\anaconda3\\envs\\keras\\lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m   1437\u001b[0m           ret = tf_session.TF_SessionRunCallable(\n\u001b[0;32m   1438\u001b[0m               \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_handle\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mstatus\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1439\u001b[1;33m               run_metadata_ptr)\n\u001b[0m\u001b[0;32m   1440\u001b[0m         \u001b[1;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1441\u001b[0m           \u001b[0mproto_data\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;31mKeyboardInterrupt\u001b[0m: "
     ]
    }
   ],
   "source": [
    "BATCH_SIZE = 256\n",
    "\n",
    "(X_train, y_train), (X_test, y_test) = keras.datasets.mnist.load_data()\n",
    "# scale pixel values from [0, 255] to [-1, 1]\n",
    "X_train = (X_train.astype(np.float32) - 127.5) / 127.5\n",
    "# add a trailing channel axis: (N, 28, 28) -> (N, 28, 28, 1)\n",
    "X_train = X_train[:, :, :, None]\n",
    "X_test = X_test[:, :, :, None]\n",
    "\n",
    "d = discriminator_model()\n",
    "g = generator_model()\n",
    "d_on_g = generator_containing_discriminator(g, d)\n",
    "# WGAN uses RMSprop with a small learning rate rather than momentum-based optimizers\n",
    "d_optim = tf.keras.optimizers.RMSprop(lr=5E-5)\n",
    "g_optim = tf.keras.optimizers.RMSprop(lr=5E-5)\n",
    "\n",
    "# clipping range for the critic weights (WGAN Lipschitz constraint)\n",
    "c_lower = -0.1\n",
    "c_upper = 0.1\n",
    "\n",
    "# g itself is only used for predict(); it is trained through d_on_g below\n",
    "g.compile(loss='mse', optimizer=g_optim)\n",
    "# the stacked generator+critic model trains with the Wasserstein loss\n",
    "d_on_g.compile(loss=wasserstein, optimizer=g_optim)\n",
    "d.trainable = True\n",
    "# the critic also trains with the Wasserstein loss\n",
    "d.compile(loss=wasserstein, optimizer=d_optim)\n",
    "\n",
    "N_EPOCHS = 100\n",
    "for epoch in range(N_EPOCHS):\n",
    "    # NOTE: the original print passed the format string and epoch as two\n",
    "    # arguments, so %d was never substituted; use %-formatting explicitly.\n",
    "    print(\"Epoch %d/%d\" % (epoch + 1, N_EPOCHS))\n",
    "    nb_batches = X_train.shape[0] // BATCH_SIZE\n",
    "    print(\"Number of batches\", nb_batches)\n",
    "\n",
    "    for index in range(nb_batches):\n",
    "        noise = np.random.uniform(-1, 1, size=(BATCH_SIZE, 100))\n",
    "        image_batch = X_train[index * BATCH_SIZE:(index + 1) * BATCH_SIZE]\n",
    "        generated_images = g.predict(noise, verbose=0)\n",
    "\n",
    "        # train the critic: real samples labelled -1, fakes +1 (Wasserstein targets)\n",
    "        X = np.concatenate((image_batch, generated_images))\n",
    "        y = [-1] * BATCH_SIZE + [1] * BATCH_SIZE\n",
    "        d_loss = d.train_on_batch(X, y)\n",
    "\n",
    "        # clip critic weights after every update -- the core WGAN trick\n",
    "        for l in d.layers:\n",
    "            weights = [np.clip(w, c_lower, c_upper) for w in l.get_weights()]\n",
    "            l.set_weights(weights)\n",
    "\n",
    "        print(\"epoch %d/100 batch %d d_loss : %f\" % (epoch + 1, index, d_loss))\n",
    "\n",
    "        # freeze the critic while the generator is trained through the stacked model\n",
    "        noise = np.random.uniform(-1, 1, (BATCH_SIZE, 100))\n",
    "        d.trainable = False\n",
    "        g_loss = d_on_g.train_on_batch(noise, [-1] * BATCH_SIZE)\n",
    "        d.trainable = True\n",
    "        print(\"epoch %d/100 batch %d g_loss : %f\" % (epoch + 1, index, g_loss))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [],
   "source": [
    "# draw 10 latent vectors and decode them with the trained generator\n",
    "noise = np.random.uniform(low=-1, high=1, size=(10, 100))\n",
    "generated_images = g.predict(noise, verbose=0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "(28, 28, 1)"
      ]
     },
     "metadata": {},
     "execution_count": 33
    }
   ],
   "source": [
    "generated_images[0].shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "<PIL.Image.Image image mode=L size=28x28 at 0x20E0B648C48>"
      ],
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAAAAABXZoBIAAAAiElEQVR4nIWSSxIFIQgD05T3v3JmMeMH5flYWTaEQIFkJEnWCIwxYv3cAn4z9Uqybq+UP6ado6aFabMWl5Yl5AaHgT+yTJkTXrYQVR0DVpXfvFdDrT+cBLEkIrHZg7dnjoUyIILDd5uZOqYq5+z6Idc7oi++oHbhNpmOmXew9W7PC4yUnhnXxT/27iQ3J7ZvLQAAAABJRU5ErkJggg==\n"
     },
     "metadata": {},
     "execution_count": 40
    }
   ],
   "source": [
    "Image.fromarray(generated_images[7].reshape([28,28]).astype(np.uint8))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ]
}