{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Training"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Select training_env as the kernel (Kernel -> Change Kernel). If it is not available, refer to the README.md section \"Training a model\": you will need to install the anaconda environment training_env.\n",
    "Change the paths in the notebook.\n",
    "Moreover, to check the evolution of the loss and metrics in real time, look at the README.md section detailing the process with Tensorboard. The saved logs will be at the location PATH_THIS_TRAINING+\"logs\"\n",
    "Finally, to visualize the generated images, look at the notebook Visualize_training_data.ipynb\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "import sys"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "sys.path.append(\"/home/idumeur/code\")\n",
    "sys.path.append(\"/home/idumeur/code/sent2_cloud_remover/\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "from utils.load_dataset import load_data\n",
    "from models import clean_gan\n",
    "from train import open_yaml,saving_yaml\n",
    "\n",
    "from tensorflow.python.client import device_lib"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "PATH=\"/srv/osirim/idumeur/data/dataset6/prepro1/input_large_dataset/\" #path to the dataset which contains the tiles for the training\n",
    "PATH_TRAININGS=\"/srv/osirim/idumeur/trainings/\"\n",
    "NAME_MODEL=\"new_model\" #name of the model\n",
    "TRAINING_NBER=\"9\" #Id of the training\n",
    "PATH_THIS_MODEL=PATH_TRAININGS+NAME_MODEL+\"/\"\n",
    "PATH_THIS_TRAINING=\"{}{}/training_{}/\".format(PATH_TRAININGS,NAME_MODEL,TRAINING_NBER)\n",
    "PATH_CHECKPOINT=\"{}checkpoints/\".format(PATH_THIS_TRAINING)\n",
    "PATH_SAVED_IM=\"{}saved_training_images/\".format(PATH_THIS_TRAINING)\n",
    "PATH_TRAIN_YAML=\"/home/idumeur/code/sent2_cloud_remover/GAN_confs/train.yaml\" #The base configuration file for the training parameters\n",
    "PATH_MODEL_YAML=\"/home/idumeur/code/sent2_cloud_remover/GAN_confs/model.yaml\" #The base configuration file for the model parameters"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/idumeur/code/sent2_cloud_remover/train.py:43: UnsafeLoaderWarning: \n",
      "The default 'Loader' for 'load(stream)' without further arguments can be unsafe.\n",
      "Use 'load(stream, Loader=ruamel.yaml.Loader)' explicitly if that is OK.\n",
      "Alternatively include the following in your code:\n",
      "\n",
      "  import warnings\n",
      "  warnings.simplefilter('ignore', ruamel.yaml.error.UnsafeLoaderWarning)\n",
      "\n",
      "In most other cases you should consider using 'safe_load(stream)'\n",
      "  return yaml.load(f)\n"
     ]
    }
   ],
   "source": [
    "train_param=open_yaml(PATH_TRAIN_YAML) #dict\n",
    "model_param=open_yaml(PATH_MODEL_YAML)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "model_param[\"model_name\"]=NAME_MODEL"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Discriminator\n",
    "Here are the current parameters for the discriminator convolutional layers. You can change them by setting model_param[\"dict_discri_archi\"] = new_dictionary. In the dictionary, keys are the numbers of the conv layers, and each list corresponds respectively to [\"padding\",\"stride\",\"kernel\",\"nfilter\"]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{1: [1, 2, 4, 64],\n",
       " 2: [1, 2, 4, 256],\n",
       " 3: [1, 2, 4, 156],\n",
       " 4: [1, 1, 4, 512],\n",
       " 5: [1, 1, 4, 1]}"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model_param[\"dict_discri_archi\"]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "model_param[\"d_last_activ\"] #The last activation of the discriminator\n",
    "model_param[\"real_label_smoothing\"]=[1.0,1.0]\n",
    "model_param[\"fake_label_smoothing\"]=[0.0,0.0]\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Set Training parameters\n",
    "train_param[\"train_directory\"]=PATH+\"train/\" #path to the train data\n",
    "train_param[\"val_directory\"]=PATH+\"val/\" #path to the val data\n",
    "train_param[\"training_number\"]=TRAINING_NBER #id of the training\n",
    "train_param[\"epoch\"]=500 #max nber of epochs\n",
    "train_param[\"lambda\"]=0 #factor for the L1 loss\n",
    "train_param[\"lr\"]=0.001"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "train_param[\"normalization\"]=True #if set to True, normalization is applied to the data; the norm implemented is standardization for SAR data and normalization for RGBNIR\n",
    "#train_param[\"dict_band_x\"]=None #the stats for the normalization procedure will be computed on each of these groups of bands\n",
    "#train_param[\"dict_band_label\"]=None\n",
    "#train_param[\"dict_rescale_type\"]=None\n",
    "train_param[\"training_dir\"]=PATH_TRAININGS\n",
    "train_param[\"s2_scale\"]=1/7 #for the normalization, do not change\n",
    "train_param[\"s1_scale\"]=1/5 #for the normalization, do not change\n",
    "train_param[\"s2bands\"]=[\"R,G,B\",\"NIR\"] #for the normalization, do not change"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "train_param[\"lim_train_tile\"]=10\n",
    "train_param[\"lim_val_tile\"]=3"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Option for tracking the training \n",
    "train_param[\"im_saving_step\"]=10 #every n epochs image from the training set are saved\n",
    "train_param[\"weights_saving_step\"]=50 # every n epoch, the model is saved\n",
    "train_param[\"metric_step\"]=10 #every n epochs, the metrics will be computed and displayed with tensorboard"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Remove the negative values in order to have no error in the log : negative value will be replaced usingknn algorithm\n",
      "Important the index of the bands in lband_index should be index that follow each other\n",
      "No scaler was defined before\n",
      "Remove the negative values in order to have no error in the log : negative value will be replaced usingknn algorithm\n",
      "Important the index of the bands in lband_index should be index that follow each other\n",
      "No scaler was defined before\n",
      "No scaler was defined before\n",
      "No scaler was defined before\n",
      "Remove the negative values in order to have no error in the log : negative value will be replaced usingknn algorithm\n",
      "Important the index of the bands in lband_index should be index that follow each other\n",
      "Remove the negative values in order to have no error in the log : negative value will be replaced usingknn algorithm\n",
      "Important the index of the bands in lband_index should be index that follow each other\n",
      "Loading the data done dataX (10, 256, 256, 8) dataY (10, 256, 256, 4)\n",
      "Model: \"discriminator\"\n",
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "d_input (InputLayer)         [(None, 256, 256, 12)]    0         \n",
      "_________________________________________________________________\n",
      "tf_op_layer_SYMMETRIC_paddin [(None, 258, 258, 12)]    0         \n",
      "_________________________________________________________________\n",
      "d_conv1 (Conv2D)             (None, 128, 128, 64)      12352     \n",
      "_________________________________________________________________\n",
      "tf_op_layer_SYMMETRIC_paddin [(None, 130, 130, 64)]    0         \n",
      "_________________________________________________________________\n",
      "d_conv2 (Conv2D)             (None, 64, 64, 256)       262400    \n",
      "_________________________________________________________________\n",
      "d_bn2 (BatchNormalization)   (None, 64, 64, 256)       1024      \n",
      "_________________________________________________________________\n",
      "tf_op_layer_SYMMETRIC_paddin [(None, 66, 66, 256)]     0         \n",
      "_________________________________________________________________\n",
      "d_conv3 (Conv2D)             (None, 32, 32, 156)       639132    \n",
      "_________________________________________________________________\n",
      "d_bn3 (BatchNormalization)   (None, 32, 32, 156)       624       \n",
      "_________________________________________________________________\n",
      "tf_op_layer_SYMMETRIC_paddin [(None, 34, 34, 156)]     0         \n",
      "_________________________________________________________________\n",
      "d_conv4 (Conv2D)             (None, 31, 31, 512)       1278464   \n",
      "_________________________________________________________________\n",
      "d_bn4 (BatchNormalization)   (None, 31, 31, 512)       2048      \n",
      "_________________________________________________________________\n",
      "tf_op_layer_SYMMETRIC_paddin [(None, 33, 33, 512)]     0         \n",
      "_________________________________________________________________\n",
      "d_conv5 (Conv2D)             (None, 30, 30, 1)         8193      \n",
      "_________________________________________________________________\n",
      "d_bn5 (BatchNormalization)   (None, 30, 30, 1)         4         \n",
      "_________________________________________________________________\n",
      "d_last_activ (Activation)    (None, 30, 30, 1)         0         \n",
      "=================================================================\n",
      "Total params: 2,204,241\n",
      "Trainable params: 2,202,391\n",
      "Non-trainable params: 1,850\n",
      "_________________________________________________________________\n",
      "use tanh keras\n",
      "Model: \"Generator\"\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "g_input_data (InputLayer)       [(None, 256, 256, 8) 0                                            \n",
      "__________________________________________________________________________________________________\n",
      "g_conv0 (Conv2D)                (None, 256, 256, 64) 25152       g_input_data[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "g_0_bn (BatchNormalization)     (None, 256, 256, 64) 256         g_conv0[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "g_0_lay_relu (ReLU)             (None, 256, 256, 64) 0           g_0_bn[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "g_conv1 (Conv2D)                (None, 256, 256, 128 73856       g_0_lay_relu[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "g_1_bn (BatchNormalization)     (None, 256, 256, 128 512         g_conv1[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "g_1_lay_relu (ReLU)             (None, 256, 256, 128 0           g_1_bn[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "g_conv2 (Conv2D)                (None, 256, 256, 256 295168      g_1_lay_relu[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "g_2_bn (BatchNormalization)     (None, 256, 256, 256 1024        g_conv2[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "g_2_lay_relu (ReLU)             (None, 256, 256, 256 0           g_2_bn[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "g_block_0_conv1 (Conv2D)        (None, 256, 256, 256 590080      g_2_lay_relu[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "g_block_0_bn1 (BatchNormalizati (None, 256, 256, 256 1024        g_block_0_conv1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_0_relu1 (ReLU)          (None, 256, 256, 256 0           g_block_0_bn1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_0_do (Dropout)          (None, 256, 256, 256 0           g_block_0_relu1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_0_conv2 (Conv2D)        (None, 256, 256, 256 590080      g_block_0_do[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "g_block_0_bn2 (BatchNormalizati (None, 256, 256, 256 1024        g_block_0_conv2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_0_add (Add)             (None, 256, 256, 256 0           g_block_0_bn2[0][0]              \n",
      "                                                                 g_2_lay_relu[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "g_block_0_relu2 (ReLU)          (None, 256, 256, 256 0           g_block_0_add[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_1_conv1 (Conv2D)        (None, 256, 256, 256 590080      g_block_0_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_1_bn1 (BatchNormalizati (None, 256, 256, 256 1024        g_block_1_conv1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_1_relu1 (ReLU)          (None, 256, 256, 256 0           g_block_1_bn1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_1_do (Dropout)          (None, 256, 256, 256 0           g_block_1_relu1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_1_conv2 (Conv2D)        (None, 256, 256, 256 590080      g_block_1_do[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "g_block_1_bn2 (BatchNormalizati (None, 256, 256, 256 1024        g_block_1_conv2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_1_add (Add)             (None, 256, 256, 256 0           g_block_1_bn2[0][0]              \n",
      "                                                                 g_block_0_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_1_relu2 (ReLU)          (None, 256, 256, 256 0           g_block_1_add[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_2_conv1 (Conv2D)        (None, 256, 256, 256 590080      g_block_1_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_2_bn1 (BatchNormalizati (None, 256, 256, 256 1024        g_block_2_conv1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_2_relu1 (ReLU)          (None, 256, 256, 256 0           g_block_2_bn1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_2_do (Dropout)          (None, 256, 256, 256 0           g_block_2_relu1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_2_conv2 (Conv2D)        (None, 256, 256, 256 590080      g_block_2_do[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "g_block_2_bn2 (BatchNormalizati (None, 256, 256, 256 1024        g_block_2_conv2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_2_add (Add)             (None, 256, 256, 256 0           g_block_2_bn2[0][0]              \n",
      "                                                                 g_block_1_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_2_relu2 (ReLU)          (None, 256, 256, 256 0           g_block_2_add[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_3_conv1 (Conv2D)        (None, 256, 256, 256 590080      g_block_2_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_3_bn1 (BatchNormalizati (None, 256, 256, 256 1024        g_block_3_conv1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_3_relu1 (ReLU)          (None, 256, 256, 256 0           g_block_3_bn1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_3_do (Dropout)          (None, 256, 256, 256 0           g_block_3_relu1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_3_conv2 (Conv2D)        (None, 256, 256, 256 590080      g_block_3_do[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "g_block_3_bn2 (BatchNormalizati (None, 256, 256, 256 1024        g_block_3_conv2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_3_add (Add)             (None, 256, 256, 256 0           g_block_3_bn2[0][0]              \n",
      "                                                                 g_block_2_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_3_relu2 (ReLU)          (None, 256, 256, 256 0           g_block_3_add[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_4_conv1 (Conv2D)        (None, 256, 256, 256 590080      g_block_3_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_4_bn1 (BatchNormalizati (None, 256, 256, 256 1024        g_block_4_conv1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_4_relu1 (ReLU)          (None, 256, 256, 256 0           g_block_4_bn1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_4_do (Dropout)          (None, 256, 256, 256 0           g_block_4_relu1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_4_conv2 (Conv2D)        (None, 256, 256, 256 590080      g_block_4_do[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "g_block_4_bn2 (BatchNormalizati (None, 256, 256, 256 1024        g_block_4_conv2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_4_add (Add)             (None, 256, 256, 256 0           g_block_4_bn2[0][0]              \n",
      "                                                                 g_block_3_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_4_relu2 (ReLU)          (None, 256, 256, 256 0           g_block_4_add[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_5_conv1 (Conv2D)        (None, 256, 256, 256 590080      g_block_4_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_5_bn1 (BatchNormalizati (None, 256, 256, 256 1024        g_block_5_conv1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_5_relu1 (ReLU)          (None, 256, 256, 256 0           g_block_5_bn1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_5_do (Dropout)          (None, 256, 256, 256 0           g_block_5_relu1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_5_conv2 (Conv2D)        (None, 256, 256, 256 590080      g_block_5_do[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "g_block_5_bn2 (BatchNormalizati (None, 256, 256, 256 1024        g_block_5_conv2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_5_add (Add)             (None, 256, 256, 256 0           g_block_5_bn2[0][0]              \n",
      "                                                                 g_block_4_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_5_relu2 (ReLU)          (None, 256, 256, 256 0           g_block_5_add[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_6_conv1 (Conv2D)        (None, 256, 256, 256 590080      g_block_5_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_6_bn1 (BatchNormalizati (None, 256, 256, 256 1024        g_block_6_conv1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_6_relu1 (ReLU)          (None, 256, 256, 256 0           g_block_6_bn1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_6_do (Dropout)          (None, 256, 256, 256 0           g_block_6_relu1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_6_conv2 (Conv2D)        (None, 256, 256, 256 590080      g_block_6_do[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "g_block_6_bn2 (BatchNormalizati (None, 256, 256, 256 1024        g_block_6_conv2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_6_add (Add)             (None, 256, 256, 256 0           g_block_6_bn2[0][0]              \n",
      "                                                                 g_block_5_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_6_relu2 (ReLU)          (None, 256, 256, 256 0           g_block_6_add[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_7_conv1 (Conv2D)        (None, 256, 256, 256 590080      g_block_6_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_7_bn1 (BatchNormalizati (None, 256, 256, 256 1024        g_block_7_conv1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_7_relu1 (ReLU)          (None, 256, 256, 256 0           g_block_7_bn1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_7_do (Dropout)          (None, 256, 256, 256 0           g_block_7_relu1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_7_conv2 (Conv2D)        (None, 256, 256, 256 590080      g_block_7_do[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "g_block_7_bn2 (BatchNormalizati (None, 256, 256, 256 1024        g_block_7_conv2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_7_add (Add)             (None, 256, 256, 256 0           g_block_7_bn2[0][0]              \n",
      "                                                                 g_block_6_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_7_relu2 (ReLU)          (None, 256, 256, 256 0           g_block_7_add[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_8_conv1 (Conv2D)        (None, 256, 256, 256 590080      g_block_7_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_8_bn1 (BatchNormalizati (None, 256, 256, 256 1024        g_block_8_conv1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_8_relu1 (ReLU)          (None, 256, 256, 256 0           g_block_8_bn1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_block_8_do (Dropout)          (None, 256, 256, 256 0           g_block_8_relu1[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_8_conv2 (Conv2D)        (None, 256, 256, 256 590080      g_block_8_do[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "g_block_8_bn2 (BatchNormalizati (None, 256, 256, 256 1024        g_block_8_conv2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_8_add (Add)             (None, 256, 256, 256 0           g_block_8_bn2[0][0]              \n",
      "                                                                 g_block_7_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_block_8_relu2 (ReLU)          (None, 256, 256, 256 0           g_block_8_add[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "g_conv_after_resnetblock0 (Conv (None, 256, 256, 128 295040      g_block_8_relu2[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "g_after_resnetblock0_bn2 (Batch (None, 256, 256, 128 512         g_conv_after_resnetblock0[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "g_after_resnetblock_relu_0 (ReL (None, 256, 256, 128 0           g_after_resnetblock0_bn2[0][0]   \n",
      "__________________________________________________________________________________________________\n",
      "g_conv_after_resnetblock1 (Conv (None, 256, 256, 64) 73792       g_after_resnetblock_relu_0[0][0] \n",
      "__________________________________________________________________________________________________\n",
      "g_after_resnetblock1_bn2 (Batch (None, 256, 256, 64) 256         g_conv_after_resnetblock1[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "g_after_resnetblock_relu_1 (ReL (None, 256, 256, 64) 0           g_after_resnetblock1_bn2[0][0]   \n",
      "__________________________________________________________________________________________________\n",
      "g_final_conv (Conv2D)           (None, 256, 256, 4)  2308        g_after_resnetblock_relu_1[0][0] \n",
      "==================================================================================================\n",
      "Total params: 11,407,748\n",
      "Trainable params: 11,397,252\n",
      "Non-trainable params: 10,496\n",
      "__________________________________________________________________________________________________\n",
      "Input G\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "G Tensor(\"Generator/Identity:0\", shape=(None, 256, 256, 4), dtype=float32)\n",
      "INPUT DISCRI  Tensor(\"concat:0\", shape=(None, 256, 256, 12), dtype=float32)\n",
      "[INFO] combined model loss are : \n"
     ]
    }
   ],
   "source": [
    "# Load data and model\n",
    "gan=clean_gan.GAN(model_param, train_param)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[name: \"/device:CPU:0\"\n",
      "device_type: \"CPU\"\n",
      "memory_limit: 268435456\n",
      "locality {\n",
      "}\n",
      "incarnation: 6987912960863644197\n",
      ", name: \"/device:XLA_CPU:0\"\n",
      "device_type: \"XLA_CPU\"\n",
      "memory_limit: 17179869184\n",
      "locality {\n",
      "}\n",
      "incarnation: 7632721629351614333\n",
      "physical_device_desc: \"device: XLA_CPU device\"\n",
      ", name: \"/device:GPU:0\"\n",
      "device_type: \"GPU\"\n",
      "memory_limit: 11211344448\n",
      "locality {\n",
      "  bus_id: 1\n",
      "  links {\n",
      "  }\n",
      "}\n",
      "incarnation: 15102665228234073574\n",
      "physical_device_desc: \"device: 0, name: Tesla K40m, pci bus id: 0000:03:00.0, compute capability: 3.5\"\n",
      ", name: \"/device:XLA_GPU:0\"\n",
      "device_type: \"XLA_GPU\"\n",
      "memory_limit: 17179869184\n",
      "locality {\n",
      "}\n",
      "incarnation: 8528616695278607355\n",
      "physical_device_desc: \"device: XLA_GPU device\"\n",
      "]\n"
     ]
    }
   ],
   "source": [
    "# Check available resources\n",
    "\n",
    "print(device_lib.list_local_devices())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 iter 0 [D loss: 0.798994, acc.: 49.00%] [G loss: 0.573215 0.573215]\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/idumeur/code/sent2_cloud_remover/utils/metrics.py:56: UserWarning: DEPRECATED: skimage.measure.compare_ssim has been moved to skimage.metrics.structural_similarity. It will be removed from skimage.measure in version 0.18.\n",
      "  lssim_batch += [ssim(batch1[i, :, :, :], batch2[i, :, :, :], multichannel=True)]\n",
      "/home/idumeur/miniconda3/envs/training_env/lib/python3.6/site-packages/skimage/metrics/_structural_similarity.py:108: UserWarning: Inputs have mismatched dtype.  Setting data_range based on im1.dtype.\n",
      "  im2[..., ch], **args)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 iter 1 [D loss: 0.806540, acc.: 47.78%] [G loss: 0.365085 0.365085]\n",
      "0 iter 2 [D loss: 0.793997, acc.: 52.00%] [G loss: 0.291942 0.291942]\n",
      "0 iter 3 [D loss: 0.789682, acc.: 47.22%] [G loss: 0.199970 0.199970]\n",
      "0 iter 4 [D loss: 0.777531, acc.: 48.06%] [G loss: 0.553296 0.553296]\n",
      "0 iter 5 [D loss: 0.784808, acc.: 50.06%] [G loss: 0.195326 0.195326]\n",
      "0 iter 6 [D loss: 0.780038, acc.: 50.39%] [G loss: 0.167596 0.167596]\n",
      "0 iter 7 [D loss: 0.769406, acc.: 52.50%] [G loss: 0.845658 0.845658]\n",
      "0 iter 8 [D loss: 0.779096, acc.: 64.06%] [G loss: 0.011596 0.011596]\n",
      "0 iter 9 [D loss: 0.772808, acc.: 44.94%] [G loss: 0.444333 0.444333]\n",
      "Saving model at /srv/osirim/idumeur/trainings/new_model/training_9/checkpoints/ step 0\n",
      "1 iter 10 [D loss: 0.787733, acc.: 49.33%] [G loss: 0.099657 0.099657]\n",
      "1 iter 11 [D loss: 0.784751, acc.: 42.17%] [G loss: 0.054520 0.054520]\n",
      "1 iter 12 [D loss: 0.780950, acc.: 52.00%] [G loss: 0.456693 0.456693]\n",
      "1 iter 13 [D loss: 0.776485, acc.: 36.00%] [G loss: 0.541178 0.541178]\n",
      "1 iter 14 [D loss: 0.762095, acc.: 50.17%] [G loss: 0.656208 0.656208]\n",
      "1 iter 15 [D loss: 0.777740, acc.: 56.06%] [G loss: 0.588749 0.588749]\n",
      "1 iter 16 [D loss: 0.769630, acc.: 49.00%] [G loss: 0.759579 0.759579]\n",
      "1 iter 17 [D loss: 0.753116, acc.: 46.89%] [G loss: 0.680141 0.680141]\n",
      "1 iter 18 [D loss: 0.758791, acc.: 42.33%] [G loss: 0.166365 0.166365]\n",
      "1 iter 19 [D loss: 0.753753, acc.: 50.78%] [G loss: 0.133589 0.133589]\n",
      "2 iter 20 [D loss: 0.777623, acc.: 48.44%] [G loss: 0.062825 0.062825]\n",
      "2 iter 21 [D loss: 0.770191, acc.: 53.11%] [G loss: 0.333127 0.333127]\n",
      "2 iter 22 [D loss: 0.778464, acc.: 51.17%] [G loss: 0.583528 0.583528]\n",
      "2 iter 23 [D loss: 0.774766, acc.: 47.78%] [G loss: 0.679733 0.679733]\n",
      "2 iter 24 [D loss: 0.760559, acc.: 58.72%] [G loss: 0.524667 0.524667]\n",
      "2 iter 25 [D loss: 0.762753, acc.: 45.78%] [G loss: 0.235402 0.235402]\n",
      "2 iter 26 [D loss: 0.767955, acc.: 37.61%] [G loss: 0.262449 0.262449]\n",
      "2 iter 27 [D loss: 0.739756, acc.: 45.72%] [G loss: 0.820153 0.820153]\n",
      "2 iter 28 [D loss: 0.761144, acc.: 58.44%] [G loss: 0.488271 0.488271]\n",
      "2 iter 29 [D loss: 0.745956, acc.: 50.89%] [G loss: 0.562809 0.562809]\n",
      "3 iter 30 [D loss: 0.765876, acc.: 52.22%] [G loss: 0.529821 0.529821]\n",
      "3 iter 31 [D loss: 0.765272, acc.: 54.78%] [G loss: 0.293126 0.293126]\n",
      "3 iter 32 [D loss: 0.758945, acc.: 51.78%] [G loss: 0.456348 0.456348]\n",
      "3 iter 33 [D loss: 0.774482, acc.: 25.61%] [G loss: 0.442765 0.442765]\n",
      "3 iter 34 [D loss: 0.747925, acc.: 65.56%] [G loss: 0.763211 0.763211]\n",
      "3 iter 35 [D loss: 0.761605, acc.: 53.00%] [G loss: 0.715643 0.715643]\n",
      "3 iter 36 [D loss: 0.764428, acc.: 54.17%] [G loss: 0.840432 0.840432]\n",
      "3 iter 37 [D loss: 0.737966, acc.: 63.44%] [G loss: 0.042963 0.042963]\n",
      "3 iter 38 [D loss: 0.755165, acc.: 41.33%] [G loss: 0.548379 0.548379]\n",
      "3 iter 39 [D loss: 0.751192, acc.: 50.94%] [G loss: 0.799944 0.799944]\n",
      "4 iter 40 [D loss: 0.759594, acc.: 49.61%] [G loss: 0.730665 0.730665]\n",
      "4 iter 41 [D loss: 0.753606, acc.: 47.33%] [G loss: 0.717927 0.717927]\n",
      "4 iter 42 [D loss: 0.775347, acc.: 52.00%] [G loss: 0.808556 0.808556]\n",
      "4 iter 43 [D loss: 0.751446, acc.: 34.28%] [G loss: 0.776554 0.776554]\n",
      "4 iter 44 [D loss: 0.738057, acc.: 50.39%] [G loss: 0.843701 0.843701]\n",
      "4 iter 45 [D loss: 0.752276, acc.: 47.06%] [G loss: 0.625437 0.625437]\n",
      "4 iter 46 [D loss: 0.749659, acc.: 34.28%] [G loss: 0.888130 0.888130]\n",
      "4 iter 47 [D loss: 0.733750, acc.: 51.61%] [G loss: 0.244787 0.244787]\n",
      "4 iter 48 [D loss: 0.745477, acc.: 42.67%] [G loss: 0.838646 0.838646]\n",
      "4 iter 49 [D loss: 0.732075, acc.: 48.89%] [G loss: 0.795988 0.795988]\n",
      "5 iter 50 [D loss: 0.747235, acc.: 54.50%] [G loss: 0.814328 0.814328]\n",
      "5 iter 51 [D loss: 0.741093, acc.: 47.61%] [G loss: 0.710720 0.710720]\n",
      "5 iter 52 [D loss: 0.759362, acc.: 56.11%] [G loss: 0.517107 0.517107]\n",
      "5 iter 53 [D loss: 0.763241, acc.: 26.56%] [G loss: 0.729828 0.729828]\n",
      "5 iter 54 [D loss: 0.743166, acc.: 48.72%] [G loss: 0.850318 0.850318]\n",
      "5 iter 55 [D loss: 0.746569, acc.: 51.78%] [G loss: 0.682152 0.682152]\n",
      "5 iter 56 [D loss: 0.728977, acc.: 42.00%] [G loss: 0.808775 0.808775]\n",
      "5 iter 57 [D loss: 0.726712, acc.: 64.67%] [G loss: 0.307505 0.307505]\n",
      "5 iter 58 [D loss: 0.740754, acc.: 48.83%] [G loss: 0.802257 0.802257]\n",
      "5 iter 59 [D loss: 0.729385, acc.: 46.50%] [G loss: 0.781940 0.781940]\n",
      "6 iter 60 [D loss: 0.742595, acc.: 57.06%] [G loss: 0.761607 0.761607]\n",
      "6 iter 61 [D loss: 0.736944, acc.: 41.78%] [G loss: 0.776860 0.776860]\n",
      "6 iter 62 [D loss: 0.739849, acc.: 60.56%] [G loss: 0.820496 0.820496]\n",
      "6 iter 63 [D loss: 0.746601, acc.: 45.22%] [G loss: 0.523114 0.523114]\n",
      "6 iter 64 [D loss: 0.741349, acc.: 51.17%] [G loss: 0.335856 0.335856]\n",
      "6 iter 65 [D loss: 0.743250, acc.: 62.83%] [G loss: 1.113788 1.113788]\n",
      "6 iter 66 [D loss: 0.732594, acc.: 35.22%] [G loss: 0.506587 0.506587]\n",
      "6 iter 67 [D loss: 0.727696, acc.: 53.72%] [G loss: 0.223375 0.223375]\n",
      "6 iter 68 [D loss: 0.740529, acc.: 48.33%] [G loss: 0.770776 0.770776]\n",
      "6 iter 69 [D loss: 0.731217, acc.: 37.78%] [G loss: 0.503639 0.503639]\n",
      "7 iter 70 [D loss: 0.743333, acc.: 60.61%] [G loss: 0.739124 0.739124]\n",
      "7 iter 71 [D loss: 0.734941, acc.: 35.06%] [G loss: 0.777377 0.777377]\n",
      "7 iter 72 [D loss: 0.732085, acc.: 47.44%] [G loss: 0.806391 0.806391]\n",
      "7 iter 73 [D loss: 0.732878, acc.: 34.33%] [G loss: 0.799931 0.799931]\n",
      "7 iter 74 [D loss: 0.738837, acc.: 61.44%] [G loss: 0.772084 0.772084]\n",
      "7 iter 75 [D loss: 0.725842, acc.: 40.22%] [G loss: 0.615921 0.615921]\n",
      "7 iter 76 [D loss: 0.719432, acc.: 47.22%] [G loss: 0.703397 0.703397]\n",
      "7 iter 77 [D loss: 0.717437, acc.: 58.72%] [G loss: 0.704661 0.704661]\n",
      "7 iter 78 [D loss: 0.725407, acc.: 55.61%] [G loss: 0.699997 0.699997]\n",
      "7 iter 79 [D loss: 0.724183, acc.: 48.39%] [G loss: 0.769323 0.769323]\n",
      "8 iter 80 [D loss: 0.734969, acc.: 38.61%] [G loss: 0.781986 0.781986]\n",
      "8 iter 81 [D loss: 0.733617, acc.: 51.78%] [G loss: 0.716233 0.716233]\n",
      "8 iter 82 [D loss: 0.717231, acc.: 42.11%] [G loss: 0.781188 0.781188]\n",
      "8 iter 83 [D loss: 0.723252, acc.: 50.44%] [G loss: 0.797635 0.797635]\n",
      "8 iter 84 [D loss: 0.731405, acc.: 41.50%] [G loss: 0.762126 0.762126]\n",
      "8 iter 85 [D loss: 0.723614, acc.: 60.28%] [G loss: 0.707464 0.707464]\n",
      "8 iter 86 [D loss: 0.715358, acc.: 45.44%] [G loss: 0.744683 0.744683]\n",
      "8 iter 87 [D loss: 0.717261, acc.: 65.89%] [G loss: 0.674548 0.674548]\n",
      "8 iter 88 [D loss: 0.725460, acc.: 30.94%] [G loss: 0.741159 0.741159]\n",
      "8 iter 89 [D loss: 0.723207, acc.: 56.61%] [G loss: 0.788311 0.788311]\n",
      "9 iter 90 [D loss: 0.732978, acc.: 36.06%] [G loss: 0.780572 0.780572]\n",
      "9 iter 91 [D loss: 0.729794, acc.: 47.56%] [G loss: 0.735346 0.735346]\n",
      "9 iter 92 [D loss: 0.714950, acc.: 48.61%] [G loss: 0.753501 0.753501]\n",
      "9 iter 93 [D loss: 0.723996, acc.: 75.33%] [G loss: 0.680076 0.680076]\n",
      "9 iter 94 [D loss: 0.728848, acc.: 42.39%] [G loss: 0.765388 0.765388]\n",
      "9 iter 95 [D loss: 0.722742, acc.: 49.06%] [G loss: 0.731435 0.731435]\n",
      "9 iter 96 [D loss: 0.711088, acc.: 37.78%] [G loss: 0.771183 0.771183]\n",
      "9 iter 97 [D loss: 0.719973, acc.: 71.89%] [G loss: 0.549576 0.549576]\n",
      "9 iter 98 [D loss: 0.729861, acc.: 50.94%] [G loss: 0.735483 0.735483]\n",
      "9 iter 99 [D loss: 0.722666, acc.: 45.94%] [G loss: 0.723193 0.723193]\n",
      "10 iter 100 [D loss: 0.726532, acc.: 51.39%] [G loss: 0.757667 0.757667]\n",
      "10 iter 101 [D loss: 0.718426, acc.: 47.06%] [G loss: 0.771889 0.771889]\n",
      "10 iter 102 [D loss: 0.716292, acc.: 46.78%] [G loss: 0.763294 0.763294]\n",
      "10 iter 103 [D loss: 0.715201, acc.: 45.00%] [G loss: 0.756762 0.756762]\n",
      "10 iter 104 [D loss: 0.721563, acc.: 41.89%] [G loss: 0.737434 0.737434]\n",
      "10 iter 105 [D loss: 0.723396, acc.: 59.33%] [G loss: 0.741916 0.741916]\n",
      "10 iter 106 [D loss: 0.715288, acc.: 34.94%] [G loss: 0.684248 0.684248]\n",
      "10 iter 107 [D loss: 0.712454, acc.: 52.33%] [G loss: 0.819063 0.819063]\n",
      "10 iter 108 [D loss: 0.730807, acc.: 51.28%] [G loss: 0.770201 0.770201]\n",
      "10 iter 109 [D loss: 0.722994, acc.: 43.44%] [G loss: 0.742182 0.742182]\n",
      "11 iter 110 [D loss: 0.724380, acc.: 40.33%] [G loss: 0.757506 0.757506]\n",
      "11 iter 111 [D loss: 0.716900, acc.: 51.33%] [G loss: 0.757402 0.757402]\n",
      "11 iter 112 [D loss: 0.715061, acc.: 44.33%] [G loss: 0.759884 0.759884]\n",
      "11 iter 113 [D loss: 0.715985, acc.: 29.89%] [G loss: 0.731297 0.731297]\n",
      "11 iter 114 [D loss: 0.716022, acc.: 46.56%] [G loss: 0.738188 0.738188]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "11 iter 115 [D loss: 0.717836, acc.: 53.50%] [G loss: 0.772792 0.772792]\n",
      "11 iter 116 [D loss: 0.709521, acc.: 48.06%] [G loss: 0.737469 0.737469]\n",
      "11 iter 117 [D loss: 0.711638, acc.: 51.56%] [G loss: 0.781883 0.781883]\n",
      "11 iter 118 [D loss: 0.715560, acc.: 60.44%] [G loss: 0.760207 0.760207]\n",
      "11 iter 119 [D loss: 0.716599, acc.: 46.83%] [G loss: 0.764892 0.764892]\n",
      "12 iter 120 [D loss: 0.721346, acc.: 64.67%] [G loss: 0.763846 0.763846]\n",
      "12 iter 121 [D loss: 0.715715, acc.: 45.00%] [G loss: 0.776274 0.776274]\n",
      "12 iter 122 [D loss: 0.710528, acc.: 48.39%] [G loss: 0.810687 0.810687]\n",
      "12 iter 123 [D loss: 0.712252, acc.: 46.00%] [G loss: 0.783228 0.783228]\n",
      "12 iter 124 [D loss: 0.715224, acc.: 42.28%] [G loss: 0.767156 0.767156]\n",
      "12 iter 125 [D loss: 0.719056, acc.: 55.11%] [G loss: 0.761735 0.761735]\n",
      "12 iter 126 [D loss: 0.707670, acc.: 39.78%] [G loss: 0.708022 0.708022]\n",
      "12 iter 127 [D loss: 0.711068, acc.: 50.89%] [G loss: 0.779043 0.779043]\n",
      "12 iter 128 [D loss: 0.713869, acc.: 53.22%] [G loss: 0.772883 0.772883]\n",
      "12 iter 129 [D loss: 0.713935, acc.: 46.17%] [G loss: 0.749048 0.749048]\n",
      "13 iter 130 [D loss: 0.715858, acc.: 54.94%] [G loss: 0.782760 0.782760]\n",
      "13 iter 131 [D loss: 0.713103, acc.: 49.72%] [G loss: 0.768184 0.768184]\n",
      "13 iter 132 [D loss: 0.707655, acc.: 49.50%] [G loss: 0.809644 0.809644]\n",
      "13 iter 133 [D loss: 0.711160, acc.: 40.94%] [G loss: 0.768743 0.768743]\n",
      "13 iter 134 [D loss: 0.713780, acc.: 44.50%] [G loss: 0.798701 0.798701]\n",
      "13 iter 135 [D loss: 0.716242, acc.: 54.33%] [G loss: 0.772874 0.772874]\n",
      "13 iter 136 [D loss: 0.707488, acc.: 54.67%] [G loss: 0.727896 0.727896]\n",
      "13 iter 137 [D loss: 0.709050, acc.: 45.67%] [G loss: 0.775384 0.775384]\n",
      "13 iter 138 [D loss: 0.712773, acc.: 56.67%] [G loss: 0.764996 0.764996]\n",
      "13 iter 139 [D loss: 0.710246, acc.: 43.56%] [G loss: 0.738121 0.738121]\n",
      "14 iter 140 [D loss: 0.713808, acc.: 52.61%] [G loss: 0.777971 0.777971]\n",
      "14 iter 141 [D loss: 0.712032, acc.: 50.28%] [G loss: 0.764623 0.764623]\n",
      "14 iter 142 [D loss: 0.707103, acc.: 51.39%] [G loss: 0.742787 0.742787]\n",
      "14 iter 143 [D loss: 0.708364, acc.: 47.33%] [G loss: 0.763709 0.763709]\n",
      "14 iter 144 [D loss: 0.712348, acc.: 49.94%] [G loss: 0.738675 0.738675]\n",
      "14 iter 145 [D loss: 0.714521, acc.: 56.33%] [G loss: 0.773753 0.773753]\n",
      "14 iter 146 [D loss: 0.707411, acc.: 53.33%] [G loss: 0.723598 0.723598]\n",
      "14 iter 147 [D loss: 0.707174, acc.: 52.06%] [G loss: 0.765224 0.765224]\n",
      "14 iter 148 [D loss: 0.711061, acc.: 50.39%] [G loss: 0.750061 0.750061]\n",
      "14 iter 149 [D loss: 0.708203, acc.: 52.00%] [G loss: 0.755361 0.755361]\n",
      "15 iter 150 [D loss: 0.712846, acc.: 43.89%] [G loss: 0.752390 0.752390]\n",
      "15 iter 151 [D loss: 0.710348, acc.: 52.00%] [G loss: 0.755092 0.755092]\n",
      "15 iter 152 [D loss: 0.706798, acc.: 49.78%] [G loss: 0.740675 0.740675]\n",
      "15 iter 153 [D loss: 0.708439, acc.: 36.67%] [G loss: 0.733712 0.733712]\n",
      "15 iter 154 [D loss: 0.712021, acc.: 56.22%] [G loss: 0.698611 0.698611]\n",
      "15 iter 155 [D loss: 0.712693, acc.: 54.33%] [G loss: 0.749446 0.749446]\n",
      "15 iter 156 [D loss: 0.706573, acc.: 51.67%] [G loss: 0.720567 0.720567]\n",
      "15 iter 157 [D loss: 0.708162, acc.: 43.44%] [G loss: 0.775690 0.775690]\n",
      "15 iter 158 [D loss: 0.709057, acc.: 55.50%] [G loss: 0.750266 0.750266]\n",
      "15 iter 159 [D loss: 0.707177, acc.: 54.17%] [G loss: 0.763546 0.763546]\n",
      "16 iter 160 [D loss: 0.711923, acc.: 49.33%] [G loss: 0.739015 0.739015]\n",
      "16 iter 161 [D loss: 0.707897, acc.: 55.83%] [G loss: 0.744912 0.744912]\n",
      "16 iter 162 [D loss: 0.706583, acc.: 49.78%] [G loss: 0.760323 0.760323]\n",
      "16 iter 163 [D loss: 0.706700, acc.: 49.06%] [G loss: 0.731866 0.731866]\n",
      "16 iter 164 [D loss: 0.708781, acc.: 50.72%] [G loss: 0.700283 0.700283]\n",
      "16 iter 165 [D loss: 0.712166, acc.: 55.94%] [G loss: 0.750344 0.750344]\n",
      "16 iter 166 [D loss: 0.706631, acc.: 53.44%] [G loss: 0.712198 0.712198]\n",
      "16 iter 167 [D loss: 0.706641, acc.: 50.17%] [G loss: 0.765619 0.765619]\n",
      "16 iter 168 [D loss: 0.708339, acc.: 47.72%] [G loss: 0.759885 0.759885]\n",
      "16 iter 169 [D loss: 0.707299, acc.: 47.89%] [G loss: 0.739360 0.739360]\n",
      "17 iter 170 [D loss: 0.711513, acc.: 52.89%] [G loss: 0.741166 0.741166]\n",
      "17 iter 171 [D loss: 0.706702, acc.: 54.44%] [G loss: 0.733879 0.733879]\n",
      "17 iter 172 [D loss: 0.706412, acc.: 50.33%] [G loss: 0.749208 0.749208]\n",
      "17 iter 173 [D loss: 0.706621, acc.: 48.61%] [G loss: 0.742196 0.742196]\n",
      "17 iter 174 [D loss: 0.706781, acc.: 42.28%] [G loss: 0.747353 0.747353]\n",
      "17 iter 175 [D loss: 0.710156, acc.: 50.17%] [G loss: 0.725981 0.725981]\n",
      "17 iter 176 [D loss: 0.706437, acc.: 52.61%] [G loss: 0.721934 0.721934]\n",
      "17 iter 177 [D loss: 0.706721, acc.: 54.78%] [G loss: 0.755002 0.755002]\n",
      "17 iter 178 [D loss: 0.707144, acc.: 56.11%] [G loss: 0.722867 0.722867]\n",
      "17 iter 179 [D loss: 0.706900, acc.: 52.78%] [G loss: 0.741188 0.741188]\n",
      "18 iter 180 [D loss: 0.710905, acc.: 53.83%] [G loss: 0.731396 0.731396]\n",
      "18 iter 181 [D loss: 0.706633, acc.: 55.67%] [G loss: 0.738424 0.738424]\n",
      "18 iter 182 [D loss: 0.706258, acc.: 48.33%] [G loss: 0.732053 0.732053]\n",
      "18 iter 183 [D loss: 0.706299, acc.: 49.83%] [G loss: 0.725294 0.725294]\n",
      "18 iter 184 [D loss: 0.707301, acc.: 38.67%] [G loss: 0.746303 0.746303]\n",
      "18 iter 185 [D loss: 0.707665, acc.: 51.33%] [G loss: 0.718484 0.718484]\n",
      "18 iter 186 [D loss: 0.706256, acc.: 48.33%] [G loss: 0.720029 0.720029]\n",
      "18 iter 187 [D loss: 0.706464, acc.: 51.28%] [G loss: 0.729597 0.729597]\n",
      "18 iter 188 [D loss: 0.706393, acc.: 53.56%] [G loss: 0.724832 0.724832]\n",
      "18 iter 189 [D loss: 0.706278, acc.: 54.06%] [G loss: 0.721179 0.721179]\n",
      "19 iter 190 [D loss: 0.708896, acc.: 48.83%] [G loss: 0.719628 0.719628]\n",
      "19 iter 191 [D loss: 0.706686, acc.: 50.83%] [G loss: 0.729146 0.729146]\n",
      "19 iter 192 [D loss: 0.706272, acc.: 50.33%] [G loss: 0.735363 0.735363]\n",
      "19 iter 193 [D loss: 0.706464, acc.: 48.00%] [G loss: 0.704362 0.704362]\n",
      "19 iter 194 [D loss: 0.706441, acc.: 49.56%] [G loss: 0.695176 0.695176]\n",
      "19 iter 195 [D loss: 0.707449, acc.: 52.06%] [G loss: 0.639153 0.639153]\n",
      "19 iter 196 [D loss: 0.706259, acc.: 48.06%] [G loss: 0.820587 0.820587]\n",
      "19 iter 197 [D loss: 0.709066, acc.: 59.44%] [G loss: 0.890134 0.890134]\n",
      "19 iter 198 [D loss: 0.707910, acc.: 60.11%] [G loss: 0.708630 0.708630]\n",
      "19 iter 199 [D loss: 0.706672, acc.: 49.00%] [G loss: 0.692890 0.692890]\n",
      "20 iter 200 [D loss: 0.708263, acc.: 45.56%] [G loss: 0.717728 0.717728]\n",
      "20 iter 201 [D loss: 0.706617, acc.: 51.44%] [G loss: 0.706087 0.706087]\n",
      "20 iter 202 [D loss: 0.706072, acc.: 47.50%] [G loss: 0.699716 0.699716]\n",
      "20 iter 203 [D loss: 0.706294, acc.: 45.61%] [G loss: 0.662158 0.662158]\n",
      "20 iter 204 [D loss: 0.706469, acc.: 48.67%] [G loss: 0.673299 0.673299]\n",
      "20 iter 205 [D loss: 0.706970, acc.: 47.67%] [G loss: 0.711024 0.711024]\n",
      "20 iter 206 [D loss: 0.706086, acc.: 49.33%] [G loss: 0.702057 0.702057]\n",
      "20 iter 207 [D loss: 0.706023, acc.: 51.94%] [G loss: 0.721234 0.721234]\n",
      "20 iter 208 [D loss: 0.706111, acc.: 54.06%] [G loss: 0.700505 0.700505]\n",
      "20 iter 209 [D loss: 0.706598, acc.: 48.72%] [G loss: 0.688968 0.688968]\n",
      "21 iter 210 [D loss: 0.706190, acc.: 51.11%] [G loss: 0.721043 0.721043]\n",
      "21 iter 211 [D loss: 0.706236, acc.: 51.50%] [G loss: 0.712484 0.712484]\n",
      "21 iter 212 [D loss: 0.705863, acc.: 49.17%] [G loss: 0.674191 0.674191]\n",
      "21 iter 213 [D loss: 0.705975, acc.: 51.28%] [G loss: 0.682857 0.682857]\n",
      "21 iter 214 [D loss: 0.706873, acc.: 30.06%] [G loss: 0.801797 0.801797]\n",
      "21 iter 215 [D loss: 0.706544, acc.: 50.78%] [G loss: 0.722457 0.722457]\n",
      "21 iter 216 [D loss: 0.705819, acc.: 48.72%] [G loss: 0.675541 0.675541]\n",
      "21 iter 217 [D loss: 0.705877, acc.: 49.94%] [G loss: 0.740507 0.740507]\n",
      "21 iter 218 [D loss: 0.706018, acc.: 49.78%] [G loss: 0.708752 0.708752]\n",
      "21 iter 219 [D loss: 0.706096, acc.: 53.72%] [G loss: 0.713240 0.713240]\n",
      "22 iter 220 [D loss: 0.706113, acc.: 47.22%] [G loss: 0.728337 0.728337]\n",
      "22 iter 221 [D loss: 0.706142, acc.: 54.00%] [G loss: 0.720442 0.720442]\n",
      "22 iter 222 [D loss: 0.705763, acc.: 50.33%] [G loss: 0.694401 0.694401]\n",
      "22 iter 223 [D loss: 0.705854, acc.: 49.89%] [G loss: 0.691101 0.691101]\n",
      "22 iter 224 [D loss: 0.705848, acc.: 49.17%] [G loss: 0.668827 0.668827]\n",
      "22 iter 225 [D loss: 0.705971, acc.: 51.78%] [G loss: 0.705612 0.705612]\n",
      "22 iter 226 [D loss: 0.705792, acc.: 47.50%] [G loss: 0.723653 0.723653]\n",
      "22 iter 227 [D loss: 0.705783, acc.: 52.17%] [G loss: 0.727655 0.727655]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "22 iter 228 [D loss: 0.705792, acc.: 49.61%] [G loss: 0.698757 0.698757]\n",
      "22 iter 229 [D loss: 0.705806, acc.: 52.94%] [G loss: 0.705424 0.705424]\n",
      "23 iter 230 [D loss: 0.705838, acc.: 47.33%] [G loss: 0.718748 0.718748]\n",
      "23 iter 231 [D loss: 0.705769, acc.: 53.72%] [G loss: 0.712706 0.712706]\n",
      "23 iter 232 [D loss: 0.705651, acc.: 49.50%] [G loss: 0.707458 0.707458]\n",
      "23 iter 233 [D loss: 0.705848, acc.: 50.11%] [G loss: 0.696636 0.696636]\n",
      "23 iter 234 [D loss: 0.705816, acc.: 48.78%] [G loss: 0.682330 0.682330]\n",
      "23 iter 235 [D loss: 0.705859, acc.: 51.44%] [G loss: 0.694247 0.694247]\n",
      "23 iter 236 [D loss: 0.705623, acc.: 48.28%] [G loss: 0.718509 0.718509]\n",
      "23 iter 237 [D loss: 0.705735, acc.: 51.83%] [G loss: 0.682881 0.682881]\n",
      "23 iter 238 [D loss: 0.705747, acc.: 46.78%] [G loss: 0.693302 0.693302]\n",
      "23 iter 239 [D loss: 0.705717, acc.: 53.50%] [G loss: 0.703698 0.703698]\n",
      "24 iter 240 [D loss: 0.705723, acc.: 47.83%] [G loss: 0.708571 0.708571]\n",
      "24 iter 241 [D loss: 0.705700, acc.: 50.94%] [G loss: 0.706833 0.706833]\n",
      "24 iter 242 [D loss: 0.705541, acc.: 48.78%] [G loss: 0.700683 0.700683]\n",
      "24 iter 243 [D loss: 0.705662, acc.: 49.28%] [G loss: 0.697649 0.697649]\n",
      "24 iter 244 [D loss: 0.705617, acc.: 47.67%] [G loss: 0.683853 0.683853]\n",
      "24 iter 245 [D loss: 0.705667, acc.: 53.83%] [G loss: 0.700602 0.700602]\n",
      "24 iter 246 [D loss: 0.705513, acc.: 47.83%] [G loss: 0.717544 0.717544]\n",
      "24 iter 247 [D loss: 0.705528, acc.: 50.61%] [G loss: 0.649223 0.649223]\n",
      "24 iter 248 [D loss: 0.705558, acc.: 49.67%] [G loss: 0.698146 0.698146]\n",
      "24 iter 249 [D loss: 0.705560, acc.: 50.50%] [G loss: 0.699470 0.699470]\n",
      "25 iter 250 [D loss: 0.705561, acc.: 48.72%] [G loss: 0.706368 0.706368]\n",
      "25 iter 251 [D loss: 0.705534, acc.: 49.83%] [G loss: 0.705096 0.705096]\n",
      "25 iter 252 [D loss: 0.705429, acc.: 48.44%] [G loss: 0.701200 0.701200]\n",
      "25 iter 253 [D loss: 0.705459, acc.: 48.89%] [G loss: 0.697374 0.697374]\n",
      "25 iter 254 [D loss: 0.705441, acc.: 48.94%] [G loss: 0.686111 0.686111]\n",
      "25 iter 255 [D loss: 0.705497, acc.: 51.28%] [G loss: 0.694607 0.694607]\n",
      "25 iter 256 [D loss: 0.705384, acc.: 47.94%] [G loss: 0.714991 0.714991]\n",
      "25 iter 257 [D loss: 0.705402, acc.: 50.56%] [G loss: 0.616758 0.616758]\n",
      "25 iter 258 [D loss: 0.705416, acc.: 49.78%] [G loss: 0.696457 0.696457]\n",
      "25 iter 259 [D loss: 0.705383, acc.: 49.33%] [G loss: 0.685511 0.685511]\n",
      "26 iter 260 [D loss: 0.705402, acc.: 49.67%] [G loss: 0.691177 0.691177]\n",
      "26 iter 261 [D loss: 0.705394, acc.: 50.22%] [G loss: 0.693515 0.693515]\n",
      "26 iter 262 [D loss: 0.705337, acc.: 47.11%] [G loss: 0.698268 0.698268]\n",
      "26 iter 263 [D loss: 0.705315, acc.: 48.72%] [G loss: 0.694849 0.694849]\n",
      "26 iter 264 [D loss: 0.705346, acc.: 48.61%] [G loss: 0.687869 0.687869]\n",
      "26 iter 265 [D loss: 0.705383, acc.: 52.94%] [G loss: 0.693310 0.693310]\n",
      "26 iter 266 [D loss: 0.705286, acc.: 46.89%] [G loss: 0.714304 0.714304]\n",
      "26 iter 267 [D loss: 0.705280, acc.: 50.83%] [G loss: 0.610848 0.610848]\n",
      "26 iter 268 [D loss: 0.705305, acc.: 48.22%] [G loss: 0.689432 0.689432]\n",
      "26 iter 269 [D loss: 0.705264, acc.: 49.22%] [G loss: 0.678500 0.678500]\n",
      "27 iter 270 [D loss: 0.705275, acc.: 49.83%] [G loss: 0.684476 0.684476]\n",
      "27 iter 271 [D loss: 0.705279, acc.: 49.28%] [G loss: 0.686968 0.686968]\n",
      "27 iter 272 [D loss: 0.705225, acc.: 46.56%] [G loss: 0.698182 0.698182]\n",
      "27 iter 273 [D loss: 0.705202, acc.: 48.67%] [G loss: 0.688220 0.688220]\n",
      "27 iter 274 [D loss: 0.705236, acc.: 48.06%] [G loss: 0.688505 0.688505]\n",
      "27 iter 275 [D loss: 0.705222, acc.: 51.94%] [G loss: 0.676519 0.676519]\n",
      "27 iter 276 [D loss: 0.705171, acc.: 47.56%] [G loss: 0.703337 0.703337]\n",
      "27 iter 277 [D loss: 0.705159, acc.: 50.67%] [G loss: 0.580439 0.580439]\n",
      "27 iter 278 [D loss: 0.705172, acc.: 49.22%] [G loss: 0.686605 0.686605]\n",
      "27 iter 279 [D loss: 0.705153, acc.: 48.44%] [G loss: 0.672257 0.672257]\n",
      "28 iter 280 [D loss: 0.705162, acc.: 49.44%] [G loss: 0.682344 0.682344]\n",
      "28 iter 281 [D loss: 0.705182, acc.: 50.11%] [G loss: 0.684640 0.684640]\n",
      "28 iter 282 [D loss: 0.705122, acc.: 47.67%] [G loss: 0.700029 0.700029]\n",
      "28 iter 283 [D loss: 0.705100, acc.: 48.39%] [G loss: 0.692344 0.692344]\n",
      "28 iter 284 [D loss: 0.705110, acc.: 48.56%] [G loss: 0.693374 0.693374]\n",
      "28 iter 285 [D loss: 0.705100, acc.: 52.78%] [G loss: 0.676934 0.676934]\n",
      "28 iter 286 [D loss: 0.705061, acc.: 46.61%] [G loss: 0.701582 0.701582]\n",
      "28 iter 287 [D loss: 0.705051, acc.: 50.22%] [G loss: 0.576268 0.576268]\n",
      "28 iter 288 [D loss: 0.705058, acc.: 49.17%] [G loss: 0.687871 0.687871]\n",
      "28 iter 289 [D loss: 0.705031, acc.: 48.94%] [G loss: 0.675081 0.675081]\n",
      "29 iter 290 [D loss: 0.705038, acc.: 50.22%] [G loss: 0.681088 0.681088]\n",
      "29 iter 291 [D loss: 0.705033, acc.: 50.33%] [G loss: 0.681933 0.681933]\n",
      "29 iter 292 [D loss: 0.705013, acc.: 46.83%] [G loss: 0.692831 0.692831]\n",
      "29 iter 293 [D loss: 0.704983, acc.: 48.67%] [G loss: 0.689210 0.689210]\n",
      "29 iter 294 [D loss: 0.704977, acc.: 48.33%] [G loss: 0.688918 0.688918]\n",
      "29 iter 295 [D loss: 0.704992, acc.: 51.11%] [G loss: 0.681993 0.681993]\n",
      "29 iter 296 [D loss: 0.704950, acc.: 45.39%] [G loss: 0.700635 0.700635]\n",
      "29 iter 297 [D loss: 0.704940, acc.: 50.44%] [G loss: 0.590022 0.590022]\n",
      "29 iter 298 [D loss: 0.704938, acc.: 49.83%] [G loss: 0.690934 0.690934]\n",
      "29 iter 299 [D loss: 0.704915, acc.: 47.78%] [G loss: 0.676715 0.676715]\n",
      "30 iter 300 [D loss: 0.704924, acc.: 49.39%] [G loss: 0.685813 0.685813]\n",
      "30 iter 301 [D loss: 0.704918, acc.: 50.28%] [G loss: 0.688338 0.688338]\n",
      "30 iter 302 [D loss: 0.704910, acc.: 45.33%] [G loss: 0.695856 0.695856]\n",
      "30 iter 303 [D loss: 0.704870, acc.: 48.78%] [G loss: 0.689590 0.689590]\n",
      "30 iter 304 [D loss: 0.704868, acc.: 48.28%] [G loss: 0.688925 0.688925]\n",
      "30 iter 305 [D loss: 0.704877, acc.: 51.72%] [G loss: 0.683717 0.683717]\n",
      "30 iter 306 [D loss: 0.704836, acc.: 47.28%] [G loss: 0.694562 0.694562]\n",
      "30 iter 307 [D loss: 0.704828, acc.: 50.44%] [G loss: 0.590155 0.590155]\n",
      "30 iter 308 [D loss: 0.704825, acc.: 49.94%] [G loss: 0.687815 0.687815]\n",
      "30 iter 309 [D loss: 0.704805, acc.: 47.83%] [G loss: 0.674760 0.674760]\n",
      "31 iter 310 [D loss: 0.704810, acc.: 49.89%] [G loss: 0.684356 0.684356]\n",
      "31 iter 311 [D loss: 0.704805, acc.: 49.33%] [G loss: 0.686182 0.686182]\n",
      "31 iter 312 [D loss: 0.704785, acc.: 45.72%] [G loss: 0.694948 0.694948]\n",
      "31 iter 313 [D loss: 0.704757, acc.: 48.72%] [G loss: 0.688420 0.688420]\n",
      "31 iter 314 [D loss: 0.704757, acc.: 47.94%] [G loss: 0.688063 0.688063]\n",
      "31 iter 315 [D loss: 0.704757, acc.: 51.11%] [G loss: 0.686190 0.686190]\n",
      "31 iter 316 [D loss: 0.704726, acc.: 47.00%] [G loss: 0.692518 0.692518]\n",
      "31 iter 317 [D loss: 0.704717, acc.: 49.56%] [G loss: 0.594053 0.594053]\n",
      "31 iter 318 [D loss: 0.704712, acc.: 49.94%] [G loss: 0.686852 0.686852]\n",
      "31 iter 319 [D loss: 0.704693, acc.: 47.89%] [G loss: 0.673922 0.673922]\n",
      "32 iter 320 [D loss: 0.704696, acc.: 50.17%] [G loss: 0.685086 0.685086]\n",
      "32 iter 321 [D loss: 0.704688, acc.: 50.00%] [G loss: 0.684910 0.684910]\n",
      "32 iter 322 [D loss: 0.704674, acc.: 45.72%] [G loss: 0.692988 0.692988]\n",
      "32 iter 323 [D loss: 0.704648, acc.: 48.50%] [G loss: 0.686919 0.686919]\n",
      "32 iter 324 [D loss: 0.704642, acc.: 47.17%] [G loss: 0.685892 0.685892]\n",
      "32 iter 325 [D loss: 0.704645, acc.: 50.83%] [G loss: 0.686929 0.686929]\n",
      "32 iter 326 [D loss: 0.704623, acc.: 47.28%] [G loss: 0.688548 0.688548]\n",
      "32 iter 327 [D loss: 0.704605, acc.: 49.94%] [G loss: 0.610853 0.610853]\n",
      "32 iter 328 [D loss: 0.704598, acc.: 48.89%] [G loss: 0.687375 0.687375]\n",
      "32 iter 329 [D loss: 0.704595, acc.: 46.94%] [G loss: 0.675689 0.675689]\n",
      "33 iter 330 [D loss: 0.704585, acc.: 50.22%] [G loss: 0.688413 0.688413]\n",
      "33 iter 331 [D loss: 0.704594, acc.: 49.94%] [G loss: 0.687031 0.687031]\n",
      "33 iter 332 [D loss: 0.704560, acc.: 48.61%] [G loss: 0.696263 0.696263]\n",
      "33 iter 333 [D loss: 0.704542, acc.: 47.83%] [G loss: 0.689882 0.689882]\n",
      "33 iter 334 [D loss: 0.704537, acc.: 48.44%] [G loss: 0.692083 0.692083]\n",
      "33 iter 335 [D loss: 0.704530, acc.: 50.17%] [G loss: 0.686655 0.686655]\n",
      "33 iter 336 [D loss: 0.704502, acc.: 46.39%] [G loss: 0.691375 0.691375]\n",
      "33 iter 337 [D loss: 0.704491, acc.: 50.44%] [G loss: 0.599113 0.599113]\n",
      "33 iter 338 [D loss: 0.704481, acc.: 49.33%] [G loss: 0.687899 0.687899]\n",
      "33 iter 339 [D loss: 0.704475, acc.: 46.94%] [G loss: 0.676764 0.676764]\n",
      "34 iter 340 [D loss: 0.704464, acc.: 49.56%] [G loss: 0.683993 0.683993]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "34 iter 341 [D loss: 0.704457, acc.: 49.44%] [G loss: 0.686006 0.686006]\n",
      "34 iter 342 [D loss: 0.704439, acc.: 48.11%] [G loss: 0.692737 0.692737]\n",
      "34 iter 343 [D loss: 0.704415, acc.: 48.67%] [G loss: 0.685658 0.685658]\n",
      "34 iter 344 [D loss: 0.704413, acc.: 46.89%] [G loss: 0.686942 0.686942]\n",
      "34 iter 345 [D loss: 0.704405, acc.: 50.61%] [G loss: 0.683347 0.683347]\n",
      "34 iter 346 [D loss: 0.704379, acc.: 47.61%] [G loss: 0.690812 0.690812]\n",
      "34 iter 347 [D loss: 0.704371, acc.: 50.06%] [G loss: 0.594800 0.594800]\n",
      "34 iter 348 [D loss: 0.704364, acc.: 49.00%] [G loss: 0.684818 0.684818]\n",
      "34 iter 349 [D loss: 0.704351, acc.: 47.78%] [G loss: 0.671923 0.671923]\n",
      "35 iter 350 [D loss: 0.704344, acc.: 49.33%] [G loss: 0.682865 0.682865]\n",
      "35 iter 351 [D loss: 0.704336, acc.: 49.39%] [G loss: 0.684629 0.684629]\n",
      "35 iter 352 [D loss: 0.704318, acc.: 48.11%] [G loss: 0.688980 0.688980]\n",
      "35 iter 353 [D loss: 0.704296, acc.: 49.06%] [G loss: 0.681963 0.681963]\n",
      "35 iter 354 [D loss: 0.704293, acc.: 47.44%] [G loss: 0.681891 0.681891]\n",
      "35 iter 355 [D loss: 0.704287, acc.: 50.33%] [G loss: 0.683551 0.683551]\n",
      "35 iter 356 [D loss: 0.704261, acc.: 48.06%] [G loss: 0.687109 0.687109]\n",
      "35 iter 357 [D loss: 0.704253, acc.: 50.28%] [G loss: 0.597276 0.597276]\n",
      "35 iter 358 [D loss: 0.704244, acc.: 48.78%] [G loss: 0.683429 0.683429]\n",
      "35 iter 359 [D loss: 0.704231, acc.: 46.22%] [G loss: 0.668558 0.668558]\n",
      "36 iter 360 [D loss: 0.704227, acc.: 49.72%] [G loss: 0.687728 0.687728]\n",
      "36 iter 361 [D loss: 0.704214, acc.: 48.61%] [G loss: 0.683153 0.683153]\n",
      "36 iter 362 [D loss: 0.704208, acc.: 46.33%] [G loss: 0.688843 0.688843]\n",
      "36 iter 363 [D loss: 0.704180, acc.: 49.11%] [G loss: 0.681830 0.681830]\n",
      "36 iter 364 [D loss: 0.704172, acc.: 47.78%] [G loss: 0.682620 0.682620]\n",
      "36 iter 365 [D loss: 0.704166, acc.: 50.61%] [G loss: 0.681697 0.681697]\n",
      "36 iter 366 [D loss: 0.704145, acc.: 46.94%] [G loss: 0.686315 0.686315]\n",
      "36 iter 367 [D loss: 0.704132, acc.: 50.28%] [G loss: 0.597316 0.597316]\n",
      "36 iter 368 [D loss: 0.704124, acc.: 47.78%] [G loss: 0.683654 0.683654]\n",
      "36 iter 369 [D loss: 0.704111, acc.: 46.61%] [G loss: 0.670205 0.670205]\n",
      "37 iter 370 [D loss: 0.704105, acc.: 49.17%] [G loss: 0.685898 0.685898]\n",
      "37 iter 371 [D loss: 0.704093, acc.: 49.22%] [G loss: 0.682891 0.682891]\n",
      "37 iter 372 [D loss: 0.704073, acc.: 49.61%] [G loss: 0.690841 0.690841]\n",
      "37 iter 373 [D loss: 0.704056, acc.: 48.89%] [G loss: 0.682575 0.682575]\n",
      "37 iter 374 [D loss: 0.704051, acc.: 47.56%] [G loss: 0.679593 0.679593]\n",
      "37 iter 375 [D loss: 0.704045, acc.: 50.33%] [G loss: 0.683150 0.683150]\n",
      "37 iter 376 [D loss: 0.704019, acc.: 47.94%] [G loss: 0.684572 0.684572]\n",
      "37 iter 377 [D loss: 0.704010, acc.: 49.83%] [G loss: 0.601897 0.601897]\n",
      "37 iter 378 [D loss: 0.704001, acc.: 47.56%] [G loss: 0.684028 0.684028]\n",
      "37 iter 379 [D loss: 0.704002, acc.: 45.67%] [G loss: 0.673435 0.673435]\n",
      "38 iter 380 [D loss: 0.703983, acc.: 49.11%] [G loss: 0.684245 0.684245]\n",
      "38 iter 381 [D loss: 0.703971, acc.: 49.11%] [G loss: 0.684454 0.684454]\n",
      "38 iter 382 [D loss: 0.703951, acc.: 48.11%] [G loss: 0.690185 0.690185]\n",
      "38 iter 383 [D loss: 0.703940, acc.: 49.00%] [G loss: 0.682811 0.682811]\n",
      "38 iter 384 [D loss: 0.703932, acc.: 47.67%] [G loss: 0.683447 0.683447]\n",
      "38 iter 385 [D loss: 0.703923, acc.: 50.83%] [G loss: 0.674290 0.674290]\n",
      "38 iter 386 [D loss: 0.703897, acc.: 48.00%] [G loss: 0.685016 0.685016]\n",
      "38 iter 387 [D loss: 0.703888, acc.: 49.28%] [G loss: 0.588487 0.588487]\n",
      "38 iter 388 [D loss: 0.703880, acc.: 48.39%] [G loss: 0.684263 0.684263]\n",
      "38 iter 389 [D loss: 0.703867, acc.: 46.39%] [G loss: 0.673350 0.673350]\n",
      "39 iter 390 [D loss: 0.703859, acc.: 49.28%] [G loss: 0.684369 0.684369]\n",
      "39 iter 391 [D loss: 0.703846, acc.: 49.83%] [G loss: 0.682154 0.682154]\n",
      "39 iter 392 [D loss: 0.703826, acc.: 50.17%] [G loss: 0.686502 0.686502]\n",
      "39 iter 393 [D loss: 0.703818, acc.: 49.72%] [G loss: 0.681542 0.681542]\n",
      "39 iter 394 [D loss: 0.703802, acc.: 47.83%] [G loss: 0.682520 0.682520]\n",
      "39 iter 395 [D loss: 0.703795, acc.: 50.61%] [G loss: 0.678434 0.678434]\n",
      "39 iter 396 [D loss: 0.703771, acc.: 48.56%] [G loss: 0.682023 0.682023]\n",
      "39 iter 397 [D loss: 0.703761, acc.: 49.83%] [G loss: 0.593306 0.593306]\n",
      "39 iter 398 [D loss: 0.703751, acc.: 48.78%] [G loss: 0.681504 0.681504]\n",
      "39 iter 399 [D loss: 0.703736, acc.: 46.89%] [G loss: 0.671675 0.671675]\n",
      "40 iter 400 [D loss: 0.703731, acc.: 48.94%] [G loss: 0.680575 0.680575]\n",
      "40 iter 401 [D loss: 0.703718, acc.: 49.17%] [G loss: 0.680374 0.680374]\n",
      "40 iter 402 [D loss: 0.703709, acc.: 46.50%] [G loss: 0.683465 0.683465]\n",
      "40 iter 403 [D loss: 0.703685, acc.: 49.28%] [G loss: 0.676071 0.676071]\n",
      "40 iter 404 [D loss: 0.703676, acc.: 46.67%] [G loss: 0.677321 0.677321]\n",
      "40 iter 405 [D loss: 0.703667, acc.: 50.44%] [G loss: 0.682370 0.682370]\n",
      "40 iter 406 [D loss: 0.703645, acc.: 47.72%] [G loss: 0.680602 0.680602]\n",
      "40 iter 407 [D loss: 0.703635, acc.: 49.56%] [G loss: 0.602491 0.602491]\n",
      "40 iter 408 [D loss: 0.703623, acc.: 47.67%] [G loss: 0.681333 0.681333]\n",
      "40 iter 409 [D loss: 0.703612, acc.: 46.28%] [G loss: 0.672355 0.672355]\n",
      "41 iter 410 [D loss: 0.703603, acc.: 50.11%] [G loss: 0.681825 0.681825]\n",
      "41 iter 411 [D loss: 0.703593, acc.: 48.94%] [G loss: 0.681339 0.681339]\n",
      "41 iter 412 [D loss: 0.703570, acc.: 49.67%] [G loss: 0.685756 0.685756]\n",
      "41 iter 413 [D loss: 0.703554, acc.: 49.56%] [G loss: 0.678737 0.678737]\n",
      "41 iter 414 [D loss: 0.703545, acc.: 48.22%] [G loss: 0.679596 0.679596]\n",
      "41 iter 415 [D loss: 0.703536, acc.: 51.44%] [G loss: 0.678340 0.678340]\n",
      "41 iter 416 [D loss: 0.703516, acc.: 47.44%] [G loss: 0.679612 0.679612]\n",
      "41 iter 417 [D loss: 0.703505, acc.: 49.83%] [G loss: 0.597251 0.597251]\n",
      "41 iter 418 [D loss: 0.703494, acc.: 48.28%] [G loss: 0.681116 0.681116]\n",
      "41 iter 419 [D loss: 0.703481, acc.: 46.72%] [G loss: 0.672351 0.672351]\n",
      "42 iter 420 [D loss: 0.703473, acc.: 49.56%] [G loss: 0.681374 0.681374]\n",
      "42 iter 421 [D loss: 0.703460, acc.: 49.33%] [G loss: 0.680765 0.680765]\n",
      "42 iter 422 [D loss: 0.703441, acc.: 49.50%] [G loss: 0.686109 0.686109]\n",
      "42 iter 423 [D loss: 0.703425, acc.: 49.28%] [G loss: 0.679427 0.679427]\n",
      "42 iter 424 [D loss: 0.703415, acc.: 48.50%] [G loss: 0.679998 0.679998]\n",
      "42 iter 425 [D loss: 0.703407, acc.: 50.44%] [G loss: 0.679485 0.679485]\n",
      "42 iter 426 [D loss: 0.703386, acc.: 47.11%] [G loss: 0.680227 0.680227]\n",
      "42 iter 427 [D loss: 0.703374, acc.: 49.94%] [G loss: 0.597960 0.597960]\n",
      "42 iter 428 [D loss: 0.703363, acc.: 47.83%] [G loss: 0.679649 0.679649]\n",
      "42 iter 429 [D loss: 0.703352, acc.: 47.44%] [G loss: 0.674392 0.674392]\n",
      "43 iter 430 [D loss: 0.703341, acc.: 49.56%] [G loss: 0.681203 0.681203]\n",
      "43 iter 431 [D loss: 0.703329, acc.: 49.06%] [G loss: 0.682307 0.682307]\n",
      "43 iter 432 [D loss: 0.703309, acc.: 49.33%] [G loss: 0.685361 0.685361]\n",
      "43 iter 433 [D loss: 0.703294, acc.: 49.17%] [G loss: 0.679282 0.679282]\n",
      "43 iter 434 [D loss: 0.703283, acc.: 48.22%] [G loss: 0.679599 0.679599]\n",
      "43 iter 435 [D loss: 0.703275, acc.: 50.56%] [G loss: 0.678948 0.678948]\n",
      "43 iter 436 [D loss: 0.703254, acc.: 47.50%] [G loss: 0.679748 0.679748]\n",
      "43 iter 437 [D loss: 0.703242, acc.: 50.33%] [G loss: 0.600595 0.600595]\n",
      "43 iter 438 [D loss: 0.703230, acc.: 48.22%] [G loss: 0.679272 0.679272]\n",
      "43 iter 439 [D loss: 0.703217, acc.: 46.94%] [G loss: 0.672621 0.672621]\n",
      "44 iter 440 [D loss: 0.703211, acc.: 48.56%] [G loss: 0.680278 0.680278]\n",
      "44 iter 441 [D loss: 0.703201, acc.: 46.94%] [G loss: 0.682562 0.682562]\n",
      "44 iter 442 [D loss: 0.703176, acc.: 49.61%] [G loss: 0.690313 0.690313]\n",
      "44 iter 443 [D loss: 0.703162, acc.: 48.22%] [G loss: 0.681374 0.681374]\n",
      "44 iter 444 [D loss: 0.703151, acc.: 48.11%] [G loss: 0.680829 0.680829]\n",
      "44 iter 445 [D loss: 0.703142, acc.: 49.28%] [G loss: 0.680337 0.680337]\n",
      "44 iter 446 [D loss: 0.703119, acc.: 48.28%] [G loss: 0.682207 0.682207]\n",
      "44 iter 447 [D loss: 0.703108, acc.: 50.44%] [G loss: 0.604236 0.604236]\n",
      "44 iter 448 [D loss: 0.703096, acc.: 48.44%] [G loss: 0.680851 0.680851]\n",
      "44 iter 449 [D loss: 0.703084, acc.: 46.89%] [G loss: 0.674789 0.674789]\n",
      "45 iter 450 [D loss: 0.703075, acc.: 49.61%] [G loss: 0.680846 0.680846]\n",
      "45 iter 451 [D loss: 0.703059, acc.: 49.00%] [G loss: 0.681171 0.681171]\n",
      "45 iter 452 [D loss: 0.703041, acc.: 49.67%] [G loss: 0.686492 0.686492]\n",
      "45 iter 453 [D loss: 0.703026, acc.: 48.78%] [G loss: 0.680017 0.680017]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "45 iter 454 [D loss: 0.703014, acc.: 48.39%] [G loss: 0.680557 0.680557]\n",
      "45 iter 455 [D loss: 0.703007, acc.: 49.00%] [G loss: 0.679571 0.679571]\n",
      "45 iter 456 [D loss: 0.702984, acc.: 48.17%] [G loss: 0.682631 0.682631]\n",
      "45 iter 457 [D loss: 0.702972, acc.: 49.89%] [G loss: 0.602258 0.602258]\n",
      "45 iter 458 [D loss: 0.702959, acc.: 48.39%] [G loss: 0.680824 0.680824]\n",
      "45 iter 459 [D loss: 0.702944, acc.: 46.67%] [G loss: 0.674245 0.674245]\n",
      "46 iter 460 [D loss: 0.702936, acc.: 49.72%] [G loss: 0.679700 0.679700]\n",
      "46 iter 461 [D loss: 0.702922, acc.: 49.44%] [G loss: 0.680728 0.680728]\n",
      "46 iter 462 [D loss: 0.702904, acc.: 49.67%] [G loss: 0.686144 0.686144]\n",
      "46 iter 463 [D loss: 0.702888, acc.: 49.44%] [G loss: 0.679805 0.679805]\n",
      "46 iter 464 [D loss: 0.702879, acc.: 48.11%] [G loss: 0.681813 0.681813]\n",
      "46 iter 465 [D loss: 0.702868, acc.: 48.78%] [G loss: 0.677024 0.677024]\n",
      "46 iter 466 [D loss: 0.702846, acc.: 48.94%] [G loss: 0.683112 0.683112]\n",
      "46 iter 467 [D loss: 0.702835, acc.: 49.78%] [G loss: 0.599236 0.599236]\n",
      "46 iter 468 [D loss: 0.702822, acc.: 47.83%] [G loss: 0.681194 0.681194]\n",
      "46 iter 469 [D loss: 0.702807, acc.: 46.67%] [G loss: 0.676176 0.676176]\n",
      "47 iter 470 [D loss: 0.702799, acc.: 49.28%] [G loss: 0.681301 0.681301]\n",
      "47 iter 471 [D loss: 0.702786, acc.: 48.06%] [G loss: 0.682014 0.682014]\n",
      "47 iter 472 [D loss: 0.702765, acc.: 49.78%] [G loss: 0.688325 0.688325]\n",
      "47 iter 473 [D loss: 0.702749, acc.: 49.00%] [G loss: 0.681214 0.681214]\n",
      "47 iter 474 [D loss: 0.702738, acc.: 48.78%] [G loss: 0.682072 0.682072]\n",
      "47 iter 475 [D loss: 0.702729, acc.: 49.33%] [G loss: 0.679359 0.679359]\n",
      "47 iter 476 [D loss: 0.702707, acc.: 47.78%] [G loss: 0.682990 0.682990]\n",
      "47 iter 477 [D loss: 0.702697, acc.: 48.56%] [G loss: 0.600784 0.600784]\n",
      "47 iter 478 [D loss: 0.702683, acc.: 48.67%] [G loss: 0.683045 0.683045]\n",
      "47 iter 479 [D loss: 0.702668, acc.: 46.44%] [G loss: 0.677854 0.677854]\n",
      "48 iter 480 [D loss: 0.702659, acc.: 49.56%] [G loss: 0.680057 0.680057]\n",
      "48 iter 481 [D loss: 0.702645, acc.: 48.83%] [G loss: 0.682203 0.682203]\n",
      "48 iter 482 [D loss: 0.702625, acc.: 49.50%] [G loss: 0.688062 0.688062]\n",
      "48 iter 483 [D loss: 0.702609, acc.: 49.06%] [G loss: 0.682044 0.682044]\n",
      "48 iter 484 [D loss: 0.702597, acc.: 48.17%] [G loss: 0.683353 0.683353]\n",
      "48 iter 485 [D loss: 0.702586, acc.: 49.56%] [G loss: 0.678625 0.678625]\n",
      "48 iter 486 [D loss: 0.702566, acc.: 48.22%] [G loss: 0.682636 0.682636]\n",
      "48 iter 487 [D loss: 0.702555, acc.: 49.56%] [G loss: 0.607685 0.607685]\n",
      "48 iter 488 [D loss: 0.702541, acc.: 47.83%] [G loss: 0.683243 0.683243]\n",
      "48 iter 489 [D loss: 0.702526, acc.: 46.44%] [G loss: 0.678804 0.678804]\n",
      "49 iter 490 [D loss: 0.702515, acc.: 49.44%] [G loss: 0.681937 0.681937]\n",
      "49 iter 491 [D loss: 0.702500, acc.: 49.33%] [G loss: 0.682627 0.682627]\n",
      "49 iter 492 [D loss: 0.702483, acc.: 49.44%] [G loss: 0.686938 0.686938]\n",
      "49 iter 493 [D loss: 0.702467, acc.: 49.06%] [G loss: 0.681155 0.681155]\n",
      "49 iter 494 [D loss: 0.702455, acc.: 48.28%] [G loss: 0.682024 0.682024]\n",
      "49 iter 495 [D loss: 0.702444, acc.: 49.89%] [G loss: 0.681387 0.681387]\n",
      "49 iter 496 [D loss: 0.702423, acc.: 47.89%] [G loss: 0.682256 0.682256]\n",
      "49 iter 497 [D loss: 0.702411, acc.: 49.89%] [G loss: 0.607665 0.607665]\n",
      "49 iter 498 [D loss: 0.702397, acc.: 48.39%] [G loss: 0.679535 0.679535]\n",
      "49 iter 499 [D loss: 0.702383, acc.: 46.89%] [G loss: 0.676156 0.676156]\n",
      "50 iter 500 [D loss: 0.702373, acc.: 48.83%] [G loss: 0.679438 0.679438]\n",
      "50 iter 501 [D loss: 0.702361, acc.: 47.33%] [G loss: 0.683445 0.683445]\n",
      "50 iter 502 [D loss: 0.702339, acc.: 49.72%] [G loss: 0.689488 0.689488]\n",
      "50 iter 503 [D loss: 0.702322, acc.: 48.67%] [G loss: 0.681610 0.681610]\n",
      "50 iter 504 [D loss: 0.702310, acc.: 48.39%] [G loss: 0.682618 0.682618]\n",
      "50 iter 505 [D loss: 0.702298, acc.: 50.06%] [G loss: 0.681627 0.681627]\n",
      "50 iter 506 [D loss: 0.702279, acc.: 47.61%] [G loss: 0.681530 0.681530]\n",
      "50 iter 507 [D loss: 0.702267, acc.: 50.06%] [G loss: 0.610833 0.610833]\n",
      "50 iter 508 [D loss: 0.702253, acc.: 47.28%] [G loss: 0.680263 0.680263]\n",
      "50 iter 509 [D loss: 0.702237, acc.: 46.22%] [G loss: 0.675813 0.675813]\n",
      "Saving model at /srv/osirim/idumeur/trainings/new_model/training_9/checkpoints/ step 50\n",
      "51 iter 510 [D loss: 0.702227, acc.: 49.06%] [G loss: 0.680541 0.680541]\n",
      "51 iter 511 [D loss: 0.702213, acc.: 48.22%] [G loss: 0.681959 0.681959]\n",
      "51 iter 512 [D loss: 0.702193, acc.: 49.72%] [G loss: 0.688631 0.688631]\n",
      "51 iter 513 [D loss: 0.702179, acc.: 48.56%] [G loss: 0.682029 0.682029]\n",
      "51 iter 514 [D loss: 0.702166, acc.: 47.89%] [G loss: 0.683240 0.683240]\n",
      "51 iter 515 [D loss: 0.702153, acc.: 49.28%] [G loss: 0.678971 0.678971]\n",
      "51 iter 516 [D loss: 0.702133, acc.: 48.39%] [G loss: 0.684601 0.684601]\n",
      "51 iter 517 [D loss: 0.702121, acc.: 49.44%] [G loss: 0.608481 0.608481]\n",
      "51 iter 518 [D loss: 0.702106, acc.: 46.56%] [G loss: 0.681579 0.681579]\n",
      "51 iter 519 [D loss: 0.702090, acc.: 46.94%] [G loss: 0.676213 0.676213]\n",
      "52 iter 520 [D loss: 0.702082, acc.: 48.11%] [G loss: 0.680374 0.680374]\n",
      "52 iter 521 [D loss: 0.702066, acc.: 47.17%] [G loss: 0.684996 0.684996]\n",
      "52 iter 522 [D loss: 0.702046, acc.: 49.67%] [G loss: 0.689599 0.689599]\n",
      "52 iter 523 [D loss: 0.702029, acc.: 48.89%] [G loss: 0.682256 0.682256]\n",
      "52 iter 524 [D loss: 0.702017, acc.: 48.06%] [G loss: 0.684469 0.684469]\n",
      "52 iter 525 [D loss: 0.702005, acc.: 49.33%] [G loss: 0.679418 0.679418]\n",
      "52 iter 526 [D loss: 0.701984, acc.: 48.06%] [G loss: 0.683849 0.683849]\n",
      "52 iter 527 [D loss: 0.701972, acc.: 48.72%] [G loss: 0.608158 0.608158]\n",
      "52 iter 528 [D loss: 0.701958, acc.: 47.61%] [G loss: 0.682151 0.682151]\n",
      "52 iter 529 [D loss: 0.701945, acc.: 44.89%] [G loss: 0.680133 0.680133]\n",
      "53 iter 530 [D loss: 0.701931, acc.: 48.78%] [G loss: 0.679460 0.679460]\n",
      "53 iter 531 [D loss: 0.701918, acc.: 49.22%] [G loss: 0.683398 0.683398]\n",
      "53 iter 532 [D loss: 0.701897, acc.: 49.72%] [G loss: 0.688982 0.688982]\n",
      "53 iter 533 [D loss: 0.701880, acc.: 48.39%] [G loss: 0.683162 0.683162]\n",
      "53 iter 534 [D loss: 0.701867, acc.: 46.44%] [G loss: 0.684397 0.684397]\n",
      "53 iter 535 [D loss: 0.701854, acc.: 49.83%] [G loss: 0.681783 0.681783]\n",
      "53 iter 536 [D loss: 0.701835, acc.: 47.56%] [G loss: 0.683469 0.683469]\n",
      "53 iter 537 [D loss: 0.701822, acc.: 50.50%] [G loss: 0.615802 0.615802]\n",
      "53 iter 538 [D loss: 0.701807, acc.: 47.94%] [G loss: 0.681436 0.681436]\n",
      "53 iter 539 [D loss: 0.701792, acc.: 46.78%] [G loss: 0.677801 0.677801]\n",
      "54 iter 540 [D loss: 0.701782, acc.: 49.72%] [G loss: 0.679890 0.679890]\n",
      "54 iter 541 [D loss: 0.701765, acc.: 47.83%] [G loss: 0.685525 0.685525]\n",
      "54 iter 542 [D loss: 0.701746, acc.: 49.67%] [G loss: 0.688823 0.688823]\n",
      "54 iter 543 [D loss: 0.701730, acc.: 48.11%] [G loss: 0.683679 0.683679]\n",
      "54 iter 544 [D loss: 0.701716, acc.: 47.50%] [G loss: 0.683576 0.683576]\n",
      "54 iter 545 [D loss: 0.701703, acc.: 49.83%] [G loss: 0.683840 0.683840]\n",
      "54 iter 546 [D loss: 0.701684, acc.: 47.89%] [G loss: 0.684559 0.684559]\n",
      "54 iter 547 [D loss: 0.701670, acc.: 49.67%] [G loss: 0.618491 0.618491]\n",
      "54 iter 548 [D loss: 0.701656, acc.: 48.44%] [G loss: 0.681640 0.681640]\n",
      "54 iter 549 [D loss: 0.701640, acc.: 47.11%] [G loss: 0.681287 0.681287]\n",
      "55 iter 550 [D loss: 0.701628, acc.: 49.17%] [G loss: 0.680189 0.680189]\n",
      "55 iter 551 [D loss: 0.701613, acc.: 49.28%] [G loss: 0.683244 0.683244]\n",
      "55 iter 552 [D loss: 0.701595, acc.: 48.78%] [G loss: 0.687529 0.687529]\n",
      "55 iter 553 [D loss: 0.701578, acc.: 48.56%] [G loss: 0.682112 0.682112]\n",
      "55 iter 554 [D loss: 0.701565, acc.: 47.78%] [G loss: 0.683000 0.683000]\n",
      "55 iter 555 [D loss: 0.701551, acc.: 49.28%] [G loss: 0.682640 0.682640]\n",
      "55 iter 556 [D loss: 0.701531, acc.: 48.06%] [G loss: 0.682485 0.682485]\n",
      "55 iter 557 [D loss: 0.701518, acc.: 49.06%] [G loss: 0.616290 0.616290]\n",
      "55 iter 558 [D loss: 0.701502, acc.: 48.11%] [G loss: 0.680867 0.680867]\n",
      "55 iter 559 [D loss: 0.701486, acc.: 46.39%] [G loss: 0.678744 0.678744]\n",
      "56 iter 560 [D loss: 0.701475, acc.: 48.94%] [G loss: 0.679137 0.679137]\n",
      "56 iter 561 [D loss: 0.701459, acc.: 48.83%] [G loss: 0.682255 0.682255]\n",
      "56 iter 562 [D loss: 0.701440, acc.: 48.67%] [G loss: 0.687258 0.687258]\n",
      "56 iter 563 [D loss: 0.701423, acc.: 48.28%] [G loss: 0.681740 0.681740]\n",
      "56 iter 564 [D loss: 0.701409, acc.: 47.33%] [G loss: 0.683325 0.683325]\n",
      "56 iter 565 [D loss: 0.701395, acc.: 49.50%] [G loss: 0.682287 0.682287]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "56 iter 566 [D loss: 0.701377, acc.: 48.11%] [G loss: 0.683011 0.683011]\n",
      "56 iter 567 [D loss: 0.701362, acc.: 49.28%] [G loss: 0.612144 0.612144]\n",
      "56 iter 568 [D loss: 0.701347, acc.: 48.17%] [G loss: 0.679979 0.679979]\n",
      "56 iter 569 [D loss: 0.701331, acc.: 46.78%] [G loss: 0.678689 0.678689]\n",
      "57 iter 570 [D loss: 0.701318, acc.: 49.39%] [G loss: 0.678371 0.678371]\n",
      "57 iter 571 [D loss: 0.701302, acc.: 48.67%] [G loss: 0.680833 0.680833]\n",
      "57 iter 572 [D loss: 0.701284, acc.: 49.56%] [G loss: 0.687623 0.687623]\n",
      "57 iter 573 [D loss: 0.701266, acc.: 48.78%] [G loss: 0.681653 0.681653]\n",
      "57 iter 574 [D loss: 0.701252, acc.: 47.39%] [G loss: 0.683307 0.683307]\n",
      "57 iter 575 [D loss: 0.701238, acc.: 49.50%] [G loss: 0.679248 0.679248]\n",
      "57 iter 576 [D loss: 0.701219, acc.: 47.67%] [G loss: 0.682371 0.682371]\n",
      "57 iter 577 [D loss: 0.701206, acc.: 49.94%] [G loss: 0.615368 0.615368]\n",
      "57 iter 578 [D loss: 0.701190, acc.: 48.44%] [G loss: 0.678516 0.678516]\n",
      "57 iter 579 [D loss: 0.701174, acc.: 46.44%] [G loss: 0.679019 0.679019]\n",
      "58 iter 580 [D loss: 0.701161, acc.: 48.89%] [G loss: 0.677195 0.677195]\n",
      "58 iter 581 [D loss: 0.701145, acc.: 48.67%] [G loss: 0.681077 0.681077]\n",
      "58 iter 582 [D loss: 0.701126, acc.: 49.61%] [G loss: 0.688237 0.688237]\n",
      "58 iter 583 [D loss: 0.701109, acc.: 48.44%] [G loss: 0.681904 0.681904]\n",
      "58 iter 584 [D loss: 0.701094, acc.: 47.33%] [G loss: 0.683528 0.683528]\n",
      "58 iter 585 [D loss: 0.701081, acc.: 49.33%] [G loss: 0.680223 0.680223]\n",
      "58 iter 586 [D loss: 0.701061, acc.: 46.94%] [G loss: 0.682604 0.682604]\n",
      "58 iter 587 [D loss: 0.701047, acc.: 50.00%] [G loss: 0.613315 0.613315]\n",
      "58 iter 588 [D loss: 0.701031, acc.: 47.89%] [G loss: 0.678840 0.678840]\n",
      "58 iter 589 [D loss: 0.701014, acc.: 47.00%] [G loss: 0.679355 0.679355]\n",
      "59 iter 590 [D loss: 0.701001, acc.: 49.56%] [G loss: 0.676337 0.676337]\n",
      "59 iter 591 [D loss: 0.700986, acc.: 49.33%] [G loss: 0.682159 0.682159]\n",
      "59 iter 592 [D loss: 0.700970, acc.: 48.39%] [G loss: 0.686792 0.686792]\n",
      "59 iter 593 [D loss: 0.700950, acc.: 46.89%] [G loss: 0.681571 0.681571]\n",
      "59 iter 594 [D loss: 0.700935, acc.: 48.22%] [G loss: 0.683242 0.683242]\n",
      "59 iter 595 [D loss: 0.700921, acc.: 49.56%] [G loss: 0.681200 0.681200]\n",
      "59 iter 596 [D loss: 0.700901, acc.: 47.72%] [G loss: 0.682353 0.682353]\n",
      "59 iter 597 [D loss: 0.700887, acc.: 50.11%] [G loss: 0.618714 0.618714]\n",
      "59 iter 598 [D loss: 0.700871, acc.: 48.22%] [G loss: 0.677963 0.677963]\n",
      "59 iter 599 [D loss: 0.700854, acc.: 47.11%] [G loss: 0.678959 0.678959]\n",
      "60 iter 600 [D loss: 0.700840, acc.: 49.50%] [G loss: 0.678680 0.678680]\n",
      "60 iter 601 [D loss: 0.700825, acc.: 48.33%] [G loss: 0.680551 0.680551]\n",
      "60 iter 602 [D loss: 0.700806, acc.: 50.06%] [G loss: 0.687904 0.687904]\n",
      "60 iter 603 [D loss: 0.700788, acc.: 47.94%] [G loss: 0.681950 0.681950]\n",
      "60 iter 604 [D loss: 0.700773, acc.: 47.83%] [G loss: 0.683343 0.683343]\n",
      "60 iter 605 [D loss: 0.700759, acc.: 49.61%] [G loss: 0.681284 0.681284]\n",
      "60 iter 606 [D loss: 0.700740, acc.: 46.72%] [G loss: 0.683053 0.683053]\n",
      "60 iter 607 [D loss: 0.700725, acc.: 50.22%] [G loss: 0.616036 0.616036]\n",
      "60 iter 608 [D loss: 0.700709, acc.: 48.22%] [G loss: 0.680866 0.680866]\n",
      "60 iter 609 [D loss: 0.700692, acc.: 47.28%] [G loss: 0.679818 0.679818]\n",
      "61 iter 610 [D loss: 0.700678, acc.: 49.78%] [G loss: 0.677813 0.677813]\n",
      "61 iter 611 [D loss: 0.700662, acc.: 49.94%] [G loss: 0.682999 0.682999]\n",
      "61 iter 612 [D loss: 0.700648, acc.: 47.61%] [G loss: 0.687345 0.687345]\n",
      "61 iter 613 [D loss: 0.700626, acc.: 47.44%] [G loss: 0.681598 0.681598]\n",
      "61 iter 614 [D loss: 0.700611, acc.: 48.22%] [G loss: 0.683108 0.683108]\n",
      "61 iter 615 [D loss: 0.700596, acc.: 49.61%] [G loss: 0.680929 0.680929]\n",
      "61 iter 616 [D loss: 0.700576, acc.: 46.61%] [G loss: 0.682621 0.682621]\n",
      "61 iter 617 [D loss: 0.700562, acc.: 50.39%] [G loss: 0.617828 0.617828]\n",
      "61 iter 618 [D loss: 0.700545, acc.: 47.78%] [G loss: 0.678894 0.678894]\n",
      "61 iter 619 [D loss: 0.700528, acc.: 46.78%] [G loss: 0.679363 0.679363]\n",
      "62 iter 620 [D loss: 0.700514, acc.: 49.94%] [G loss: 0.678072 0.678072]\n",
      "62 iter 621 [D loss: 0.700498, acc.: 48.89%] [G loss: 0.680461 0.680461]\n",
      "62 iter 622 [D loss: 0.700479, acc.: 50.22%] [G loss: 0.687717 0.687717]\n",
      "62 iter 623 [D loss: 0.700461, acc.: 47.50%] [G loss: 0.681692 0.681692]\n",
      "62 iter 624 [D loss: 0.700446, acc.: 47.83%] [G loss: 0.682910 0.682910]\n",
      "62 iter 625 [D loss: 0.700431, acc.: 49.22%] [G loss: 0.681780 0.681780]\n",
      "62 iter 626 [D loss: 0.700411, acc.: 46.00%] [G loss: 0.683583 0.683583]\n",
      "62 iter 627 [D loss: 0.700396, acc.: 50.11%] [G loss: 0.617386 0.617386]\n",
      "62 iter 628 [D loss: 0.700380, acc.: 47.67%] [G loss: 0.680075 0.680075]\n",
      "62 iter 629 [D loss: 0.700363, acc.: 47.28%] [G loss: 0.679339 0.679339]\n",
      "63 iter 630 [D loss: 0.700348, acc.: 49.94%] [G loss: 0.676009 0.676009]\n",
      "63 iter 631 [D loss: 0.700332, acc.: 48.56%] [G loss: 0.680924 0.680924]\n",
      "63 iter 632 [D loss: 0.700313, acc.: 50.11%] [G loss: 0.688064 0.688064]\n",
      "63 iter 633 [D loss: 0.700295, acc.: 47.44%] [G loss: 0.681946 0.681946]\n",
      "63 iter 634 [D loss: 0.700279, acc.: 47.61%] [G loss: 0.683575 0.683575]\n",
      "63 iter 635 [D loss: 0.700264, acc.: 49.28%] [G loss: 0.681321 0.681321]\n",
      "63 iter 636 [D loss: 0.700245, acc.: 46.06%] [G loss: 0.683009 0.683009]\n",
      "63 iter 637 [D loss: 0.700230, acc.: 49.06%] [G loss: 0.619461 0.619461]\n",
      "63 iter 638 [D loss: 0.700213, acc.: 48.67%] [G loss: 0.679197 0.679197]\n",
      "63 iter 639 [D loss: 0.700195, acc.: 46.72%] [G loss: 0.680103 0.680103]\n",
      "64 iter 640 [D loss: 0.700181, acc.: 49.44%] [G loss: 0.679297 0.679297]\n",
      "64 iter 641 [D loss: 0.700164, acc.: 48.28%] [G loss: 0.683285 0.683285]\n",
      "64 iter 642 [D loss: 0.700145, acc.: 50.22%] [G loss: 0.687905 0.687905]\n",
      "64 iter 643 [D loss: 0.700127, acc.: 47.61%] [G loss: 0.681909 0.681909]\n",
      "64 iter 644 [D loss: 0.700112, acc.: 47.78%] [G loss: 0.682817 0.682817]\n",
      "64 iter 645 [D loss: 0.700097, acc.: 49.22%] [G loss: 0.683427 0.683427]\n",
      "64 iter 646 [D loss: 0.700077, acc.: 46.44%] [G loss: 0.682955 0.682955]\n",
      "64 iter 647 [D loss: 0.700061, acc.: 49.44%] [G loss: 0.628483 0.628483]\n",
      "64 iter 648 [D loss: 0.700044, acc.: 48.56%] [G loss: 0.678272 0.678272]\n",
      "64 iter 649 [D loss: 0.700027, acc.: 46.61%] [G loss: 0.680273 0.680273]\n",
      "65 iter 650 [D loss: 0.700012, acc.: 49.06%] [G loss: 0.676339 0.676339]\n",
      "65 iter 651 [D loss: 0.699995, acc.: 48.78%] [G loss: 0.680818 0.680818]\n",
      "65 iter 652 [D loss: 0.699976, acc.: 50.61%] [G loss: 0.688039 0.688039]\n",
      "65 iter 653 [D loss: 0.699958, acc.: 45.89%] [G loss: 0.682435 0.682435]\n",
      "65 iter 654 [D loss: 0.699942, acc.: 47.56%] [G loss: 0.682704 0.682704]\n",
      "65 iter 655 [D loss: 0.699926, acc.: 49.00%] [G loss: 0.680414 0.680414]\n",
      "65 iter 656 [D loss: 0.699907, acc.: 46.28%] [G loss: 0.683043 0.683043]\n",
      "65 iter 657 [D loss: 0.699891, acc.: 49.50%] [G loss: 0.622195 0.622195]\n",
      "65 iter 658 [D loss: 0.699874, acc.: 49.44%] [G loss: 0.679140 0.679140]\n",
      "65 iter 659 [D loss: 0.699856, acc.: 48.00%] [G loss: 0.679910 0.679910]\n",
      "66 iter 660 [D loss: 0.699841, acc.: 49.28%] [G loss: 0.676791 0.676791]\n",
      "66 iter 661 [D loss: 0.699824, acc.: 48.33%] [G loss: 0.681342 0.681342]\n",
      "66 iter 662 [D loss: 0.699805, acc.: 50.39%] [G loss: 0.688378 0.688378]\n",
      "66 iter 663 [D loss: 0.699786, acc.: 46.83%] [G loss: 0.681669 0.681669]\n",
      "66 iter 664 [D loss: 0.699770, acc.: 48.83%] [G loss: 0.682574 0.682574]\n",
      "66 iter 665 [D loss: 0.699754, acc.: 49.17%] [G loss: 0.682117 0.682117]\n",
      "66 iter 666 [D loss: 0.699735, acc.: 46.94%] [G loss: 0.682898 0.682898]\n",
      "66 iter 667 [D loss: 0.699719, acc.: 49.50%] [G loss: 0.622427 0.622427]\n",
      "66 iter 668 [D loss: 0.699702, acc.: 49.11%] [G loss: 0.679020 0.679020]\n",
      "66 iter 669 [D loss: 0.699684, acc.: 47.28%] [G loss: 0.680778 0.680778]\n",
      "67 iter 670 [D loss: 0.699668, acc.: 49.50%] [G loss: 0.676348 0.676348]\n",
      "67 iter 671 [D loss: 0.699651, acc.: 48.06%] [G loss: 0.681583 0.681583]\n",
      "67 iter 672 [D loss: 0.699632, acc.: 51.06%] [G loss: 0.688185 0.688185]\n",
      "67 iter 673 [D loss: 0.699614, acc.: 45.78%] [G loss: 0.682094 0.682094]\n",
      "67 iter 674 [D loss: 0.699597, acc.: 48.44%] [G loss: 0.682962 0.682962]\n",
      "67 iter 675 [D loss: 0.699581, acc.: 48.61%] [G loss: 0.680697 0.680697]\n",
      "67 iter 676 [D loss: 0.699562, acc.: 46.89%] [G loss: 0.683344 0.683344]\n",
      "67 iter 677 [D loss: 0.699545, acc.: 49.00%] [G loss: 0.622759 0.622759]\n",
      "67 iter 678 [D loss: 0.699528, acc.: 48.33%] [G loss: 0.682157 0.682157]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "67 iter 679 [D loss: 0.699510, acc.: 48.72%] [G loss: 0.681028 0.681028]\n",
      "68 iter 680 [D loss: 0.699494, acc.: 49.56%] [G loss: 0.676533 0.676533]\n",
      "68 iter 681 [D loss: 0.699477, acc.: 48.28%] [G loss: 0.680970 0.680970]\n",
      "68 iter 682 [D loss: 0.699459, acc.: 51.00%] [G loss: 0.688171 0.688171]\n",
      "68 iter 683 [D loss: 0.699440, acc.: 46.61%] [G loss: 0.682436 0.682436]\n",
      "68 iter 684 [D loss: 0.699423, acc.: 48.56%] [G loss: 0.682950 0.682950]\n",
      "68 iter 685 [D loss: 0.699406, acc.: 48.22%] [G loss: 0.679858 0.679858]\n",
      "68 iter 686 [D loss: 0.699387, acc.: 46.33%] [G loss: 0.683017 0.683017]\n",
      "68 iter 687 [D loss: 0.699370, acc.: 49.28%] [G loss: 0.624839 0.624839]\n",
      "68 iter 688 [D loss: 0.699353, acc.: 49.44%] [G loss: 0.679550 0.679550]\n",
      "68 iter 689 [D loss: 0.699335, acc.: 48.44%] [G loss: 0.681160 0.681160]\n",
      "69 iter 690 [D loss: 0.699319, acc.: 49.00%] [G loss: 0.676566 0.676566]\n",
      "69 iter 691 [D loss: 0.699301, acc.: 48.44%] [G loss: 0.681007 0.681007]\n",
      "69 iter 692 [D loss: 0.699283, acc.: 50.78%] [G loss: 0.688194 0.688194]\n",
      "69 iter 693 [D loss: 0.699264, acc.: 44.61%] [G loss: 0.682166 0.682166]\n",
      "69 iter 694 [D loss: 0.699247, acc.: 49.44%] [G loss: 0.682882 0.682882]\n",
      "69 iter 695 [D loss: 0.699230, acc.: 48.89%] [G loss: 0.680328 0.680328]\n",
      "69 iter 696 [D loss: 0.699211, acc.: 47.17%] [G loss: 0.683016 0.683016]\n",
      "69 iter 697 [D loss: 0.699194, acc.: 48.78%] [G loss: 0.622838 0.622838]\n",
      "69 iter 698 [D loss: 0.699176, acc.: 50.17%] [G loss: 0.679155 0.679155]\n",
      "69 iter 699 [D loss: 0.699158, acc.: 49.56%] [G loss: 0.680586 0.680586]\n",
      "70 iter 700 [D loss: 0.699142, acc.: 50.17%] [G loss: 0.677232 0.677232]\n",
      "70 iter 701 [D loss: 0.699124, acc.: 46.67%] [G loss: 0.681384 0.681384]\n",
      "70 iter 702 [D loss: 0.699105, acc.: 49.72%] [G loss: 0.688303 0.688303]\n",
      "70 iter 703 [D loss: 0.699086, acc.: 45.72%] [G loss: 0.682574 0.682574]\n",
      "70 iter 704 [D loss: 0.699069, acc.: 48.56%] [G loss: 0.683372 0.683372]\n",
      "70 iter 705 [D loss: 0.699052, acc.: 49.11%] [G loss: 0.681791 0.681791]\n",
      "70 iter 706 [D loss: 0.699033, acc.: 48.11%] [G loss: 0.683484 0.683484]\n",
      "70 iter 707 [D loss: 0.699015, acc.: 49.44%] [G loss: 0.625336 0.625336]\n",
      "70 iter 708 [D loss: 0.698998, acc.: 50.44%] [G loss: 0.680981 0.680981]\n",
      "70 iter 709 [D loss: 0.698979, acc.: 49.39%] [G loss: 0.680788 0.680788]\n",
      "71 iter 710 [D loss: 0.698963, acc.: 48.94%] [G loss: 0.675550 0.675550]\n",
      "71 iter 711 [D loss: 0.698945, acc.: 46.94%] [G loss: 0.682020 0.682020]\n",
      "71 iter 712 [D loss: 0.698926, acc.: 50.28%] [G loss: 0.688594 0.688594]\n",
      "71 iter 713 [D loss: 0.698907, acc.: 45.56%] [G loss: 0.682888 0.682888]\n",
      "71 iter 714 [D loss: 0.698889, acc.: 48.94%] [G loss: 0.683436 0.683436]\n",
      "71 iter 715 [D loss: 0.698872, acc.: 50.06%] [G loss: 0.680220 0.680220]\n",
      "71 iter 716 [D loss: 0.698853, acc.: 50.17%] [G loss: 0.683017 0.683017]\n",
      "71 iter 717 [D loss: 0.698836, acc.: 49.67%] [G loss: 0.626335 0.626335]\n",
      "71 iter 718 [D loss: 0.698818, acc.: 51.39%] [G loss: 0.680965 0.680965]\n",
      "71 iter 719 [D loss: 0.698799, acc.: 50.56%] [G loss: 0.680995 0.680995]\n",
      "72 iter 720 [D loss: 0.698782, acc.: 49.67%] [G loss: 0.678480 0.678480]\n",
      "72 iter 721 [D loss: 0.698764, acc.: 46.78%] [G loss: 0.681734 0.681734]\n",
      "72 iter 722 [D loss: 0.698745, acc.: 50.61%] [G loss: 0.688368 0.688368]\n",
      "72 iter 723 [D loss: 0.698726, acc.: 45.61%] [G loss: 0.682812 0.682812]\n",
      "72 iter 724 [D loss: 0.698708, acc.: 49.50%] [G loss: 0.683093 0.683093]\n",
      "72 iter 725 [D loss: 0.698691, acc.: 48.44%] [G loss: 0.681321 0.681321]\n",
      "72 iter 726 [D loss: 0.698672, acc.: 50.28%] [G loss: 0.683062 0.683062]\n",
      "72 iter 727 [D loss: 0.698654, acc.: 49.00%] [G loss: 0.626528 0.626528]\n",
      "72 iter 728 [D loss: 0.698636, acc.: 50.94%] [G loss: 0.680402 0.680402]\n",
      "72 iter 729 [D loss: 0.698617, acc.: 50.67%] [G loss: 0.680922 0.680922]\n",
      "73 iter 730 [D loss: 0.698600, acc.: 49.22%] [G loss: 0.675689 0.675689]\n",
      "73 iter 731 [D loss: 0.698582, acc.: 47.00%] [G loss: 0.681342 0.681342]\n",
      "73 iter 732 [D loss: 0.698563, acc.: 50.22%] [G loss: 0.688638 0.688638]\n",
      "73 iter 733 [D loss: 0.698544, acc.: 46.33%] [G loss: 0.682886 0.682886]\n",
      "73 iter 734 [D loss: 0.698526, acc.: 49.33%] [G loss: 0.683435 0.683435]\n",
      "73 iter 735 [D loss: 0.698508, acc.: 49.28%] [G loss: 0.680474 0.680474]\n",
      "73 iter 736 [D loss: 0.698489, acc.: 51.00%] [G loss: 0.683055 0.683055]\n",
      "73 iter 737 [D loss: 0.698471, acc.: 49.44%] [G loss: 0.627291 0.627291]\n",
      "73 iter 738 [D loss: 0.698453, acc.: 49.78%] [G loss: 0.679951 0.679951]\n",
      "73 iter 739 [D loss: 0.698434, acc.: 50.56%] [G loss: 0.681152 0.681152]\n",
      "74 iter 740 [D loss: 0.698416, acc.: 49.17%] [G loss: 0.678369 0.678369]\n",
      "74 iter 741 [D loss: 0.698398, acc.: 46.33%] [G loss: 0.681831 0.681831]\n",
      "74 iter 742 [D loss: 0.698379, acc.: 50.06%] [G loss: 0.688857 0.688857]\n",
      "74 iter 743 [D loss: 0.698359, acc.: 47.22%] [G loss: 0.683255 0.683255]\n",
      "74 iter 744 [D loss: 0.698341, acc.: 48.83%] [G loss: 0.683576 0.683576]\n",
      "74 iter 745 [D loss: 0.698324, acc.: 49.22%] [G loss: 0.682025 0.682025]\n",
      "74 iter 746 [D loss: 0.698304, acc.: 50.89%] [G loss: 0.683092 0.683092]\n",
      "74 iter 747 [D loss: 0.698286, acc.: 48.83%] [G loss: 0.629577 0.629577]\n",
      "74 iter 748 [D loss: 0.698268, acc.: 49.33%] [G loss: 0.679957 0.679957]\n",
      "74 iter 749 [D loss: 0.698249, acc.: 49.83%] [G loss: 0.681204 0.681204]\n",
      "75 iter 750 [D loss: 0.698232, acc.: 49.11%] [G loss: 0.676712 0.676712]\n",
      "75 iter 751 [D loss: 0.698213, acc.: 47.56%] [G loss: 0.682204 0.682204]\n",
      "75 iter 752 [D loss: 0.698193, acc.: 49.94%] [G loss: 0.688904 0.688904]\n",
      "75 iter 753 [D loss: 0.698174, acc.: 48.72%] [G loss: 0.683448 0.683448]\n",
      "75 iter 754 [D loss: 0.698156, acc.: 49.22%] [G loss: 0.683662 0.683662]\n",
      "75 iter 755 [D loss: 0.698138, acc.: 49.33%] [G loss: 0.681672 0.681672]\n",
      "75 iter 756 [D loss: 0.698118, acc.: 50.78%] [G loss: 0.682959 0.682959]\n",
      "75 iter 757 [D loss: 0.698100, acc.: 49.11%] [G loss: 0.632507 0.632507]\n",
      "75 iter 758 [D loss: 0.698081, acc.: 49.89%] [G loss: 0.680291 0.680291]\n",
      "75 iter 759 [D loss: 0.698062, acc.: 51.11%] [G loss: 0.681458 0.681458]\n",
      "76 iter 760 [D loss: 0.698045, acc.: 49.11%] [G loss: 0.676197 0.676197]\n",
      "76 iter 761 [D loss: 0.698026, acc.: 46.89%] [G loss: 0.681122 0.681122]\n",
      "76 iter 762 [D loss: 0.698007, acc.: 50.67%] [G loss: 0.689025 0.689025]\n",
      "76 iter 763 [D loss: 0.697987, acc.: 49.06%] [G loss: 0.683457 0.683457]\n",
      "76 iter 764 [D loss: 0.697968, acc.: 49.67%] [G loss: 0.683710 0.683710]\n",
      "76 iter 765 [D loss: 0.697950, acc.: 49.39%] [G loss: 0.681741 0.681741]\n",
      "76 iter 766 [D loss: 0.697931, acc.: 49.94%] [G loss: 0.682991 0.682991]\n",
      "76 iter 767 [D loss: 0.697912, acc.: 48.56%] [G loss: 0.631810 0.631810]\n",
      "76 iter 768 [D loss: 0.697894, acc.: 49.33%] [G loss: 0.680323 0.680323]\n",
      "76 iter 769 [D loss: 0.697874, acc.: 51.83%] [G loss: 0.681404 0.681404]\n",
      "77 iter 770 [D loss: 0.697856, acc.: 50.11%] [G loss: 0.676795 0.676795]\n",
      "77 iter 771 [D loss: 0.697837, acc.: 46.94%] [G loss: 0.681721 0.681721]\n",
      "77 iter 772 [D loss: 0.697818, acc.: 51.39%] [G loss: 0.688878 0.688878]\n",
      "77 iter 773 [D loss: 0.697798, acc.: 49.94%] [G loss: 0.683552 0.683552]\n",
      "77 iter 774 [D loss: 0.697780, acc.: 50.50%] [G loss: 0.683739 0.683739]\n",
      "77 iter 775 [D loss: 0.697761, acc.: 50.06%] [G loss: 0.680905 0.680905]\n",
      "77 iter 776 [D loss: 0.697741, acc.: 50.06%] [G loss: 0.683021 0.683021]\n",
      "77 iter 777 [D loss: 0.697723, acc.: 48.67%] [G loss: 0.633636 0.633636]\n",
      "77 iter 778 [D loss: 0.697704, acc.: 49.72%] [G loss: 0.680143 0.680143]\n",
      "77 iter 779 [D loss: 0.697684, acc.: 51.39%] [G loss: 0.681723 0.681723]\n",
      "78 iter 780 [D loss: 0.697666, acc.: 50.28%] [G loss: 0.676735 0.676735]\n",
      "78 iter 781 [D loss: 0.697647, acc.: 46.72%] [G loss: 0.681930 0.681930]\n",
      "78 iter 782 [D loss: 0.697628, acc.: 50.06%] [G loss: 0.688993 0.688993]\n",
      "78 iter 783 [D loss: 0.697608, acc.: 50.28%] [G loss: 0.683760 0.683760]\n",
      "78 iter 784 [D loss: 0.697589, acc.: 50.17%] [G loss: 0.683973 0.683973]\n",
      "78 iter 785 [D loss: 0.697571, acc.: 49.56%] [G loss: 0.681156 0.681156]\n",
      "78 iter 786 [D loss: 0.697551, acc.: 50.11%] [G loss: 0.683145 0.683145]\n",
      "78 iter 787 [D loss: 0.697532, acc.: 48.78%] [G loss: 0.636509 0.636509]\n",
      "78 iter 788 [D loss: 0.697513, acc.: 49.28%] [G loss: 0.680719 0.680719]\n",
      "78 iter 789 [D loss: 0.697493, acc.: 50.94%] [G loss: 0.681781 0.681781]\n",
      "79 iter 790 [D loss: 0.697475, acc.: 49.83%] [G loss: 0.679626 0.679626]\n",
      "79 iter 791 [D loss: 0.697456, acc.: 47.61%] [G loss: 0.681667 0.681667]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "79 iter 792 [D loss: 0.697436, acc.: 50.28%] [G loss: 0.689068 0.689068]\n",
      "79 iter 793 [D loss: 0.697416, acc.: 50.39%] [G loss: 0.683908 0.683908]\n",
      "79 iter 794 [D loss: 0.697397, acc.: 49.83%] [G loss: 0.684222 0.684222]\n",
      "79 iter 795 [D loss: 0.697378, acc.: 48.94%] [G loss: 0.681715 0.681715]\n",
      "79 iter 796 [D loss: 0.697358, acc.: 50.39%] [G loss: 0.683242 0.683242]\n",
      "79 iter 797 [D loss: 0.697340, acc.: 48.50%] [G loss: 0.637761 0.637761]\n",
      "79 iter 798 [D loss: 0.697320, acc.: 48.72%] [G loss: 0.680520 0.680520]\n",
      "79 iter 799 [D loss: 0.697301, acc.: 51.44%] [G loss: 0.681893 0.681893]\n",
      "80 iter 800 [D loss: 0.697282, acc.: 49.50%] [G loss: 0.677383 0.677383]\n",
      "80 iter 801 [D loss: 0.697263, acc.: 49.39%] [G loss: 0.681838 0.681838]\n",
      "80 iter 802 [D loss: 0.697243, acc.: 50.83%] [G loss: 0.688934 0.688934]\n",
      "80 iter 803 [D loss: 0.697223, acc.: 50.56%] [G loss: 0.684118 0.684118]\n",
      "80 iter 804 [D loss: 0.697204, acc.: 51.17%] [G loss: 0.684419 0.684419]\n",
      "80 iter 805 [D loss: 0.697185, acc.: 49.78%] [G loss: 0.680940 0.680940]\n",
      "80 iter 806 [D loss: 0.697165, acc.: 50.50%] [G loss: 0.683354 0.683354]\n",
      "80 iter 807 [D loss: 0.697146, acc.: 48.39%] [G loss: 0.638104 0.638104]\n",
      "80 iter 808 [D loss: 0.697126, acc.: 49.56%] [G loss: 0.680753 0.680753]\n",
      "80 iter 809 [D loss: 0.697107, acc.: 50.44%] [G loss: 0.682128 0.682128]\n",
      "81 iter 810 [D loss: 0.697088, acc.: 49.72%] [G loss: 0.677518 0.677518]\n",
      "81 iter 811 [D loss: 0.697068, acc.: 49.33%] [G loss: 0.682173 0.682173]\n",
      "81 iter 812 [D loss: 0.697048, acc.: 50.72%] [G loss: 0.688923 0.688923]\n",
      "81 iter 813 [D loss: 0.697028, acc.: 50.28%] [G loss: 0.684324 0.684324]\n",
      "81 iter 814 [D loss: 0.697009, acc.: 50.22%] [G loss: 0.684600 0.684600]\n",
      "81 iter 815 [D loss: 0.696990, acc.: 50.00%] [G loss: 0.680779 0.680779]\n",
      "81 iter 816 [D loss: 0.696970, acc.: 50.89%] [G loss: 0.683704 0.683704]\n",
      "81 iter 817 [D loss: 0.696950, acc.: 48.44%] [G loss: 0.641317 0.641317]\n",
      "81 iter 818 [D loss: 0.696931, acc.: 49.67%] [G loss: 0.681583 0.681583]\n",
      "81 iter 819 [D loss: 0.696911, acc.: 49.67%] [G loss: 0.682127 0.682127]\n",
      "82 iter 820 [D loss: 0.696892, acc.: 49.17%] [G loss: 0.681027 0.681027]\n",
      "82 iter 821 [D loss: 0.696873, acc.: 51.22%] [G loss: 0.683980 0.683980]\n",
      "82 iter 822 [D loss: 0.696852, acc.: 51.11%] [G loss: 0.688924 0.688924]\n",
      "82 iter 823 [D loss: 0.696836, acc.: 53.50%] [G loss: 0.684151 0.684151]\n",
      "82 iter 824 [D loss: 0.696813, acc.: 51.50%] [G loss: 0.684372 0.684372]\n",
      "82 iter 825 [D loss: 0.696794, acc.: 50.83%] [G loss: 0.682142 0.682142]\n",
      "82 iter 826 [D loss: 0.696773, acc.: 50.67%] [G loss: 0.683692 0.683692]\n",
      "82 iter 827 [D loss: 0.696754, acc.: 48.22%] [G loss: 0.645498 0.645498]\n",
      "82 iter 828 [D loss: 0.696734, acc.: 50.00%] [G loss: 0.681763 0.681763]\n",
      "82 iter 829 [D loss: 0.696714, acc.: 50.94%] [G loss: 0.682668 0.682668]\n",
      "83 iter 830 [D loss: 0.696695, acc.: 49.89%] [G loss: 0.679312 0.679312]\n",
      "83 iter 831 [D loss: 0.696675, acc.: 49.83%] [G loss: 0.683396 0.683396]\n",
      "83 iter 832 [D loss: 0.696655, acc.: 50.22%] [G loss: 0.688364 0.688364]\n",
      "83 iter 833 [D loss: 0.696636, acc.: 50.11%] [G loss: 0.684158 0.684158]\n",
      "83 iter 834 [D loss: 0.696615, acc.: 50.33%] [G loss: 0.684276 0.684276]\n",
      "83 iter 835 [D loss: 0.696595, acc.: 49.89%] [G loss: 0.681592 0.681592]\n",
      "83 iter 836 [D loss: 0.696575, acc.: 50.44%] [G loss: 0.683353 0.683353]\n",
      "83 iter 837 [D loss: 0.696555, acc.: 47.78%] [G loss: 0.644645 0.644645]\n",
      "83 iter 838 [D loss: 0.696536, acc.: 49.67%] [G loss: 0.681474 0.681474]\n",
      "83 iter 839 [D loss: 0.696516, acc.: 49.89%] [G loss: 0.682047 0.682047]\n",
      "84 iter 840 [D loss: 0.696496, acc.: 49.56%] [G loss: 0.678630 0.678630]\n",
      "84 iter 841 [D loss: 0.696476, acc.: 49.33%] [G loss: 0.682737 0.682737]\n",
      "84 iter 842 [D loss: 0.696456, acc.: 50.11%] [G loss: 0.688598 0.688598]\n",
      "84 iter 843 [D loss: 0.696436, acc.: 49.89%] [G loss: 0.684153 0.684153]\n",
      "84 iter 844 [D loss: 0.696416, acc.: 49.72%] [G loss: 0.684500 0.684500]\n",
      "84 iter 845 [D loss: 0.696396, acc.: 49.78%] [G loss: 0.681573 0.681573]\n",
      "84 iter 846 [D loss: 0.696376, acc.: 50.06%] [G loss: 0.683790 0.683790]\n",
      "84 iter 847 [D loss: 0.696356, acc.: 48.78%] [G loss: 0.644739 0.644739]\n",
      "84 iter 848 [D loss: 0.696336, acc.: 49.83%] [G loss: 0.681537 0.681537]\n",
      "84 iter 849 [D loss: 0.696316, acc.: 49.89%] [G loss: 0.682042 0.682042]\n",
      "85 iter 850 [D loss: 0.696296, acc.: 49.78%] [G loss: 0.678803 0.678803]\n",
      "85 iter 851 [D loss: 0.696276, acc.: 50.11%] [G loss: 0.682450 0.682450]\n",
      "85 iter 852 [D loss: 0.696256, acc.: 49.89%] [G loss: 0.688677 0.688677]\n",
      "85 iter 853 [D loss: 0.696236, acc.: 49.72%] [G loss: 0.684450 0.684450]\n",
      "85 iter 854 [D loss: 0.696215, acc.: 49.83%] [G loss: 0.684776 0.684776]\n",
      "85 iter 855 [D loss: 0.696195, acc.: 49.78%] [G loss: 0.682194 0.682194]\n",
      "85 iter 856 [D loss: 0.696175, acc.: 50.22%] [G loss: 0.684001 0.684001]\n",
      "85 iter 857 [D loss: 0.696155, acc.: 48.56%] [G loss: 0.646176 0.646176]\n",
      "85 iter 858 [D loss: 0.696135, acc.: 49.94%] [G loss: 0.681560 0.681560]\n",
      "85 iter 859 [D loss: 0.696115, acc.: 50.33%] [G loss: 0.682359 0.682359]\n",
      "86 iter 860 [D loss: 0.696095, acc.: 49.94%] [G loss: 0.678904 0.678904]\n",
      "86 iter 861 [D loss: 0.696075, acc.: 50.44%] [G loss: 0.682927 0.682927]\n",
      "86 iter 862 [D loss: 0.696054, acc.: 49.94%] [G loss: 0.688719 0.688719]\n",
      "86 iter 863 [D loss: 0.696034, acc.: 50.00%] [G loss: 0.684650 0.684650]\n",
      "86 iter 864 [D loss: 0.696014, acc.: 50.56%] [G loss: 0.685109 0.685109]\n",
      "86 iter 865 [D loss: 0.695994, acc.: 50.28%] [G loss: 0.681787 0.681787]\n",
      "86 iter 866 [D loss: 0.695973, acc.: 50.17%] [G loss: 0.684377 0.684377]\n",
      "86 iter 867 [D loss: 0.695953, acc.: 49.39%] [G loss: 0.646918 0.646918]\n",
      "86 iter 868 [D loss: 0.695933, acc.: 50.22%] [G loss: 0.681886 0.681886]\n",
      "86 iter 869 [D loss: 0.695913, acc.: 49.94%] [G loss: 0.682568 0.682568]\n",
      "87 iter 870 [D loss: 0.695893, acc.: 49.61%] [G loss: 0.678983 0.678983]\n",
      "87 iter 871 [D loss: 0.695872, acc.: 49.72%] [G loss: 0.682859 0.682859]\n",
      "87 iter 872 [D loss: 0.695852, acc.: 50.06%] [G loss: 0.688642 0.688642]\n",
      "87 iter 873 [D loss: 0.695832, acc.: 49.83%] [G loss: 0.684808 0.684808]\n",
      "87 iter 874 [D loss: 0.695811, acc.: 50.28%] [G loss: 0.685243 0.685243]\n",
      "87 iter 875 [D loss: 0.695791, acc.: 49.94%] [G loss: 0.681517 0.681517]\n",
      "87 iter 876 [D loss: 0.695771, acc.: 49.89%] [G loss: 0.684544 0.684544]\n",
      "87 iter 877 [D loss: 0.695751, acc.: 49.72%] [G loss: 0.648410 0.648410]\n",
      "87 iter 878 [D loss: 0.695730, acc.: 49.83%] [G loss: 0.682392 0.682392]\n",
      "87 iter 879 [D loss: 0.695710, acc.: 50.17%] [G loss: 0.682817 0.682817]\n",
      "88 iter 880 [D loss: 0.695690, acc.: 49.61%] [G loss: 0.679113 0.679113]\n",
      "88 iter 881 [D loss: 0.695669, acc.: 50.00%] [G loss: 0.683221 0.683221]\n",
      "88 iter 882 [D loss: 0.695649, acc.: 50.00%] [G loss: 0.688594 0.688594]\n",
      "88 iter 883 [D loss: 0.695628, acc.: 49.61%] [G loss: 0.684973 0.684973]\n",
      "88 iter 884 [D loss: 0.695608, acc.: 50.33%] [G loss: 0.685479 0.685479]\n",
      "88 iter 885 [D loss: 0.695588, acc.: 50.00%] [G loss: 0.681876 0.681876]\n",
      "88 iter 886 [D loss: 0.695567, acc.: 49.78%] [G loss: 0.684805 0.684805]\n",
      "88 iter 887 [D loss: 0.695547, acc.: 49.83%] [G loss: 0.649592 0.649592]\n",
      "88 iter 888 [D loss: 0.695527, acc.: 49.89%] [G loss: 0.682406 0.682406]\n",
      "88 iter 889 [D loss: 0.695506, acc.: 49.83%] [G loss: 0.683085 0.683085]\n",
      "89 iter 890 [D loss: 0.695486, acc.: 49.67%] [G loss: 0.680054 0.680054]\n",
      "89 iter 891 [D loss: 0.695466, acc.: 50.22%] [G loss: 0.683423 0.683423]\n",
      "89 iter 892 [D loss: 0.695445, acc.: 49.89%] [G loss: 0.688473 0.688473]\n",
      "89 iter 893 [D loss: 0.695425, acc.: 49.94%] [G loss: 0.685061 0.685061]\n",
      "89 iter 894 [D loss: 0.695405, acc.: 50.17%] [G loss: 0.685593 0.685593]\n",
      "89 iter 895 [D loss: 0.695384, acc.: 50.00%] [G loss: 0.682555 0.682555]\n",
      "89 iter 896 [D loss: 0.695364, acc.: 50.06%] [G loss: 0.684994 0.684994]\n",
      "89 iter 897 [D loss: 0.695344, acc.: 50.00%] [G loss: 0.652321 0.652321]\n",
      "89 iter 898 [D loss: 0.695323, acc.: 49.89%] [G loss: 0.682703 0.682703]\n",
      "89 iter 899 [D loss: 0.695303, acc.: 49.89%] [G loss: 0.683450 0.683450]\n",
      "90 iter 900 [D loss: 0.695283, acc.: 49.56%] [G loss: 0.680645 0.680645]\n",
      "90 iter 901 [D loss: 0.695262, acc.: 50.11%] [G loss: 0.683574 0.683574]\n",
      "90 iter 902 [D loss: 0.695242, acc.: 49.94%] [G loss: 0.688407 0.688407]\n",
      "90 iter 903 [D loss: 0.695221, acc.: 50.00%] [G loss: 0.685242 0.685242]\n",
      "90 iter 904 [D loss: 0.695201, acc.: 50.22%] [G loss: 0.685848 0.685848]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "90 iter 905 [D loss: 0.695181, acc.: 49.94%] [G loss: 0.682110 0.682110]\n",
      "90 iter 906 [D loss: 0.695160, acc.: 50.00%] [G loss: 0.685221 0.685221]\n",
      "90 iter 907 [D loss: 0.695140, acc.: 49.83%] [G loss: 0.653998 0.653998]\n",
      "90 iter 908 [D loss: 0.695120, acc.: 50.17%] [G loss: 0.683034 0.683034]\n",
      "90 iter 909 [D loss: 0.695100, acc.: 50.17%] [G loss: 0.683919 0.683919]\n",
      "91 iter 910 [D loss: 0.695080, acc.: 50.06%] [G loss: 0.681684 0.681684]\n",
      "91 iter 911 [D loss: 0.695059, acc.: 50.33%] [G loss: 0.684048 0.684048]\n",
      "91 iter 912 [D loss: 0.695039, acc.: 49.94%] [G loss: 0.688383 0.688383]\n",
      "91 iter 913 [D loss: 0.695019, acc.: 50.06%] [G loss: 0.685471 0.685471]\n",
      "93 iter 931 [D loss: 0.694659, acc.: 50.11%] [G loss: 0.684333 0.684333]\n",
      "93 iter 932 [D loss: 0.694639, acc.: 50.00%] [G loss: 0.688291 0.688291]\n",
      "93 iter 933 [D loss: 0.694619, acc.: 50.11%] [G loss: 0.685661 0.685661]\n",
      "93 iter 934 [D loss: 0.694600, acc.: 50.00%] [G loss: 0.686191 0.686191]\n",
      "93 iter 935 [D loss: 0.694580, acc.: 49.94%] [G loss: 0.682986 0.682986]\n",
      "93 iter 936 [D loss: 0.694561, acc.: 50.00%] [G loss: 0.685833 0.685833]\n",
      "93 iter 937 [D loss: 0.694541, acc.: 50.00%] [G loss: 0.658844 0.658844]\n",
      "93 iter 938 [D loss: 0.694522, acc.: 50.06%] [G loss: 0.684103 0.684103]\n",
      "93 iter 939 [D loss: 0.694503, acc.: 50.00%] [G loss: 0.684680 0.684680]\n",
      "94 iter 940 [D loss: 0.694483, acc.: 50.00%] [G loss: 0.681770 0.681770]\n",
      "94 iter 941 [D loss: 0.694464, acc.: 50.11%] [G loss: 0.684806 0.684806]\n",
      "94 iter 942 [D loss: 0.694445, acc.: 50.00%] [G loss: 0.688255 0.688255]\n",
      "94 iter 943 [D loss: 0.694426, acc.: 50.06%] [G loss: 0.685868 0.685868]\n",
      "94 iter 944 [D loss: 0.694407, acc.: 50.00%] [G loss: 0.686418 0.686418]\n",
      "94 iter 945 [D loss: 0.694388, acc.: 50.00%] [G loss: 0.683372 0.683372]\n",
      "94 iter 946 [D loss: 0.694369, acc.: 50.00%] [G loss: 0.686071 0.686071]\n",
      "94 iter 947 [D loss: 0.694351, acc.: 50.00%] [G loss: 0.661173 0.661173]\n",
      "94 iter 948 [D loss: 0.694332, acc.: 50.00%] [G loss: 0.684354 0.684354]\n",
      "94 iter 949 [D loss: 0.694313, acc.: 50.06%] [G loss: 0.684952 0.684952]\n",
      "95 iter 950 [D loss: 0.694295, acc.: 50.00%] [G loss: 0.682322 0.682322]\n",
      "95 iter 951 [D loss: 0.694277, acc.: 50.00%] [G loss: 0.684841 0.684841]\n",
      "95 iter 952 [D loss: 0.694258, acc.: 50.00%] [G loss: 0.688228 0.688228]\n",
      "95 iter 953 [D loss: 0.694240, acc.: 50.06%] [G loss: 0.686042 0.686042]\n",
      "95 iter 954 [D loss: 0.694222, acc.: 50.00%] [G loss: 0.686523 0.686523]\n",
      "95 iter 955 [D loss: 0.694204, acc.: 50.06%] [G loss: 0.683639 0.683639]\n",
      "95 iter 956 [D loss: 0.694186, acc.: 50.00%] [G loss: 0.686276 0.686276]\n",
      "95 iter 957 [D loss: 0.694169, acc.: 50.00%] [G loss: 0.663037 0.663037]\n",
      "95 iter 958 [D loss: 0.694151, acc.: 50.17%] [G loss: 0.684701 0.684701]\n",
      "95 iter 959 [D loss: 0.694133, acc.: 50.06%] [G loss: 0.685277 0.685277]\n",
      "96 iter 960 [D loss: 0.694116, acc.: 50.00%] [G loss: 0.682620 0.682620]\n",
      "96 iter 961 [D loss: 0.694099, acc.: 50.00%] [G loss: 0.685126 0.685126]\n",
      "96 iter 962 [D loss: 0.694082, acc.: 50.00%] [G loss: 0.688202 0.688202]\n",
      "96 iter 963 [D loss: 0.694064, acc.: 50.06%] [G loss: 0.686204 0.686204]\n",
      "96 iter 964 [D loss: 0.694048, acc.: 50.00%] [G loss: 0.686705 0.686705]\n",
      "96 iter 965 [D loss: 0.694031, acc.: 50.11%] [G loss: 0.683597 0.683597]\n",
      "96 iter 966 [D loss: 0.694014, acc.: 50.00%] [G loss: 0.686484 0.686484]\n",
      "96 iter 967 [D loss: 0.693998, acc.: 50.00%] [G loss: 0.665028 0.665028]\n",
      "96 iter 968 [D loss: 0.693982, acc.: 50.06%] [G loss: 0.685031 0.685031]\n",
      "96 iter 969 [D loss: 0.693965, acc.: 50.00%] [G loss: 0.685605 0.685605]\n",
      "97 iter 970 [D loss: 0.693949, acc.: 50.00%] [G loss: 0.682931 0.682931]\n",
      "97 iter 971 [D loss: 0.693934, acc.: 50.00%] [G loss: 0.685419 0.685419]\n",
      "97 iter 972 [D loss: 0.693918, acc.: 50.00%] [G loss: 0.688177 0.688177]\n",
      "97 iter 973 [D loss: 0.693902, acc.: 50.00%] [G loss: 0.686412 0.686412]\n",
      "97 iter 974 [D loss: 0.693887, acc.: 50.00%] [G loss: 0.686871 0.686871]\n",
      "97 iter 975 [D loss: 0.693872, acc.: 50.00%] [G loss: 0.684026 0.684026]\n",
      "97 iter 976 [D loss: 0.693857, acc.: 50.00%] [G loss: 0.686701 0.686701]\n",
      "97 iter 977 [D loss: 0.693842, acc.: 50.00%] [G loss: 0.667472 0.667472]\n",
      "97 iter 978 [D loss: 0.693828, acc.: 50.00%] [G loss: 0.685281 0.685281]\n",
      "97 iter 979 [D loss: 0.693813, acc.: 50.11%] [G loss: 0.685914 0.685914]\n",
      "98 iter 980 [D loss: 0.693799, acc.: 50.00%] [G loss: 0.683593 0.683593]\n",
      "98 iter 981 [D loss: 0.693785, acc.: 50.00%] [G loss: 0.685767 0.685767]\n",
      "98 iter 982 [D loss: 0.693771, acc.: 50.00%] [G loss: 0.688166 0.688166]\n",
      "98 iter 983 [D loss: 0.693758, acc.: 50.00%] [G loss: 0.686616 0.686616]\n",
      "98 iter 984 [D loss: 0.693744, acc.: 50.00%] [G loss: 0.687031 0.687031]\n",
      "98 iter 985 [D loss: 0.693731, acc.: 50.00%] [G loss: 0.684704 0.684704]\n",
      "98 iter 986 [D loss: 0.693718, acc.: 50.00%] [G loss: 0.686900 0.686900]\n",
      "98 iter 987 [D loss: 0.693705, acc.: 50.00%] [G loss: 0.670103 0.670103]\n",
      "98 iter 988 [D loss: 0.693693, acc.: 50.00%] [G loss: 0.685705 0.685705]\n",
      "98 iter 989 [D loss: 0.693680, acc.: 50.00%] [G loss: 0.686236 0.686236]\n",
      "99 iter 990 [D loss: 0.693668, acc.: 50.00%] [G loss: 0.684251 0.684251]\n",
      "99 iter 991 [D loss: 0.693656, acc.: 50.00%] [G loss: 0.685948 0.685948]\n",
      "99 iter 992 [D loss: 0.693644, acc.: 50.00%] [G loss: 0.688161 0.688161]\n",
      "99 iter 993 [D loss: 0.693633, acc.: 50.00%] [G loss: 0.686813 0.686813]\n",
      "101 iter 1017 [D loss: 0.693421, acc.: 50.00%] [G loss: 0.676731 0.676731]\n",
      "101 iter 1018 [D loss: 0.693415, acc.: 50.00%] [G loss: 0.686687 0.686687]\n",
      "101 iter 1019 [D loss: 0.693408, acc.: 50.00%] [G loss: 0.687022 0.687022]\n",
      "102 iter 1020 [D loss: 0.693403, acc.: 50.00%] [G loss: 0.685505 0.685505]\n",
      "102 iter 1021 [D loss: 0.693397, acc.: 50.00%] [G loss: 0.686851 0.686851]\n",
      "102 iter 1022 [D loss: 0.693391, acc.: 50.00%] [G loss: 0.688185 0.688185]\n",
      "102 iter 1023 [D loss: 0.693386, acc.: 50.00%] [G loss: 0.687307 0.687307]\n",
      "102 iter 1024 [D loss: 0.693381, acc.: 50.00%] [G loss: 0.687615 0.687615]\n",
      "102 iter 1025 [D loss: 0.693375, acc.: 50.00%] [G loss: 0.686140 0.686140]\n",
      "102 iter 1026 [D loss: 0.693370, acc.: 50.00%] [G loss: 0.687582 0.687582]\n",
      "102 iter 1027 [D loss: 0.693366, acc.: 50.00%] [G loss: 0.678247 0.678247]\n",
      "102 iter 1028 [D loss: 0.693361, acc.: 50.00%] [G loss: 0.686918 0.686918]\n",
      "102 iter 1029 [D loss: 0.693357, acc.: 50.00%] [G loss: 0.687234 0.687234]\n",
      "103 iter 1030 [D loss: 0.693352, acc.: 50.00%] [G loss: 0.685926 0.685926]\n",
      "103 iter 1031 [D loss: 0.693348, acc.: 50.00%] [G loss: 0.687044 0.687044]\n",
      "103 iter 1032 [D loss: 0.693344, acc.: 50.00%] [G loss: 0.688203 0.688203]\n",
      "103 iter 1033 [D loss: 0.693340, acc.: 50.00%] [G loss: 0.687466 0.687466]\n",
      "103 iter 1034 [D loss: 0.693336, acc.: 50.00%] [G loss: 0.687733 0.687733]\n",
      "103 iter 1035 [D loss: 0.693333, acc.: 50.00%] [G loss: 0.686359 0.686359]\n",
      "103 iter 1036 [D loss: 0.693329, acc.: 50.00%] [G loss: 0.687708 0.687708]\n",
      "103 iter 1037 [D loss: 0.693326, acc.: 50.00%] [G loss: 0.679867 0.679867]\n",
      "103 iter 1038 [D loss: 0.693323, acc.: 50.00%] [G loss: 0.687122 0.687122]\n",
      "103 iter 1039 [D loss: 0.693319, acc.: 50.00%] [G loss: 0.687409 0.687409]\n",
      "104 iter 1040 [D loss: 0.693316, acc.: 50.00%] [G loss: 0.686399 0.686399]\n",
      "104 iter 1041 [D loss: 0.693313, acc.: 50.00%] [G loss: 0.687209 0.687209]\n",
      "104 iter 1042 [D loss: 0.693310, acc.: 50.00%] [G loss: 0.688226 0.688226]\n",
      "104 iter 1043 [D loss: 0.693308, acc.: 50.00%] [G loss: 0.687606 0.687606]\n",
      "104 iter 1044 [D loss: 0.693305, acc.: 50.00%] [G loss: 0.687843 0.687843]\n",
      "104 iter 1045 [D loss: 0.693303, acc.: 50.00%] [G loss: 0.686706 0.686706]\n",
      "104 iter 1046 [D loss: 0.693300, acc.: 50.00%] [G loss: 0.687827 0.687827]\n",
      "104 iter 1047 [D loss: 0.693298, acc.: 50.00%] [G loss: 0.681271 0.681271]\n",
      "104 iter 1048 [D loss: 0.693296, acc.: 50.00%] [G loss: 0.687324 0.687324]\n",
      "104 iter 1049 [D loss: 0.693293, acc.: 50.00%] [G loss: 0.687593 0.687593]\n",
      "105 iter 1050 [D loss: 0.693291, acc.: 50.00%] [G loss: 0.686749 0.686749]\n",
      "105 iter 1051 [D loss: 0.693289, acc.: 50.00%] [G loss: 0.687398 0.687398]\n",
      "105 iter 1052 [D loss: 0.693287, acc.: 50.00%] [G loss: 0.688252 0.688252]\n",
      "105 iter 1053 [D loss: 0.693285, acc.: 50.00%] [G loss: 0.687749 0.687749]\n",
      "105 iter 1054 [D loss: 0.693284, acc.: 50.00%] [G loss: 0.687944 0.687944]\n",
      "105 iter 1055 [D loss: 0.693282, acc.: 50.00%] [G loss: 0.687056 0.687056]\n",
      "105 iter 1056 [D loss: 0.693280, acc.: 50.00%] [G loss: 0.687936 0.687936]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "105 iter 1057 [D loss: 0.693279, acc.: 50.00%] [G loss: 0.682592 0.682592]\n",
      "105 iter 1058 [D loss: 0.693277, acc.: 50.00%] [G loss: 0.687543 0.687543]\n",
      "105 iter 1059 [D loss: 0.693276, acc.: 50.00%] [G loss: 0.687744 0.687744]\n",
      "106 iter 1060 [D loss: 0.693274, acc.: 50.00%] [G loss: 0.686957 0.686957]\n",
      "106 iter 1061 [D loss: 0.693273, acc.: 50.00%] [G loss: 0.687618 0.687618]\n",
      "106 iter 1062 [D loss: 0.693271, acc.: 50.00%] [G loss: 0.688285 0.688285]\n",
      "106 iter 1063 [D loss: 0.693270, acc.: 50.00%] [G loss: 0.687885 0.687885]\n",
      "106 iter 1064 [D loss: 0.693269, acc.: 50.00%] [G loss: 0.688038 0.688038]\n",
      "106 iter 1065 [D loss: 0.693268, acc.: 50.00%] [G loss: 0.687261 0.687261]\n",
      "106 iter 1066 [D loss: 0.693267, acc.: 50.00%] [G loss: 0.688034 0.688034]\n",
      "106 iter 1067 [D loss: 0.693266, acc.: 50.00%] [G loss: 0.683568 0.683568]\n",
      "106 iter 1068 [D loss: 0.693264, acc.: 50.00%] [G loss: 0.687727 0.687727]\n",
      "106 iter 1069 [D loss: 0.693264, acc.: 50.00%] [G loss: 0.687878 0.687878]\n",
      "107 iter 1070 [D loss: 0.693263, acc.: 50.00%] [G loss: 0.687226 0.687226]\n",
      "107 iter 1071 [D loss: 0.693262, acc.: 50.00%] [G loss: 0.687760 0.687760]\n",
      "107 iter 1072 [D loss: 0.693261, acc.: 50.00%] [G loss: 0.688320 0.688320]\n",
      "107 iter 1073 [D loss: 0.693260, acc.: 50.00%] [G loss: 0.687990 0.687990]\n",
      "107 iter 1074 [D loss: 0.693259, acc.: 50.00%] [G loss: 0.688121 0.688121]\n",
      "107 iter 1075 [D loss: 0.693258, acc.: 50.00%] [G loss: 0.687461 0.687461]\n",
      "107 iter 1076 [D loss: 0.693258, acc.: 50.00%] [G loss: 0.688123 0.688123]\n",
      "107 iter 1077 [D loss: 0.693257, acc.: 50.00%] [G loss: 0.684435 0.684435]\n",
      "107 iter 1078 [D loss: 0.693256, acc.: 50.00%] [G loss: 0.687881 0.687881]\n",
      "107 iter 1079 [D loss: 0.693256, acc.: 50.00%] [G loss: 0.688004 0.688004]\n",
      "108 iter 1080 [D loss: 0.693255, acc.: 50.00%] [G loss: 0.687452 0.687452]\n",
      "108 iter 1081 [D loss: 0.693254, acc.: 50.00%] [G loss: 0.687906 0.687906]\n",
      "108 iter 1082 [D loss: 0.693254, acc.: 50.00%] [G loss: 0.688357 0.688357]\n",
      "108 iter 1083 [D loss: 0.693253, acc.: 50.00%] [G loss: 0.688089 0.688089]\n",
      "108 iter 1084 [D loss: 0.693253, acc.: 50.00%] [G loss: 0.688197 0.688197]\n",
      "108 iter 1085 [D loss: 0.693252, acc.: 50.00%] [G loss: 0.687668 0.687668]\n",
      "108 iter 1086 [D loss: 0.693252, acc.: 50.00%] [G loss: 0.688203 0.688203]\n",
      "108 iter 1087 [D loss: 0.693251, acc.: 50.00%] [G loss: 0.685225 0.685225]\n",
      "108 iter 1088 [D loss: 0.693251, acc.: 50.00%] [G loss: 0.688007 0.688007]\n",
      "108 iter 1089 [D loss: 0.693250, acc.: 50.00%] [G loss: 0.688108 0.688108]\n",
      "109 iter 1090 [D loss: 0.693250, acc.: 50.00%] [G loss: 0.687687 0.687687]\n",
      "109 iter 1091 [D loss: 0.693249, acc.: 50.00%] [G loss: 0.688023 0.688023]\n",
      "109 iter 1092 [D loss: 0.693249, acc.: 50.00%] [G loss: 0.688395 0.688395]\n",
      "109 iter 1093 [D loss: 0.693249, acc.: 50.00%] [G loss: 0.688185 0.688185]\n",
      "109 iter 1094 [D loss: 0.693248, acc.: 50.00%] [G loss: 0.688271 0.688271]\n",
      "109 iter 1095 [D loss: 0.693248, acc.: 50.00%] [G loss: 0.687834 0.687834]\n",
      "109 iter 1096 [D loss: 0.693248, acc.: 50.00%] [G loss: 0.688277 0.688277]\n",
      "109 iter 1097 [D loss: 0.693247, acc.: 50.00%] [G loss: 0.685832 0.685832]\n",
      "109 iter 1098 [D loss: 0.693247, acc.: 50.00%] [G loss: 0.688113 0.688113]\n",
      "109 iter 1099 [D loss: 0.693247, acc.: 50.00%] [G loss: 0.688205 0.688205]\n",
      "110 iter 1100 [D loss: 0.693246, acc.: 50.00%] [G loss: 0.687861 0.687861]\n",
      "110 iter 1101 [D loss: 0.693246, acc.: 50.00%] [G loss: 0.688137 0.688137]\n",
      "110 iter 1102 [D loss: 0.693246, acc.: 50.00%] [G loss: 0.688436 0.688436]\n",
      "110 iter 1103 [D loss: 0.693246, acc.: 50.00%] [G loss: 0.688271 0.688271]\n",
      "110 iter 1104 [D loss: 0.693246, acc.: 50.00%] [G loss: 0.688339 0.688339]\n",
      "110 iter 1105 [D loss: 0.693245, acc.: 50.00%] [G loss: 0.687980 0.687980]\n",
      "110 iter 1106 [D loss: 0.693245, acc.: 50.00%] [G loss: 0.688344 0.688344]\n",
      "110 iter 1107 [D loss: 0.693245, acc.: 50.00%] [G loss: 0.686395 0.686395]\n",
      "110 iter 1108 [D loss: 0.693245, acc.: 50.00%] [G loss: 0.688217 0.688217]\n",
      "110 iter 1109 [D loss: 0.693244, acc.: 50.00%] [G loss: 0.688287 0.688287]\n",
      "111 iter 1110 [D loss: 0.693244, acc.: 50.00%] [G loss: 0.688016 0.688016]\n",
      "111 iter 1111 [D loss: 0.693244, acc.: 50.00%] [G loss: 0.688242 0.688242]\n",
      "111 iter 1112 [D loss: 0.693244, acc.: 50.00%] [G loss: 0.688476 0.688476]\n",
      "111 iter 1113 [D loss: 0.693244, acc.: 50.00%] [G loss: 0.688344 0.688344]\n",
      "111 iter 1114 [D loss: 0.693244, acc.: 50.00%] [G loss: 0.688400 0.688400]\n",
      "111 iter 1115 [D loss: 0.693243, acc.: 50.00%] [G loss: 0.688110 0.688110]\n",
      "111 iter 1116 [D loss: 0.693243, acc.: 50.00%] [G loss: 0.688408 0.688408]\n",
      "111 iter 1117 [D loss: 0.693243, acc.: 50.00%] [G loss: 0.686857 0.686857]\n",
      "111 iter 1118 [D loss: 0.693243, acc.: 50.00%] [G loss: 0.688308 0.688308]\n",
      "111 iter 1119 [D loss: 0.693243, acc.: 50.00%] [G loss: 0.688367 0.688367]\n",
      "112 iter 1120 [D loss: 0.693243, acc.: 50.00%] [G loss: 0.688142 0.688142]\n",
      "112 iter 1121 [D loss: 0.693243, acc.: 50.00%] [G loss: 0.688333 0.688333]\n",
      "112 iter 1122 [D loss: 0.693243, acc.: 50.00%] [G loss: 0.688518 0.688518]\n",
      "112 iter 1123 [D loss: 0.693242, acc.: 50.00%] [G loss: 0.688418 0.688418]\n",
      "112 iter 1124 [D loss: 0.693242, acc.: 50.00%] [G loss: 0.688461 0.688461]\n",
      "112 iter 1125 [D loss: 0.693242, acc.: 50.00%] [G loss: 0.688224 0.688224]\n",
      "112 iter 1126 [D loss: 0.693242, acc.: 50.00%] [G loss: 0.688469 0.688469]\n",
      "112 iter 1127 [D loss: 0.693242, acc.: 50.00%] [G loss: 0.687256 0.687256]\n",
      "112 iter 1128 [D loss: 0.693242, acc.: 50.00%] [G loss: 0.688393 0.688393]\n",
      "112 iter 1129 [D loss: 0.693242, acc.: 50.00%] [G loss: 0.688438 0.688438]\n",
      "113 iter 1130 [D loss: 0.693242, acc.: 50.00%] [G loss: 0.688267 0.688267]\n",
      "113 iter 1131 [D loss: 0.693242, acc.: 50.00%] [G loss: 0.688412 0.688412]\n",
      "113 iter 1132 [D loss: 0.693242, acc.: 50.00%] [G loss: 0.688561 0.688561]\n",
      "113 iter 1133 [D loss: 0.693242, acc.: 50.00%] [G loss: 0.688480 0.688480]\n",
      "113 iter 1134 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.688518 0.688518]\n",
      "113 iter 1135 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.688344 0.688344]\n",
      "113 iter 1136 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.688526 0.688526]\n",
      "113 iter 1137 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.687557 0.687557]\n",
      "113 iter 1138 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.688469 0.688469]\n",
      "113 iter 1139 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.688505 0.688505]\n",
      "114 iter 1140 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.688366 0.688366]\n",
      "114 iter 1141 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.688485 0.688485]\n",
      "114 iter 1142 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.688604 0.688604]\n",
      "114 iter 1143 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.688539 0.688539]\n",
      "114 iter 1144 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.688572 0.688572]\n",
      "114 iter 1145 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.688434 0.688434]\n",
      "114 iter 1146 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.688580 0.688580]\n",
      "114 iter 1147 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.687830 0.687830]\n",
      "114 iter 1148 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.688537 0.688537]\n",
      "114 iter 1149 [D loss: 0.693241, acc.: 50.00%] [G loss: 0.688567 0.688567]\n",
      "115 iter 1150 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688460 0.688460]\n",
      "115 iter 1151 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688555 0.688555]\n",
      "115 iter 1152 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688647 0.688647]\n",
      "115 iter 1153 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688599 0.688599]\n",
      "115 iter 1154 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688624 0.688624]\n",
      "115 iter 1155 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688510 0.688510]\n",
      "115 iter 1156 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688632 0.688632]\n",
      "115 iter 1157 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688037 0.688037]\n",
      "115 iter 1158 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688599 0.688599]\n",
      "115 iter 1159 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688625 0.688625]\n",
      "116 iter 1160 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688542 0.688542]\n",
      "116 iter 1161 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688617 0.688617]\n",
      "116 iter 1162 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688691 0.688691]\n",
      "116 iter 1163 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688653 0.688653]\n",
      "116 iter 1164 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688675 0.688675]\n",
      "116 iter 1165 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688587 0.688587]\n",
      "116 iter 1166 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688683 0.688683]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "116 iter 1167 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688219 0.688219]\n",
      "116 iter 1168 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688659 0.688659]\n",
      "116 iter 1169 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688680 0.688680]\n",
      "117 iter 1170 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688619 0.688619]\n",
      "117 iter 1171 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688675 0.688675]\n",
      "117 iter 1172 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688734 0.688734]\n",
      "117 iter 1173 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688706 0.688706]\n",
      "117 iter 1174 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688724 0.688724]\n",
      "117 iter 1175 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688656 0.688656]\n",
      "117 iter 1176 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688732 0.688732]\n",
      "117 iter 1177 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688376 0.688376]\n",
      "117 iter 1178 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688716 0.688716]\n",
      "117 iter 1179 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688733 0.688733]\n",
      "118 iter 1180 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688685 0.688685]\n",
      "118 iter 1181 [D loss: 0.693240, acc.: 50.00%] [G loss: 0.688730 0.688730]\n",
      "118 iter 1182 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688777 0.688777]\n",
      "118 iter 1183 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688757 0.688757]\n",
      "118 iter 1184 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688771 0.688771]\n",
      "118 iter 1185 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688720 0.688720]\n",
      "118 iter 1186 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688780 0.688780]\n",
      "118 iter 1187 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688501 0.688501]\n",
      "118 iter 1188 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688769 0.688769]\n",
      "118 iter 1189 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688783 0.688783]\n",
      "119 iter 1190 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688745 0.688745]\n",
      "119 iter 1191 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688785 0.688785]\n",
      "119 iter 1192 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688821 0.688821]\n",
      "119 iter 1193 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688805 0.688805]\n",
      "119 iter 1194 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688818 0.688818]\n",
      "119 iter 1195 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688779 0.688779]\n",
      "119 iter 1196 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688826 0.688826]\n",
      "119 iter 1197 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688611 0.688611]\n",
      "119 iter 1198 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688820 0.688820]\n",
      "119 iter 1199 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688831 0.688831]\n",
      "120 iter 1200 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688803 0.688803]\n",
      "120 iter 1201 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688834 0.688834]\n",
      "120 iter 1202 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688864 0.688864]\n",
      "120 iter 1203 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688853 0.688853]\n",
      "120 iter 1204 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688863 0.688863]\n",
      "120 iter 1205 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688835 0.688835]\n",
      "120 iter 1206 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688872 0.688872]\n",
      "120 iter 1207 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688706 0.688706]\n",
      "120 iter 1208 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688869 0.688869]\n",
      "120 iter 1209 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688879 0.688879]\n",
      "121 iter 1210 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688859 0.688859]\n",
      "121 iter 1211 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688883 0.688883]\n",
      "121 iter 1212 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688906 0.688906]\n",
      "121 iter 1213 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688899 0.688899]\n",
      "121 iter 1214 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688908 0.688908]\n",
      "121 iter 1215 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688887 0.688887]\n",
      "121 iter 1216 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688916 0.688916]\n",
      "121 iter 1217 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688791 0.688791]\n",
      "121 iter 1218 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688916 0.688916]\n",
      "121 iter 1219 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688925 0.688925]\n",
      "122 iter 1220 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688910 0.688910]\n",
      "122 iter 1221 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688930 0.688930]\n",
      "122 iter 1222 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688949 0.688949]\n",
      "122 iter 1223 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688944 0.688944]\n",
      "122 iter 1224 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688952 0.688952]\n",
      "122 iter 1225 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688937 0.688937]\n",
      "122 iter 1226 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688960 0.688960]\n",
      "122 iter 1227 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688865 0.688865]\n",
      "122 iter 1228 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688962 0.688962]\n",
      "122 iter 1229 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.688969 0.688969]\n",
      "123 iter 1230 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688959 0.688959]\n",
      "123 iter 1231 [D loss: 0.693239, acc.: 50.00%] [G loss: 0.688975 0.688975]\n",
      "123 iter 1232 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.688991 0.688991]\n",
      "123 iter 1233 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.688988 0.688988]\n",
      "123 iter 1234 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.688995 0.688995]\n",
      "123 iter 1235 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.688985 0.688985]\n",
      "123 iter 1236 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689004 0.689004]\n",
      "123 iter 1237 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.688933 0.688933]\n",
      "123 iter 1238 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689007 0.689007]\n",
      "123 iter 1239 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689014 0.689014]\n",
      "124 iter 1240 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689007 0.689007]\n",
      "124 iter 1241 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689020 0.689020]\n",
      "124 iter 1242 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689033 0.689033]\n",
      "124 iter 1243 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689032 0.689032]\n",
      "124 iter 1244 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689038 0.689038]\n",
      "124 iter 1245 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689031 0.689031]\n",
      "124 iter 1246 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689046 0.689046]\n",
      "124 iter 1247 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.688992 0.688992]\n",
      "124 iter 1248 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689051 0.689051]\n",
      "124 iter 1249 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689057 0.689057]\n",
      "125 iter 1250 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689052 0.689052]\n",
      "125 iter 1251 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689063 0.689063]\n",
      "125 iter 1252 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689074 0.689074]\n",
      "125 iter 1253 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689074 0.689074]\n",
      "125 iter 1254 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689080 0.689080]\n",
      "125 iter 1255 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689075 0.689075]\n",
      "125 iter 1256 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689088 0.689088]\n",
      "125 iter 1257 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689048 0.689048]\n",
      "125 iter 1258 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689094 0.689094]\n",
      "125 iter 1259 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689099 0.689099]\n",
      "126 iter 1260 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689097 0.689097]\n",
      "126 iter 1261 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689106 0.689106]\n",
      "126 iter 1262 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689115 0.689115]\n",
      "126 iter 1263 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689116 0.689116]\n",
      "126 iter 1264 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689122 0.689122]\n",
      "126 iter 1265 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689119 0.689119]\n",
      "126 iter 1266 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689130 0.689130]\n",
      "126 iter 1267 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689101 0.689101]\n",
      "126 iter 1268 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689136 0.689136]\n",
      "126 iter 1269 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689141 0.689141]\n",
      "127 iter 1270 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689140 0.689140]\n",
      "127 iter 1271 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689148 0.689148]\n",
      "127 iter 1272 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689156 0.689156]\n",
      "127 iter 1273 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689158 0.689158]\n",
      "127 iter 1274 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689163 0.689163]\n",
      "127 iter 1275 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689162 0.689162]\n",
      "127 iter 1276 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689171 0.689171]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "127 iter 1277 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689150 0.689150]\n",
      "127 iter 1278 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689177 0.689177]\n",
      "127 iter 1279 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689182 0.689182]\n",
      "128 iter 1280 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689182 0.689182]\n",
      "128 iter 1281 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689189 0.689189]\n",
      "128 iter 1282 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689196 0.689196]\n",
      "128 iter 1283 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689199 0.689199]\n",
      "128 iter 1284 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689203 0.689203]\n",
      "128 iter 1285 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689204 0.689204]\n",
      "128 iter 1286 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689211 0.689211]\n",
      "128 iter 1287 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689197 0.689197]\n",
      "128 iter 1288 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689218 0.689218]\n",
      "128 iter 1289 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689223 0.689223]\n",
      "129 iter 1290 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689224 0.689224]\n",
      "129 iter 1291 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689230 0.689230]\n",
      "129 iter 1292 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689236 0.689236]\n",
      "129 iter 1293 [D loss: 0.693238, acc.: 50.00%] [G loss: 0.689239 0.689239]\n",
      "129 iter 1294 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689244 0.689244]\n",
      "129 iter 1295 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689245 0.689245]\n",
      "129 iter 1296 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689251 0.689251]\n",
      "129 iter 1297 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689241 0.689241]\n",
      "129 iter 1298 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689258 0.689258]\n",
      "129 iter 1299 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689263 0.689263]\n",
      "130 iter 1300 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689265 0.689265]\n",
      "130 iter 1301 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689270 0.689270]\n",
      "130 iter 1302 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689276 0.689276]\n",
      "130 iter 1303 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689279 0.689279]\n",
      "130 iter 1304 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689283 0.689283]\n",
      "130 iter 1305 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689285 0.689285]\n",
      "130 iter 1306 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689291 0.689291]\n",
      "130 iter 1307 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689285 0.689285]\n",
      "130 iter 1308 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689298 0.689298]\n",
      "130 iter 1309 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689303 0.689303]\n",
      "131 iter 1310 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689305 0.689305]\n",
      "131 iter 1311 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689310 0.689310]\n",
      "131 iter 1312 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689315 0.689315]\n",
      "131 iter 1313 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689318 0.689318]\n",
      "131 iter 1314 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689323 0.689323]\n",
      "131 iter 1315 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689325 0.689325]\n",
      "131 iter 1316 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689330 0.689330]\n",
      "131 iter 1317 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689326 0.689326]\n",
      "131 iter 1318 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689338 0.689338]\n",
      "131 iter 1319 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689342 0.689342]\n",
      "132 iter 1320 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689345 0.689345]\n",
      "132 iter 1321 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689349 0.689349]\n",
      "132 iter 1322 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689354 0.689354]\n",
      "132 iter 1323 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689357 0.689357]\n",
      "132 iter 1324 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689362 0.689362]\n",
      "132 iter 1325 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689364 0.689364]\n",
      "132 iter 1326 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689369 0.689369]\n",
      "132 iter 1327 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689367 0.689367]\n",
      "132 iter 1328 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689376 0.689376]\n",
      "132 iter 1329 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689381 0.689381]\n",
      "133 iter 1330 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689384 0.689384]\n",
      "133 iter 1331 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689388 0.689388]\n",
      "133 iter 1332 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689393 0.689393]\n",
      "133 iter 1333 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689396 0.689396]\n",
      "133 iter 1334 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689400 0.689400]\n",
      "133 iter 1335 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689403 0.689403]\n",
      "133 iter 1336 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689408 0.689408]\n",
      "133 iter 1337 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689407 0.689407]\n",
      "133 iter 1338 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689415 0.689415]\n",
      "133 iter 1339 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689419 0.689419]\n",
      "134 iter 1340 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689422 0.689422]\n",
      "134 iter 1341 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689426 0.689426]\n",
      "134 iter 1342 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689431 0.689431]\n",
      "134 iter 1343 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689434 0.689434]\n",
      "134 iter 1344 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689438 0.689438]\n",
      "134 iter 1345 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689441 0.689441]\n",
      "134 iter 1346 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689446 0.689446]\n",
      "134 iter 1347 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689446 0.689446]\n",
      "134 iter 1348 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689453 0.689453]\n",
      "134 iter 1349 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689457 0.689457]\n",
      "135 iter 1350 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689460 0.689460]\n",
      "135 iter 1351 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689464 0.689464]\n",
      "135 iter 1352 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689468 0.689468]\n",
      "135 iter 1353 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689472 0.689472]\n",
      "135 iter 1354 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689476 0.689476]\n",
      "135 iter 1355 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689479 0.689479]\n",
      "135 iter 1356 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689483 0.689483]\n",
      "135 iter 1357 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689485 0.689485]\n",
      "135 iter 1358 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689491 0.689491]\n",
      "135 iter 1359 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689494 0.689494]\n",
      "136 iter 1360 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689498 0.689498]\n",
      "136 iter 1361 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689502 0.689502]\n",
      "136 iter 1362 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689506 0.689506]\n",
      "136 iter 1363 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689509 0.689509]\n",
      "136 iter 1364 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689513 0.689513]\n",
      "136 iter 1365 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689516 0.689516]\n",
      "136 iter 1366 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689521 0.689521]\n",
      "136 iter 1367 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689522 0.689522]\n",
      "136 iter 1368 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689528 0.689528]\n",
      "136 iter 1369 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689532 0.689532]\n",
      "137 iter 1370 [D loss: 0.693237, acc.: 50.00%] [G loss: 0.689535 0.689535]\n",
      "137 iter 1371 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689539 0.689539]\n",
      "137 iter 1372 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689543 0.689543]\n",
      "137 iter 1373 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689546 0.689546]\n",
      "137 iter 1374 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689550 0.689550]\n",
      "137 iter 1375 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689553 0.689553]\n",
      "137 iter 1376 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689557 0.689557]\n",
      "137 iter 1377 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689560 0.689560]\n",
      "137 iter 1378 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689565 0.689565]\n",
      "137 iter 1379 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689568 0.689568]\n",
      "138 iter 1380 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689572 0.689572]\n",
      "138 iter 1381 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689576 0.689576]\n",
      "138 iter 1382 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689580 0.689580]\n",
      "138 iter 1383 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689583 0.689583]\n",
      "138 iter 1384 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689587 0.689587]\n",
      "138 iter 1385 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689590 0.689590]\n",
      "138 iter 1386 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689594 0.689594]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "138 iter 1387 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689597 0.689597]\n",
      "138 iter 1388 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689601 0.689601]\n",
      "138 iter 1389 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689605 0.689605]\n",
      "139 iter 1390 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689608 0.689608]\n",
      "139 iter 1391 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689612 0.689612]\n",
      "139 iter 1392 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689616 0.689616]\n",
      "139 iter 1393 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689619 0.689619]\n",
      "139 iter 1394 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689623 0.689623]\n",
      "139 iter 1395 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689626 0.689626]\n",
      "139 iter 1396 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689630 0.689630]\n",
      "139 iter 1397 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689633 0.689633]\n",
      "139 iter 1398 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689637 0.689637]\n",
      "139 iter 1399 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689641 0.689641]\n",
      "140 iter 1400 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689644 0.689644]\n",
      "140 iter 1401 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689648 0.689648]\n",
      "140 iter 1402 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689652 0.689652]\n",
      "140 iter 1403 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689655 0.689655]\n",
      "140 iter 1404 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689659 0.689659]\n",
      "140 iter 1405 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689662 0.689662]\n",
      "140 iter 1406 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689666 0.689666]\n",
      "140 iter 1407 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689669 0.689669]\n",
      "140 iter 1408 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689673 0.689673]\n",
      "140 iter 1409 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689677 0.689677]\n",
      "141 iter 1410 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689680 0.689680]\n",
      "141 iter 1411 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689684 0.689684]\n",
      "141 iter 1412 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689687 0.689687]\n",
      "141 iter 1413 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689691 0.689691]\n",
      "141 iter 1414 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689694 0.689694]\n",
      "141 iter 1415 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689698 0.689698]\n",
      "141 iter 1416 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689701 0.689701]\n",
      "141 iter 1417 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689705 0.689705]\n",
      "141 iter 1418 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689708 0.689708]\n",
      "141 iter 1419 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689712 0.689712]\n",
      "142 iter 1420 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689715 0.689715]\n",
      "142 iter 1421 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689719 0.689719]\n",
      "142 iter 1422 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689722 0.689722]\n",
      "142 iter 1423 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689726 0.689726]\n",
      "142 iter 1424 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689729 0.689729]\n",
      "142 iter 1425 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689733 0.689733]\n",
      "142 iter 1426 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689736 0.689736]\n",
      "142 iter 1427 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689740 0.689740]\n",
      "142 iter 1428 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689743 0.689743]\n",
      "142 iter 1429 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689747 0.689747]\n",
      "143 iter 1430 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689750 0.689750]\n",
      "143 iter 1431 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689754 0.689754]\n",
      "143 iter 1432 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689757 0.689757]\n",
      "143 iter 1433 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689761 0.689761]\n",
      "143 iter 1434 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689764 0.689764]\n",
      "143 iter 1435 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689768 0.689768]\n",
      "143 iter 1436 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689771 0.689771]\n",
      "143 iter 1437 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689774 0.689774]\n",
      "143 iter 1438 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689778 0.689778]\n",
      "143 iter 1439 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689781 0.689781]\n",
      "144 iter 1440 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689785 0.689785]\n",
      "144 iter 1441 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689788 0.689788]\n",
      "144 iter 1442 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689792 0.689792]\n",
      "144 iter 1443 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689795 0.689795]\n",
      "144 iter 1444 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689799 0.689799]\n",
      "144 iter 1445 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689802 0.689802]\n",
      "144 iter 1446 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689805 0.689805]\n",
      "144 iter 1447 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689809 0.689809]\n",
      "144 iter 1448 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689812 0.689812]\n",
      "144 iter 1449 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689816 0.689816]\n",
      "145 iter 1450 [D loss: 0.693236, acc.: 50.00%] [G loss: 0.689819 0.689819]\n",
      "145 iter 1451 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689822 0.689822]\n",
      "145 iter 1452 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689826 0.689826]\n",
      "145 iter 1453 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689829 0.689829]\n",
      "145 iter 1454 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689833 0.689833]\n",
      "145 iter 1455 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689836 0.689836]\n",
      "145 iter 1456 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689840 0.689840]\n",
      "145 iter 1457 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689843 0.689843]\n",
      "145 iter 1458 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689846 0.689846]\n",
      "145 iter 1459 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689850 0.689850]\n",
      "146 iter 1460 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689853 0.689853]\n",
      "146 iter 1461 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689856 0.689856]\n",
      "146 iter 1462 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689860 0.689860]\n",
      "146 iter 1463 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689863 0.689863]\n",
      "146 iter 1464 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689866 0.689866]\n",
      "146 iter 1465 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689870 0.689870]\n",
      "146 iter 1466 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689873 0.689873]\n",
      "146 iter 1467 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689876 0.689876]\n",
      "146 iter 1468 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689880 0.689880]\n",
      "146 iter 1469 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689883 0.689883]\n",
      "147 iter 1470 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689887 0.689887]\n",
      "147 iter 1471 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689890 0.689890]\n",
      "147 iter 1472 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689893 0.689893]\n",
      "147 iter 1473 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689897 0.689897]\n",
      "147 iter 1474 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689900 0.689900]\n",
      "147 iter 1475 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689903 0.689903]\n",
      "147 iter 1476 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689907 0.689907]\n",
      "147 iter 1477 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689910 0.689910]\n",
      "147 iter 1478 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689913 0.689913]\n",
      "147 iter 1479 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689917 0.689917]\n",
      "148 iter 1480 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689920 0.689920]\n",
      "148 iter 1481 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689923 0.689923]\n",
      "148 iter 1482 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689927 0.689927]\n",
      "148 iter 1483 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689930 0.689930]\n",
      "148 iter 1484 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689933 0.689933]\n",
      "148 iter 1485 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689936 0.689936]\n",
      "148 iter 1486 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689940 0.689940]\n",
      "148 iter 1487 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689943 0.689943]\n",
      "148 iter 1488 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689946 0.689946]\n",
      "148 iter 1489 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689949 0.689949]\n",
      "149 iter 1490 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689953 0.689953]\n",
      "149 iter 1491 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689956 0.689956]\n",
      "149 iter 1492 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689959 0.689959]\n",
      "149 iter 1493 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689963 0.689963]\n",
      "149 iter 1494 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689966 0.689966]\n",
      "149 iter 1495 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689969 0.689969]\n",
      "149 iter 1496 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689972 0.689972]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "149 iter 1497 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689976 0.689976]\n",
      "149 iter 1498 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689979 0.689979]\n",
      "149 iter 1499 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689982 0.689982]\n",
      "150 iter 1500 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689985 0.689985]\n",
      "150 iter 1501 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689988 0.689988]\n",
      "150 iter 1502 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689992 0.689992]\n",
      "150 iter 1503 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689995 0.689995]\n",
      "150 iter 1504 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.689998 0.689998]\n",
      "150 iter 1505 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690001 0.690001]\n",
      "150 iter 1506 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690005 0.690005]\n",
      "150 iter 1507 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690008 0.690008]\n",
      "150 iter 1508 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690011 0.690011]\n",
      "150 iter 1509 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690014 0.690014]\n",
      "Saving model at /srv/osirim/idumeur/trainings/new_model/training_9/checkpoints/ step 150\n",
      "151 iter 1510 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690018 0.690018]\n",
      "151 iter 1511 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690021 0.690021]\n",
      "151 iter 1512 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690024 0.690024]\n",
      "151 iter 1513 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690027 0.690027]\n",
      "151 iter 1514 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690030 0.690030]\n",
      "151 iter 1515 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690034 0.690034]\n",
      "151 iter 1516 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690037 0.690037]\n",
      "151 iter 1517 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690040 0.690040]\n",
      "151 iter 1518 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690043 0.690043]\n",
      "151 iter 1519 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690046 0.690046]\n",
      "152 iter 1520 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690050 0.690050]\n",
      "152 iter 1521 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690053 0.690053]\n",
      "152 iter 1522 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690056 0.690056]\n",
      "152 iter 1523 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690059 0.690059]\n",
      "152 iter 1524 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690062 0.690062]\n",
      "152 iter 1525 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690065 0.690065]\n",
      "152 iter 1526 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690068 0.690068]\n",
      "152 iter 1527 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690072 0.690072]\n",
      "152 iter 1528 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690075 0.690075]\n",
      "152 iter 1529 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690078 0.690078]\n",
      "153 iter 1530 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690081 0.690081]\n",
      "153 iter 1531 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690084 0.690084]\n",
      "153 iter 1532 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690087 0.690087]\n",
      "153 iter 1533 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690090 0.690090]\n",
      "153 iter 1534 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690094 0.690094]\n",
      "153 iter 1535 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690097 0.690097]\n",
      "153 iter 1536 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690100 0.690100]\n",
      "153 iter 1537 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690103 0.690103]\n",
      "153 iter 1538 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690106 0.690106]\n",
      "153 iter 1539 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690109 0.690109]\n",
      "154 iter 1540 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690112 0.690112]\n",
      "154 iter 1541 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690116 0.690116]\n",
      "154 iter 1542 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690119 0.690119]\n",
      "154 iter 1543 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690122 0.690122]\n",
      "154 iter 1544 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690125 0.690125]\n",
      "154 iter 1545 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690128 0.690128]\n",
      "154 iter 1546 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690131 0.690131]\n",
      "154 iter 1547 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690134 0.690134]\n",
      "154 iter 1548 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690137 0.690137]\n",
      "154 iter 1549 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690140 0.690140]\n",
      "155 iter 1550 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690143 0.690143]\n",
      "155 iter 1551 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690147 0.690147]\n",
      "155 iter 1552 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690150 0.690150]\n",
      "155 iter 1553 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690153 0.690153]\n",
      "155 iter 1554 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690156 0.690156]\n",
      "155 iter 1555 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690159 0.690159]\n",
      "155 iter 1556 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690162 0.690162]\n",
      "155 iter 1557 [D loss: 0.693235, acc.: 50.00%] [G loss: 0.690165 0.690165]\n",
      "155 iter 1558 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690168 0.690168]\n",
      "155 iter 1559 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690171 0.690171]\n",
      "156 iter 1560 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690174 0.690174]\n",
      "156 iter 1561 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690177 0.690177]\n",
      "156 iter 1562 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690180 0.690180]\n",
      "156 iter 1563 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690183 0.690183]\n",
      "156 iter 1564 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690186 0.690186]\n",
      "156 iter 1565 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690189 0.690189]\n",
      "156 iter 1566 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690192 0.690192]\n",
      "156 iter 1567 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690195 0.690195]\n",
      "156 iter 1568 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690198 0.690198]\n",
      "156 iter 1569 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690201 0.690201]\n",
      "157 iter 1570 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690204 0.690204]\n",
      "157 iter 1571 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690208 0.690208]\n",
      "157 iter 1572 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690211 0.690211]\n",
      "157 iter 1573 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690214 0.690214]\n",
      "157 iter 1574 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690217 0.690217]\n",
      "157 iter 1575 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690220 0.690220]\n",
      "157 iter 1576 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690223 0.690223]\n",
      "157 iter 1577 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690225 0.690225]\n",
      "157 iter 1578 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690229 0.690229]\n",
      "157 iter 1579 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690232 0.690232]\n",
      "158 iter 1580 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690234 0.690234]\n",
      "158 iter 1581 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690238 0.690238]\n",
      "158 iter 1582 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690241 0.690241]\n",
      "158 iter 1583 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690243 0.690243]\n",
      "158 iter 1584 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690247 0.690247]\n",
      "158 iter 1585 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690249 0.690249]\n",
      "158 iter 1586 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690252 0.690252]\n",
      "158 iter 1587 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690255 0.690255]\n",
      "158 iter 1588 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690258 0.690258]\n",
      "158 iter 1589 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690261 0.690261]\n",
      "159 iter 1590 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690264 0.690264]\n",
      "159 iter 1591 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690267 0.690267]\n",
      "159 iter 1592 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690270 0.690270]\n",
      "159 iter 1593 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690273 0.690273]\n",
      "159 iter 1594 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690276 0.690276]\n",
      "159 iter 1595 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690279 0.690279]\n",
      "159 iter 1596 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690282 0.690282]\n",
      "159 iter 1597 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690285 0.690285]\n",
      "159 iter 1598 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690288 0.690288]\n",
      "159 iter 1599 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690291 0.690291]\n",
      "160 iter 1600 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690294 0.690294]\n",
      "160 iter 1601 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690297 0.690297]\n",
      "160 iter 1602 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690300 0.690300]\n",
      "160 iter 1603 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690303 0.690303]\n",
      "160 iter 1604 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690306 0.690306]\n",
      "160 iter 1605 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690308 0.690308]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "160 iter 1606 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690311 0.690311]\n",
      "160 iter 1607 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690314 0.690314]\n",
      "160 iter 1608 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690317 0.690317]\n",
      "160 iter 1609 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690320 0.690320]\n",
      "161 iter 1610 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690323 0.690323]\n",
      "161 iter 1611 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690326 0.690326]\n",
      "161 iter 1612 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690329 0.690329]\n",
      "161 iter 1613 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690332 0.690332]\n",
      "161 iter 1614 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690335 0.690335]\n",
      "161 iter 1615 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690338 0.690338]\n",
      "161 iter 1616 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690340 0.690340]\n",
      "161 iter 1617 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690343 0.690343]\n",
      "161 iter 1618 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690346 0.690346]\n",
      "161 iter 1619 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690349 0.690349]\n",
      "162 iter 1620 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690352 0.690352]\n",
      "162 iter 1621 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690355 0.690355]\n",
      "162 iter 1622 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690358 0.690358]\n",
      "162 iter 1623 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690360 0.690360]\n",
      "162 iter 1624 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690363 0.690363]\n",
      "162 iter 1625 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690366 0.690366]\n",
      "162 iter 1626 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690369 0.690369]\n",
      "162 iter 1627 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690372 0.690372]\n",
      "162 iter 1628 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690375 0.690375]\n",
      "162 iter 1629 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690378 0.690378]\n",
      "163 iter 1630 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690381 0.690381]\n",
      "163 iter 1631 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690383 0.690383]\n",
      "163 iter 1632 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690386 0.690386]\n",
      "163 iter 1633 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690389 0.690389]\n",
      "163 iter 1634 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690392 0.690392]\n",
      "163 iter 1635 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690395 0.690395]\n",
      "163 iter 1636 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690398 0.690398]\n",
      "163 iter 1637 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690400 0.690400]\n",
      "163 iter 1638 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690403 0.690403]\n",
      "163 iter 1639 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690406 0.690406]\n",
      "164 iter 1640 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690409 0.690409]\n",
      "164 iter 1641 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690412 0.690412]\n",
      "164 iter 1642 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690414 0.690414]\n",
      "164 iter 1643 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690417 0.690417]\n",
      "164 iter 1644 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690420 0.690420]\n",
      "164 iter 1645 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690423 0.690423]\n",
      "164 iter 1646 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690426 0.690426]\n",
      "164 iter 1647 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690428 0.690428]\n",
      "164 iter 1648 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690431 0.690431]\n",
      "164 iter 1649 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690434 0.690434]\n",
      "165 iter 1650 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690437 0.690437]\n",
      "165 iter 1651 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690440 0.690440]\n",
      "165 iter 1652 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690442 0.690442]\n",
      "165 iter 1653 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690445 0.690445]\n",
      "165 iter 1654 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690448 0.690448]\n",
      "165 iter 1655 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690451 0.690451]\n",
      "165 iter 1656 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690454 0.690454]\n",
      "165 iter 1657 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690456 0.690456]\n",
      "165 iter 1658 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690459 0.690459]\n",
      "165 iter 1659 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690462 0.690462]\n",
      "166 iter 1660 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690465 0.690465]\n",
      "166 iter 1661 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690467 0.690467]\n",
      "166 iter 1662 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690470 0.690470]\n",
      "166 iter 1663 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690473 0.690473]\n",
      "166 iter 1664 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690476 0.690476]\n",
      "166 iter 1665 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690479 0.690479]\n",
      "166 iter 1666 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690481 0.690481]\n",
      "166 iter 1667 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690484 0.690484]\n",
      "166 iter 1668 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690487 0.690487]\n",
      "166 iter 1669 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690489 0.690489]\n",
      "167 iter 1670 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690492 0.690492]\n",
      "167 iter 1671 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690495 0.690495]\n",
      "167 iter 1672 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690498 0.690498]\n",
      "167 iter 1673 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690500 0.690500]\n",
      "167 iter 1674 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690503 0.690503]\n",
      "167 iter 1675 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690506 0.690506]\n",
      "167 iter 1676 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690509 0.690509]\n",
      "167 iter 1677 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690511 0.690511]\n",
      "167 iter 1678 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690514 0.690514]\n",
      "167 iter 1679 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690517 0.690517]\n",
      "168 iter 1680 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690519 0.690519]\n",
      "168 iter 1681 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690522 0.690522]\n",
      "168 iter 1682 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690525 0.690525]\n",
      "168 iter 1683 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690527 0.690527]\n",
      "168 iter 1684 [D loss: 0.693234, acc.: 50.00%] [G loss: 0.690530 0.690530]\n",
      "168 iter 1685 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690533 0.690533]\n",
      "168 iter 1686 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690536 0.690536]\n",
      "168 iter 1687 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690538 0.690538]\n",
      "168 iter 1688 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690541 0.690541]\n",
      "168 iter 1689 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690544 0.690544]\n",
      "169 iter 1690 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690546 0.690546]\n",
      "169 iter 1691 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690549 0.690549]\n",
      "169 iter 1692 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690552 0.690552]\n",
      "169 iter 1693 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690554 0.690554]\n",
      "169 iter 1694 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690557 0.690557]\n",
      "169 iter 1695 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690560 0.690560]\n",
      "169 iter 1696 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690562 0.690562]\n",
      "169 iter 1697 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690565 0.690565]\n",
      "169 iter 1698 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690568 0.690568]\n",
      "169 iter 1699 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690570 0.690570]\n",
      "170 iter 1700 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690573 0.690573]\n",
      "170 iter 1701 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690576 0.690576]\n",
      "170 iter 1702 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690578 0.690578]\n",
      "170 iter 1703 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690581 0.690581]\n",
      "170 iter 1704 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690584 0.690584]\n",
      "170 iter 1705 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690586 0.690586]\n",
      "170 iter 1706 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690589 0.690589]\n",
      "170 iter 1707 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690592 0.690592]\n",
      "170 iter 1708 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690594 0.690594]\n",
      "170 iter 1709 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690597 0.690597]\n",
      "171 iter 1710 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690599 0.690599]\n",
      "171 iter 1711 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690602 0.690602]\n",
      "171 iter 1712 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690605 0.690605]\n",
      "171 iter 1713 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690607 0.690607]\n",
      "171 iter 1714 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690610 0.690610]\n",
      "171 iter 1715 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690613 0.690613]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "171 iter 1716 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690615 0.690615]\n",
      "171 iter 1717 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690618 0.690618]\n",
      "171 iter 1718 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690620 0.690620]\n",
      "171 iter 1719 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690623 0.690623]\n",
      "172 iter 1720 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690626 0.690626]\n",
      "172 iter 1721 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690628 0.690628]\n",
      "172 iter 1722 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690631 0.690631]\n",
      "172 iter 1723 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690633 0.690633]\n",
      "172 iter 1724 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690636 0.690636]\n",
      "172 iter 1725 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690639 0.690639]\n",
      "172 iter 1726 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690641 0.690641]\n",
      "172 iter 1727 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690644 0.690644]\n",
      "172 iter 1728 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690646 0.690646]\n",
      "172 iter 1729 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690649 0.690649]\n",
      "173 iter 1730 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690652 0.690652]\n",
      "173 iter 1731 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690654 0.690654]\n",
      "173 iter 1732 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690657 0.690657]\n",
      "173 iter 1733 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690659 0.690659]\n",
      "173 iter 1734 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690662 0.690662]\n",
      "173 iter 1735 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690664 0.690664]\n",
      "173 iter 1736 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690667 0.690667]\n",
      "173 iter 1737 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690669 0.690669]\n",
      "173 iter 1738 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690672 0.690672]\n",
      "173 iter 1739 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690675 0.690675]\n",
      "174 iter 1740 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690677 0.690677]\n",
      "174 iter 1741 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690680 0.690680]\n",
      "174 iter 1742 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690682 0.690682]\n",
      "174 iter 1743 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690685 0.690685]\n",
      "174 iter 1744 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690687 0.690687]\n",
      "174 iter 1745 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690690 0.690690]\n",
      "174 iter 1746 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690692 0.690692]\n",
      "174 iter 1747 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690695 0.690695]\n",
      "174 iter 1748 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690697 0.690697]\n",
      "174 iter 1749 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690700 0.690700]\n",
      "175 iter 1750 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690702 0.690702]\n",
      "175 iter 1751 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690705 0.690705]\n",
      "175 iter 1752 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690708 0.690708]\n",
      "175 iter 1753 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690710 0.690710]\n",
      "175 iter 1754 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690713 0.690713]\n",
      "175 iter 1755 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690715 0.690715]\n",
      "175 iter 1756 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690718 0.690718]\n",
      "175 iter 1757 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690720 0.690720]\n",
      "175 iter 1758 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690723 0.690723]\n",
      "175 iter 1759 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690725 0.690725]\n",
      "176 iter 1760 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690728 0.690728]\n",
      "176 iter 1761 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690730 0.690730]\n",
      "176 iter 1762 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690733 0.690733]\n",
      "176 iter 1763 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690735 0.690735]\n",
      "176 iter 1764 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690738 0.690738]\n",
      "176 iter 1765 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690740 0.690740]\n",
      "176 iter 1766 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690743 0.690743]\n",
      "176 iter 1767 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690745 0.690745]\n",
      "176 iter 1768 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690748 0.690748]\n",
      "176 iter 1769 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690750 0.690750]\n",
      "177 iter 1770 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690753 0.690753]\n",
      "177 iter 1771 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690755 0.690755]\n",
      "177 iter 1772 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690758 0.690758]\n",
      "177 iter 1773 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690760 0.690760]\n",
      "177 iter 1774 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690762 0.690762]\n",
      "177 iter 1775 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690765 0.690765]\n",
      "177 iter 1776 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690767 0.690767]\n",
      "177 iter 1777 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690770 0.690770]\n",
      "177 iter 1778 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690772 0.690772]\n",
      "177 iter 1779 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690775 0.690775]\n",
      "178 iter 1780 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690777 0.690777]\n",
      "178 iter 1781 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690780 0.690780]\n",
      "178 iter 1782 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690782 0.690782]\n",
      "178 iter 1783 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690785 0.690785]\n",
      "178 iter 1784 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690787 0.690787]\n",
      "178 iter 1785 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690789 0.690789]\n",
      "178 iter 1786 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690792 0.690792]\n",
      "178 iter 1787 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690794 0.690794]\n",
      "178 iter 1788 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690797 0.690797]\n",
      "178 iter 1789 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690799 0.690799]\n",
      "179 iter 1790 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690802 0.690802]\n",
      "179 iter 1791 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690804 0.690804]\n",
      "179 iter 1792 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690806 0.690806]\n",
      "179 iter 1793 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690809 0.690809]\n",
      "179 iter 1794 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690811 0.690811]\n",
      "179 iter 1795 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690814 0.690814]\n",
      "179 iter 1796 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690816 0.690816]\n",
      "179 iter 1797 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690819 0.690819]\n",
      "179 iter 1798 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690821 0.690821]\n",
      "179 iter 1799 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690823 0.690823]\n",
      "180 iter 1800 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690826 0.690826]\n",
      "180 iter 1801 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690828 0.690828]\n",
      "180 iter 1802 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690831 0.690831]\n",
      "180 iter 1803 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690833 0.690833]\n",
      "180 iter 1804 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690835 0.690835]\n",
      "180 iter 1805 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690838 0.690838]\n",
      "180 iter 1806 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690840 0.690840]\n",
      "180 iter 1807 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690842 0.690842]\n",
      "180 iter 1808 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690845 0.690845]\n",
      "180 iter 1809 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690847 0.690847]\n",
      "181 iter 1810 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690850 0.690850]\n",
      "181 iter 1811 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690852 0.690852]\n",
      "181 iter 1812 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690854 0.690854]\n",
      "181 iter 1813 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690857 0.690857]\n",
      "181 iter 1814 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690859 0.690859]\n",
      "181 iter 1815 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690861 0.690861]\n",
      "181 iter 1816 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690864 0.690864]\n",
      "181 iter 1817 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690866 0.690866]\n",
      "181 iter 1818 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690869 0.690869]\n",
      "181 iter 1819 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690871 0.690871]\n",
      "182 iter 1820 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690873 0.690873]\n",
      "182 iter 1821 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690876 0.690876]\n",
      "182 iter 1822 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690878 0.690878]\n",
      "182 iter 1823 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690880 0.690880]\n",
      "182 iter 1824 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690883 0.690883]\n",
      "182 iter 1825 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690885 0.690885]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "182 iter 1826 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690887 0.690887]\n",
      "182 iter 1827 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690890 0.690890]\n",
      "182 iter 1828 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690892 0.690892]\n",
      "182 iter 1829 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690894 0.690894]\n",
      "183 iter 1830 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690897 0.690897]\n",
      "183 iter 1831 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690899 0.690899]\n",
      "183 iter 1832 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690901 0.690901]\n",
      "183 iter 1833 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690904 0.690904]\n",
      "183 iter 1834 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690906 0.690906]\n",
      "183 iter 1835 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690908 0.690908]\n",
      "183 iter 1836 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690911 0.690911]\n",
      "183 iter 1837 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690913 0.690913]\n",
      "183 iter 1838 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690915 0.690915]\n",
      "183 iter 1839 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690918 0.690918]\n",
      "184 iter 1840 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690920 0.690920]\n",
      "184 iter 1841 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690922 0.690922]\n",
      "184 iter 1842 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690925 0.690925]\n",
      "184 iter 1843 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690927 0.690927]\n",
      "184 iter 1844 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690929 0.690929]\n",
      "184 iter 1845 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690931 0.690931]\n",
      "184 iter 1846 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690934 0.690934]\n",
      "184 iter 1847 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690936 0.690936]\n",
      "184 iter 1848 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690938 0.690938]\n",
      "184 iter 1849 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690941 0.690941]\n",
      "185 iter 1850 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690943 0.690943]\n",
      "185 iter 1851 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690945 0.690945]\n",
      "185 iter 1852 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690947 0.690947]\n",
      "185 iter 1853 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690950 0.690950]\n",
      "185 iter 1854 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690952 0.690952]\n",
      "185 iter 1855 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690954 0.690954]\n",
      "185 iter 1856 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690957 0.690957]\n",
      "185 iter 1857 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690959 0.690959]\n",
      "185 iter 1858 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.690961 0.690961]\n",
      "185 iter 1859 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690963 0.690963]\n",
      "186 iter 1860 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690966 0.690966]\n",
      "186 iter 1861 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690968 0.690968]\n",
      "186 iter 1862 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690970 0.690970]\n",
      "186 iter 1863 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690973 0.690973]\n",
      "186 iter 1864 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690975 0.690975]\n",
      "186 iter 1865 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.690977 0.690977]\n",
      "186 iter 1866 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690979 0.690979]\n",
      "186 iter 1867 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690981 0.690981]\n",
      "186 iter 1868 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.690984 0.690984]\n",
      "186 iter 1869 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.690986 0.690986]\n",
      "187 iter 1870 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690988 0.690988]\n",
      "187 iter 1871 [D loss: 0.693233, acc.: 50.00%] [G loss: 0.690990 0.690990]\n",
      "187 iter 1872 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.690993 0.690993]\n",
      "187 iter 1873 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.690995 0.690995]\n",
      "187 iter 1874 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.690997 0.690997]\n",
      "187 iter 1875 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.690999 0.690999]\n",
      "187 iter 1876 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691002 0.691002]\n",
      "187 iter 1877 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691004 0.691004]\n",
      "187 iter 1878 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691006 0.691006]\n",
      "187 iter 1879 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691008 0.691008]\n",
      "188 iter 1880 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691010 0.691010]\n",
      "188 iter 1881 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691013 0.691013]\n",
      "188 iter 1882 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691015 0.691015]\n",
      "188 iter 1883 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691017 0.691017]\n",
      "188 iter 1884 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691019 0.691019]\n",
      "188 iter 1885 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691022 0.691022]\n",
      "188 iter 1886 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691024 0.691024]\n",
      "188 iter 1887 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691026 0.691026]\n",
      "188 iter 1888 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691028 0.691028]\n",
      "188 iter 1889 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691030 0.691030]\n",
      "189 iter 1890 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691033 0.691033]\n",
      "189 iter 1891 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691035 0.691035]\n",
      "189 iter 1892 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691037 0.691037]\n",
      "189 iter 1893 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691039 0.691039]\n",
      "189 iter 1894 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691041 0.691041]\n",
      "189 iter 1895 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691044 0.691044]\n",
      "189 iter 1896 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691046 0.691046]\n",
      "189 iter 1897 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691048 0.691048]\n",
      "189 iter 1898 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691050 0.691050]\n",
      "189 iter 1899 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691052 0.691052]\n",
      "190 iter 1900 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691055 0.691055]\n",
      "190 iter 1901 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691057 0.691057]\n",
      "190 iter 1902 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691059 0.691059]\n",
      "190 iter 1903 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691061 0.691061]\n",
      "190 iter 1904 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691063 0.691063]\n",
      "190 iter 1905 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691065 0.691065]\n",
      "190 iter 1906 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691067 0.691067]\n",
      "190 iter 1907 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691070 0.691070]\n",
      "190 iter 1908 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691072 0.691072]\n",
      "190 iter 1909 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691074 0.691074]\n",
      "191 iter 1910 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691076 0.691076]\n",
      "191 iter 1911 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691078 0.691078]\n",
      "191 iter 1912 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691080 0.691080]\n",
      "191 iter 1913 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691083 0.691083]\n",
      "191 iter 1914 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691085 0.691085]\n",
      "191 iter 1915 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691087 0.691087]\n",
      "191 iter 1916 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691089 0.691089]\n",
      "191 iter 1917 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691091 0.691091]\n",
      "191 iter 1918 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691093 0.691093]\n",
      "191 iter 1919 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691095 0.691095]\n",
      "192 iter 1920 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691097 0.691097]\n",
      "192 iter 1921 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691100 0.691100]\n",
      "192 iter 1922 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691102 0.691102]\n",
      "192 iter 1923 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691104 0.691104]\n",
      "192 iter 1924 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691106 0.691106]\n",
      "192 iter 1925 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691108 0.691108]\n",
      "192 iter 1926 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691110 0.691110]\n",
      "192 iter 1927 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691112 0.691112]\n",
      "192 iter 1928 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691114 0.691114]\n",
      "192 iter 1929 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691117 0.691117]\n",
      "193 iter 1930 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691119 0.691119]\n",
      "193 iter 1931 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691121 0.691121]\n",
      "193 iter 1932 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691123 0.691123]\n",
      "193 iter 1933 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691125 0.691125]\n",
      "193 iter 1934 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691127 0.691127]\n",
      "193 iter 1935 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691129 0.691129]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "193 iter 1936 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691131 0.691131]\n",
      "193 iter 1937 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691133 0.691133]\n",
      "193 iter 1938 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691136 0.691136]\n",
      "193 iter 1939 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691138 0.691138]\n",
      "194 iter 1940 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691140 0.691140]\n",
      "194 iter 1941 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691142 0.691142]\n",
      "194 iter 1942 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691144 0.691144]\n",
      "194 iter 1943 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691146 0.691146]\n",
      "194 iter 1944 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691148 0.691148]\n",
      "194 iter 1945 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691150 0.691150]\n",
      "194 iter 1946 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691152 0.691152]\n",
      "194 iter 1947 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691154 0.691154]\n",
      "194 iter 1948 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691156 0.691156]\n",
      "194 iter 1949 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691158 0.691158]\n",
      "195 iter 1950 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691161 0.691161]\n",
      "195 iter 1951 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691163 0.691163]\n",
      "195 iter 1952 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691165 0.691165]\n",
      "195 iter 1953 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691167 0.691167]\n",
      "195 iter 1954 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691169 0.691169]\n",
      "195 iter 1955 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691171 0.691171]\n",
      "195 iter 1956 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691173 0.691173]\n",
      "195 iter 1957 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691175 0.691175]\n",
      "195 iter 1958 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691177 0.691177]\n",
      "195 iter 1959 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691179 0.691179]\n",
      "196 iter 1960 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691181 0.691181]\n",
      "196 iter 1961 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691183 0.691183]\n",
      "196 iter 1962 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691185 0.691185]\n",
      "196 iter 1963 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691187 0.691187]\n",
      "196 iter 1964 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691189 0.691189]\n",
      "196 iter 1965 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691191 0.691191]\n",
      "196 iter 1966 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691193 0.691193]\n",
      "196 iter 1967 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691195 0.691195]\n",
      "196 iter 1968 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691197 0.691197]\n",
      "196 iter 1969 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691200 0.691200]\n",
      "197 iter 1970 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691201 0.691201]\n",
      "197 iter 1971 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691203 0.691203]\n",
      "197 iter 1972 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691205 0.691205]\n",
      "197 iter 1973 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691208 0.691208]\n",
      "197 iter 1974 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691210 0.691210]\n",
      "197 iter 1975 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691212 0.691212]\n",
      "197 iter 1976 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691214 0.691214]\n",
      "197 iter 1977 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691216 0.691216]\n",
      "197 iter 1978 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691218 0.691218]\n",
      "197 iter 1979 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691220 0.691220]\n",
      "198 iter 1980 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691222 0.691222]\n",
      "198 iter 1981 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691224 0.691224]\n",
      "198 iter 1982 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691226 0.691226]\n",
      "198 iter 1983 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691228 0.691228]\n",
      "198 iter 1984 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691230 0.691230]\n",
      "198 iter 1985 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691232 0.691232]\n",
      "198 iter 1986 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691234 0.691234]\n",
      "198 iter 1987 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691236 0.691236]\n",
      "198 iter 1988 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691238 0.691238]\n",
      "198 iter 1989 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691240 0.691240]\n",
      "199 iter 1990 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691242 0.691242]\n",
      "199 iter 1991 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691244 0.691244]\n",
      "199 iter 1992 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691246 0.691246]\n",
      "199 iter 1993 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691248 0.691248]\n",
      "199 iter 1994 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691250 0.691250]\n",
      "199 iter 1995 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691252 0.691252]\n",
      "199 iter 1996 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691254 0.691254]\n",
      "199 iter 1997 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691256 0.691256]\n",
      "199 iter 1998 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691257 0.691257]\n",
      "199 iter 1999 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691259 0.691259]\n",
      "200 iter 2000 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691261 0.691261]\n",
      "200 iter 2001 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691263 0.691263]\n",
      "200 iter 2002 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691265 0.691265]\n",
      "200 iter 2003 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691267 0.691267]\n",
      "200 iter 2004 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691269 0.691269]\n",
      "200 iter 2005 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691271 0.691271]\n",
      "200 iter 2006 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691273 0.691273]\n",
      "200 iter 2007 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691275 0.691275]\n",
      "200 iter 2008 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691277 0.691277]\n",
      "200 iter 2009 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691279 0.691279]\n",
      "Saving model at /srv/osirim/idumeur/trainings/new_model/training_9/checkpoints/ step 200\n",
      "201 iter 2010 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691281 0.691281]\n",
      "201 iter 2011 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691283 0.691283]\n",
      "201 iter 2012 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691285 0.691285]\n",
      "201 iter 2013 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691287 0.691287]\n",
      "201 iter 2014 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691289 0.691289]\n",
      "201 iter 2015 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691291 0.691291]\n",
      "201 iter 2016 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691293 0.691293]\n",
      "201 iter 2017 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691295 0.691295]\n",
      "201 iter 2018 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691297 0.691297]\n",
      "201 iter 2019 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691298 0.691298]\n",
      "202 iter 2020 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691300 0.691300]\n",
      "202 iter 2021 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691302 0.691302]\n",
      "202 iter 2022 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691304 0.691304]\n",
      "202 iter 2023 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691306 0.691306]\n",
      "202 iter 2024 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691308 0.691308]\n",
      "202 iter 2025 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691310 0.691310]\n",
      "202 iter 2026 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691312 0.691312]\n",
      "202 iter 2027 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691314 0.691314]\n",
      "202 iter 2028 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691316 0.691316]\n",
      "202 iter 2029 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691318 0.691318]\n",
      "203 iter 2030 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691320 0.691320]\n",
      "203 iter 2031 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691322 0.691322]\n",
      "203 iter 2032 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691323 0.691323]\n",
      "203 iter 2033 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691325 0.691325]\n",
      "203 iter 2034 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691327 0.691327]\n",
      "203 iter 2035 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691329 0.691329]\n",
      "203 iter 2036 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691331 0.691331]\n",
      "203 iter 2037 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691333 0.691333]\n",
      "203 iter 2038 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691335 0.691335]\n",
      "203 iter 2039 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691337 0.691337]\n",
      "204 iter 2040 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691339 0.691339]\n",
      "204 iter 2041 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691340 0.691340]\n",
      "204 iter 2042 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691342 0.691342]\n",
      "204 iter 2043 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691344 0.691344]\n",
      "204 iter 2044 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691346 0.691346]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "204 iter 2045 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691348 0.691348]\n",
      "204 iter 2046 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691350 0.691350]\n",
      "204 iter 2047 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691352 0.691352]\n",
      "204 iter 2048 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691354 0.691354]\n",
      "204 iter 2049 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691356 0.691356]\n",
      "205 iter 2050 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691357 0.691357]\n",
      "205 iter 2051 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691359 0.691359]\n",
      "205 iter 2052 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691361 0.691361]\n",
      "205 iter 2053 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691363 0.691363]\n",
      "205 iter 2054 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691365 0.691365]\n",
      "205 iter 2055 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691367 0.691367]\n",
      "205 iter 2056 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691369 0.691369]\n",
      "205 iter 2057 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691370 0.691370]\n",
      "205 iter 2058 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691372 0.691372]\n",
      "205 iter 2059 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691374 0.691374]\n",
      "206 iter 2060 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691376 0.691376]\n",
      "206 iter 2061 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691378 0.691378]\n",
      "206 iter 2062 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691380 0.691380]\n",
      "206 iter 2063 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691382 0.691382]\n",
      "206 iter 2064 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691383 0.691383]\n",
      "206 iter 2065 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691385 0.691385]\n",
      "206 iter 2066 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691387 0.691387]\n",
      "206 iter 2067 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691389 0.691389]\n",
      "206 iter 2068 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691391 0.691391]\n",
      "206 iter 2069 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691393 0.691393]\n",
      "207 iter 2070 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691394 0.691394]\n",
      "207 iter 2071 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691396 0.691396]\n",
      "207 iter 2072 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691398 0.691398]\n",
      "207 iter 2073 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691400 0.691400]\n",
      "207 iter 2074 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691402 0.691402]\n",
      "207 iter 2075 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691404 0.691404]\n",
      "207 iter 2076 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691405 0.691405]\n",
      "207 iter 2077 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691407 0.691407]\n",
      "207 iter 2078 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691409 0.691409]\n",
      "207 iter 2079 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691411 0.691411]\n",
      "208 iter 2080 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691413 0.691413]\n",
      "208 iter 2081 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691415 0.691415]\n",
      "208 iter 2082 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691416 0.691416]\n",
      "208 iter 2083 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691418 0.691418]\n",
      "208 iter 2084 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691420 0.691420]\n",
      "208 iter 2085 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691422 0.691422]\n",
      "208 iter 2086 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691424 0.691424]\n",
      "208 iter 2087 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691425 0.691425]\n",
      "208 iter 2088 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691427 0.691427]\n",
      "208 iter 2089 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691429 0.691429]\n",
      "209 iter 2090 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691431 0.691431]\n",
      "209 iter 2091 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691433 0.691433]\n",
      "209 iter 2092 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691434 0.691434]\n",
      "209 iter 2093 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691436 0.691436]\n",
      "209 iter 2094 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691438 0.691438]\n",
      "209 iter 2095 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691440 0.691440]\n",
      "209 iter 2096 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691442 0.691442]\n",
      "209 iter 2097 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691443 0.691443]\n",
      "209 iter 2098 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691445 0.691445]\n",
      "209 iter 2099 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691447 0.691447]\n",
      "210 iter 2100 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691449 0.691449]\n",
      "210 iter 2101 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691450 0.691450]\n",
      "210 iter 2102 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691452 0.691452]\n",
      "210 iter 2103 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691454 0.691454]\n",
      "210 iter 2104 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691456 0.691456]\n",
      "210 iter 2105 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691458 0.691458]\n",
      "210 iter 2106 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691459 0.691459]\n",
      "210 iter 2107 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691461 0.691461]\n",
      "210 iter 2108 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691463 0.691463]\n",
      "210 iter 2109 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691465 0.691465]\n",
      "211 iter 2110 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691467 0.691467]\n",
      "211 iter 2111 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691468 0.691468]\n",
      "211 iter 2112 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691470 0.691470]\n",
      "211 iter 2113 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691472 0.691472]\n",
      "211 iter 2114 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691474 0.691474]\n",
      "211 iter 2115 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691475 0.691475]\n",
      "211 iter 2116 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691477 0.691477]\n",
      "211 iter 2117 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691479 0.691479]\n",
      "211 iter 2118 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691481 0.691481]\n",
      "211 iter 2119 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691482 0.691482]\n",
      "212 iter 2120 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691484 0.691484]\n",
      "212 iter 2121 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691486 0.691486]\n",
      "212 iter 2122 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691487 0.691487]\n",
      "212 iter 2123 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691489 0.691489]\n",
      "212 iter 2124 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691491 0.691491]\n",
      "212 iter 2125 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691493 0.691493]\n",
      "212 iter 2126 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691494 0.691494]\n",
      "212 iter 2127 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691496 0.691496]\n",
      "212 iter 2128 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691498 0.691498]\n",
      "212 iter 2129 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691500 0.691500]\n",
      "213 iter 2130 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691501 0.691501]\n",
      "213 iter 2131 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691503 0.691503]\n",
      "213 iter 2132 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691505 0.691505]\n",
      "213 iter 2133 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691507 0.691507]\n",
      "213 iter 2134 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691508 0.691508]\n",
      "213 iter 2135 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691510 0.691510]\n",
      "213 iter 2136 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691512 0.691512]\n",
      "213 iter 2137 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691513 0.691513]\n",
      "213 iter 2138 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691515 0.691515]\n",
      "213 iter 2139 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691517 0.691517]\n",
      "214 iter 2140 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691519 0.691519]\n",
      "214 iter 2141 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691520 0.691520]\n",
      "214 iter 2142 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691522 0.691522]\n",
      "214 iter 2143 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691524 0.691524]\n",
      "214 iter 2144 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691525 0.691525]\n",
      "214 iter 2145 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691527 0.691527]\n",
      "214 iter 2146 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691529 0.691529]\n",
      "214 iter 2147 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691530 0.691530]\n",
      "214 iter 2148 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691532 0.691532]\n",
      "214 iter 2149 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691534 0.691534]\n",
      "215 iter 2150 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691536 0.691536]\n",
      "215 iter 2151 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691537 0.691537]\n",
      "215 iter 2152 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691539 0.691539]\n",
      "215 iter 2153 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691541 0.691541]\n",
      "215 iter 2154 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691542 0.691542]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "215 iter 2155 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691544 0.691544]\n",
      "215 iter 2156 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691546 0.691546]\n",
      "215 iter 2157 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691547 0.691547]\n",
      "215 iter 2158 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691549 0.691549]\n",
      "215 iter 2159 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691551 0.691551]\n",
      "216 iter 2160 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691552 0.691552]\n",
      "216 iter 2161 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691554 0.691554]\n",
      "216 iter 2162 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691556 0.691556]\n",
      "216 iter 2163 [D loss: 0.693232, acc.: 50.00%] [G loss: 0.691557 0.691557]\n",
      "216 iter 2164 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691559 0.691559]\n",
      "216 iter 2165 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691561 0.691561]\n",
      "216 iter 2166 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691562 0.691562]\n",
      "216 iter 2167 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691564 0.691564]\n",
      "216 iter 2168 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691566 0.691566]\n",
      "216 iter 2169 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691568 0.691568]\n",
      "217 iter 2170 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691569 0.691569]\n",
      "217 iter 2171 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691571 0.691571]\n",
      "217 iter 2172 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691572 0.691572]\n",
      "217 iter 2173 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691574 0.691574]\n",
      "217 iter 2174 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691576 0.691576]\n",
      "217 iter 2175 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691577 0.691577]\n",
      "217 iter 2176 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691579 0.691579]\n",
      "217 iter 2177 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691581 0.691581]\n",
      "217 iter 2178 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691582 0.691582]\n",
      "217 iter 2179 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691584 0.691584]\n",
      "218 iter 2180 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691586 0.691586]\n",
      "218 iter 2181 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691587 0.691587]\n",
      "218 iter 2182 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691589 0.691589]\n",
      "218 iter 2183 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691591 0.691591]\n",
      "218 iter 2184 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691592 0.691592]\n",
      "218 iter 2185 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691594 0.691594]\n",
      "218 iter 2186 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691596 0.691596]\n",
      "218 iter 2187 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691597 0.691597]\n",
      "218 iter 2188 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691599 0.691599]\n",
      "218 iter 2189 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691600 0.691600]\n",
      "219 iter 2190 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691602 0.691602]\n",
      "219 iter 2191 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691604 0.691604]\n",
      "219 iter 2192 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691605 0.691605]\n",
      "219 iter 2193 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691607 0.691607]\n",
      "219 iter 2194 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691608 0.691608]\n",
      "219 iter 2195 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691610 0.691610]\n",
      "219 iter 2196 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691612 0.691612]\n",
      "219 iter 2197 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691613 0.691613]\n",
      "219 iter 2198 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691615 0.691615]\n",
      "219 iter 2199 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691617 0.691617]\n",
      "220 iter 2200 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691618 0.691618]\n",
      "220 iter 2201 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691620 0.691620]\n",
      "220 iter 2202 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691621 0.691621]\n",
      "220 iter 2203 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691623 0.691623]\n",
      "220 iter 2204 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691625 0.691625]\n",
      "220 iter 2205 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691626 0.691626]\n",
      "220 iter 2206 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691628 0.691628]\n",
      "220 iter 2207 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691629 0.691629]\n",
      "220 iter 2208 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691631 0.691631]\n",
      "220 iter 2209 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691633 0.691633]\n",
      "221 iter 2210 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691634 0.691634]\n",
      "221 iter 2211 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691636 0.691636]\n",
      "221 iter 2212 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691637 0.691637]\n",
      "221 iter 2213 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691639 0.691639]\n",
      "221 iter 2214 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691641 0.691641]\n",
      "221 iter 2215 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691642 0.691642]\n",
      "221 iter 2216 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691644 0.691644]\n",
      "221 iter 2217 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691645 0.691645]\n",
      "221 iter 2218 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691647 0.691647]\n",
      "221 iter 2219 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691649 0.691649]\n",
      "222 iter 2220 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691650 0.691650]\n",
      "222 iter 2221 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691652 0.691652]\n",
      "222 iter 2222 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691653 0.691653]\n",
      "222 iter 2223 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691655 0.691655]\n",
      "222 iter 2224 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691656 0.691656]\n",
      "222 iter 2225 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691658 0.691658]\n",
      "222 iter 2226 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691660 0.691660]\n",
      "222 iter 2227 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691661 0.691661]\n",
      "222 iter 2228 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691663 0.691663]\n",
      "222 iter 2229 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691664 0.691664]\n",
      "223 iter 2230 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691666 0.691666]\n",
      "223 iter 2231 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691667 0.691667]\n",
      "223 iter 2232 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691669 0.691669]\n",
      "223 iter 2233 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691671 0.691671]\n",
      "223 iter 2234 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691672 0.691672]\n",
      "223 iter 2235 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691674 0.691674]\n",
      "223 iter 2236 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691675 0.691675]\n",
      "223 iter 2237 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691677 0.691677]\n",
      "223 iter 2238 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691678 0.691678]\n",
      "223 iter 2239 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691680 0.691680]\n",
      "224 iter 2240 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691681 0.691681]\n",
      "224 iter 2241 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691683 0.691683]\n",
      "224 iter 2242 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691685 0.691685]\n",
      "224 iter 2243 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691686 0.691686]\n",
      "224 iter 2244 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691688 0.691688]\n",
      "224 iter 2245 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691689 0.691689]\n",
      "224 iter 2246 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691691 0.691691]\n",
      "224 iter 2247 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691692 0.691692]\n",
      "224 iter 2248 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691694 0.691694]\n",
      "224 iter 2249 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691695 0.691695]\n",
      "225 iter 2250 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691697 0.691697]\n",
      "225 iter 2251 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691698 0.691698]\n",
      "225 iter 2252 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691700 0.691700]\n",
      "225 iter 2253 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691701 0.691701]\n",
      "225 iter 2254 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691703 0.691703]\n",
      "225 iter 2255 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691705 0.691705]\n",
      "225 iter 2256 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691706 0.691706]\n",
      "225 iter 2257 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691707 0.691707]\n",
      "225 iter 2258 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691709 0.691709]\n",
      "225 iter 2259 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691711 0.691711]\n",
      "226 iter 2260 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691712 0.691712]\n",
      "226 iter 2261 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691714 0.691714]\n",
      "226 iter 2262 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691715 0.691715]\n",
      "226 iter 2263 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691717 0.691717]\n",
      "226 iter 2264 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691718 0.691718]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "226 iter 2265 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691720 0.691720]\n",
      "226 iter 2266 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691721 0.691721]\n",
      "226 iter 2267 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691723 0.691723]\n",
      "226 iter 2268 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691724 0.691724]\n",
      "226 iter 2269 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691726 0.691726]\n",
      "227 iter 2270 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691727 0.691727]\n",
      "227 iter 2271 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691729 0.691729]\n",
      "227 iter 2272 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691730 0.691730]\n",
      "227 iter 2273 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691732 0.691732]\n",
      "227 iter 2274 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691733 0.691733]\n",
      "227 iter 2275 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691735 0.691735]\n",
      "227 iter 2276 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691736 0.691736]\n",
      "227 iter 2277 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691738 0.691738]\n",
      "227 iter 2278 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691739 0.691739]\n",
      "227 iter 2279 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691741 0.691741]\n",
      "228 iter 2280 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691742 0.691742]\n",
      "228 iter 2281 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691744 0.691744]\n",
      "228 iter 2282 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691745 0.691745]\n",
      "228 iter 2283 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691747 0.691747]\n",
      "228 iter 2284 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691748 0.691748]\n",
      "228 iter 2285 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691750 0.691750]\n",
      "228 iter 2286 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691751 0.691751]\n",
      "228 iter 2287 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691752 0.691752]\n",
      "228 iter 2288 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691754 0.691754]\n",
      "228 iter 2289 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691755 0.691755]\n",
      "229 iter 2290 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691757 0.691757]\n",
      "229 iter 2291 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691758 0.691758]\n",
      "229 iter 2292 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691760 0.691760]\n",
      "229 iter 2293 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691761 0.691761]\n",
      "229 iter 2294 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691763 0.691763]\n",
      "229 iter 2295 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691764 0.691764]\n",
      "229 iter 2296 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691766 0.691766]\n",
      "229 iter 2297 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691767 0.691767]\n",
      "229 iter 2298 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691769 0.691769]\n",
      "229 iter 2299 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691770 0.691770]\n",
      "230 iter 2300 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691772 0.691772]\n",
      "230 iter 2301 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691773 0.691773]\n",
      "230 iter 2302 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691774 0.691774]\n",
      "230 iter 2303 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691776 0.691776]\n",
      "230 iter 2304 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691777 0.691777]\n",
      "230 iter 2305 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691779 0.691779]\n",
      "230 iter 2306 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691780 0.691780]\n",
      "230 iter 2307 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691782 0.691782]\n",
      "230 iter 2308 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691783 0.691783]\n",
      "230 iter 2309 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691785 0.691785]\n",
      "231 iter 2310 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691786 0.691786]\n",
      "231 iter 2311 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691788 0.691788]\n",
      "231 iter 2312 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691789 0.691789]\n",
      "231 iter 2313 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691790 0.691790]\n",
      "231 iter 2314 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691792 0.691792]\n",
      "231 iter 2315 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691793 0.691793]\n",
      "231 iter 2316 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691795 0.691795]\n",
      "231 iter 2317 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691796 0.691796]\n",
      "231 iter 2318 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691798 0.691798]\n",
      "231 iter 2319 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691799 0.691799]\n",
      "232 iter 2320 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691800 0.691800]\n",
      "232 iter 2321 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691802 0.691802]\n",
      "232 iter 2322 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691803 0.691803]\n",
      "232 iter 2323 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691805 0.691805]\n",
      "232 iter 2324 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691806 0.691806]\n",
      "232 iter 2325 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691808 0.691808]\n",
      "232 iter 2326 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691809 0.691809]\n",
      "232 iter 2327 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691810 0.691810]\n",
      "232 iter 2328 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691812 0.691812]\n",
      "232 iter 2329 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691813 0.691813]\n",
      "233 iter 2330 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691815 0.691815]\n",
      "233 iter 2331 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691816 0.691816]\n",
      "233 iter 2332 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691818 0.691818]\n",
      "233 iter 2333 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691819 0.691819]\n",
      "233 iter 2334 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691820 0.691820]\n",
      "233 iter 2335 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691822 0.691822]\n",
      "233 iter 2336 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691823 0.691823]\n",
      "233 iter 2337 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691825 0.691825]\n",
      "233 iter 2338 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691826 0.691826]\n",
      "233 iter 2339 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691827 0.691827]\n",
      "234 iter 2340 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691829 0.691829]\n",
      "234 iter 2341 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691830 0.691830]\n",
      "234 iter 2342 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691832 0.691832]\n",
      "234 iter 2343 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691833 0.691833]\n",
      "234 iter 2344 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691834 0.691834]\n",
      "234 iter 2345 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691836 0.691836]\n",
      "234 iter 2346 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691837 0.691837]\n",
      "234 iter 2347 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691839 0.691839]\n",
      "234 iter 2348 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691840 0.691840]\n",
      "234 iter 2349 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691841 0.691841]\n",
      "235 iter 2350 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691843 0.691843]\n",
      "235 iter 2351 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691844 0.691844]\n",
      "235 iter 2352 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691846 0.691846]\n",
      "235 iter 2353 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691847 0.691847]\n",
      "235 iter 2354 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691848 0.691848]\n",
      "235 iter 2355 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691850 0.691850]\n",
      "235 iter 2356 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691851 0.691851]\n",
      "235 iter 2357 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691852 0.691852]\n",
      "235 iter 2358 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691854 0.691854]\n",
      "235 iter 2359 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691855 0.691855]\n",
      "236 iter 2360 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691857 0.691857]\n",
      "236 iter 2361 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691858 0.691858]\n",
      "236 iter 2362 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691859 0.691859]\n",
      "236 iter 2363 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691861 0.691861]\n",
      "236 iter 2364 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691862 0.691862]\n",
      "236 iter 2365 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691863 0.691863]\n",
      "236 iter 2366 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691865 0.691865]\n",
      "236 iter 2367 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691866 0.691866]\n",
      "236 iter 2368 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691868 0.691868]\n",
      "236 iter 2369 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691869 0.691869]\n",
      "237 iter 2370 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691870 0.691870]\n",
      "237 iter 2371 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691872 0.691872]\n",
      "237 iter 2372 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691873 0.691873]\n",
      "237 iter 2373 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691874 0.691874]\n",
      "237 iter 2374 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691876 0.691876]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "237 iter 2375 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691877 0.691877]\n",
      "237 iter 2376 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691878 0.691878]\n",
      "237 iter 2377 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691880 0.691880]\n",
      "237 iter 2378 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691881 0.691881]\n",
      "237 iter 2379 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691882 0.691882]\n",
      "238 iter 2380 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691884 0.691884]\n",
      "238 iter 2381 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691885 0.691885]\n",
      "238 iter 2382 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691886 0.691886]\n",
      "238 iter 2383 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691888 0.691888]\n",
      "238 iter 2384 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691889 0.691889]\n",
      "238 iter 2385 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691890 0.691890]\n",
      "238 iter 2386 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691892 0.691892]\n",
      "238 iter 2387 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691893 0.691893]\n",
      "238 iter 2388 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691894 0.691894]\n",
      "238 iter 2389 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691896 0.691896]\n",
      "239 iter 2390 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691897 0.691897]\n",
      "239 iter 2391 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691898 0.691898]\n",
      "239 iter 2392 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691900 0.691900]\n",
      "239 iter 2393 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691901 0.691901]\n",
      "239 iter 2394 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691902 0.691902]\n",
      "239 iter 2395 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691904 0.691904]\n",
      "239 iter 2396 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691905 0.691905]\n",
      "239 iter 2397 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691906 0.691906]\n",
      "239 iter 2398 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691908 0.691908]\n",
      "239 iter 2399 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691909 0.691909]\n",
      "240 iter 2400 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691910 0.691910]\n",
      "240 iter 2401 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691912 0.691912]\n",
      "240 iter 2402 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691913 0.691913]\n",
      "240 iter 2403 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691914 0.691914]\n",
      "240 iter 2404 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691916 0.691916]\n",
      "240 iter 2405 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691917 0.691917]\n",
      "240 iter 2406 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691918 0.691918]\n",
      "240 iter 2407 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691920 0.691920]\n",
      "240 iter 2408 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691921 0.691921]\n",
      "240 iter 2409 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691922 0.691922]\n",
      "241 iter 2410 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691924 0.691924]\n",
      "241 iter 2411 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691925 0.691925]\n",
      "241 iter 2412 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691926 0.691926]\n",
      "241 iter 2413 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691927 0.691927]\n",
      "241 iter 2414 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691929 0.691929]\n",
      "241 iter 2415 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691930 0.691930]\n",
      "241 iter 2416 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691931 0.691931]\n",
      "241 iter 2417 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691933 0.691933]\n",
      "241 iter 2418 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691934 0.691934]\n",
      "241 iter 2419 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691935 0.691935]\n",
      "242 iter 2420 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691936 0.691936]\n",
      "242 iter 2421 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691938 0.691938]\n",
      "242 iter 2422 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691939 0.691939]\n",
      "242 iter 2423 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691940 0.691940]\n",
      "242 iter 2424 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691942 0.691942]\n",
      "242 iter 2425 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691943 0.691943]\n",
      "242 iter 2426 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691944 0.691944]\n",
      "242 iter 2427 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691946 0.691946]\n",
      "242 iter 2428 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691947 0.691947]\n",
      "242 iter 2429 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691948 0.691948]\n",
      "243 iter 2430 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691949 0.691949]\n",
      "243 iter 2431 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691951 0.691951]\n",
      "243 iter 2432 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691952 0.691952]\n",
      "243 iter 2433 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691953 0.691953]\n",
      "243 iter 2434 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691955 0.691955]\n",
      "243 iter 2435 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691956 0.691956]\n",
      "243 iter 2436 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691957 0.691957]\n",
      "243 iter 2437 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691958 0.691958]\n",
      "243 iter 2438 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691960 0.691960]\n",
      "243 iter 2439 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691961 0.691961]\n",
      "244 iter 2440 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691962 0.691962]\n",
      "244 iter 2441 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691963 0.691963]\n",
      "244 iter 2442 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691965 0.691965]\n",
      "244 iter 2443 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691966 0.691966]\n",
      "244 iter 2444 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691967 0.691967]\n",
      "244 iter 2445 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691969 0.691969]\n",
      "244 iter 2446 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691970 0.691970]\n",
      "244 iter 2447 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691971 0.691971]\n",
      "244 iter 2448 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691972 0.691972]\n",
      "244 iter 2449 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691973 0.691973]\n",
      "245 iter 2450 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691975 0.691975]\n",
      "245 iter 2451 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691976 0.691976]\n",
      "245 iter 2452 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691977 0.691977]\n",
      "245 iter 2453 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691979 0.691979]\n",
      "245 iter 2454 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691980 0.691980]\n",
      "245 iter 2455 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691981 0.691981]\n",
      "245 iter 2456 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691982 0.691982]\n",
      "245 iter 2457 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691984 0.691984]\n",
      "245 iter 2458 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691985 0.691985]\n",
      "245 iter 2459 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691986 0.691986]\n",
      "246 iter 2460 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691987 0.691987]\n",
      "246 iter 2461 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691989 0.691989]\n",
      "246 iter 2462 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691990 0.691990]\n",
      "246 iter 2463 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691991 0.691991]\n",
      "246 iter 2464 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691992 0.691992]\n",
      "246 iter 2465 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691993 0.691993]\n",
      "246 iter 2466 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691995 0.691995]\n",
      "246 iter 2467 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691996 0.691996]\n",
      "246 iter 2468 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691997 0.691997]\n",
      "246 iter 2469 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691998 0.691998]\n",
      "247 iter 2470 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.691999 0.691999]\n",
      "247 iter 2471 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692001 0.692001]\n",
      "247 iter 2472 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692002 0.692002]\n",
      "247 iter 2473 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692003 0.692003]\n",
      "247 iter 2474 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692005 0.692005]\n",
      "247 iter 2475 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692006 0.692006]\n",
      "247 iter 2476 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692007 0.692007]\n",
      "247 iter 2477 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692008 0.692008]\n",
      "247 iter 2478 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692010 0.692010]\n",
      "247 iter 2479 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692011 0.692011]\n",
      "248 iter 2480 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692012 0.692012]\n",
      "248 iter 2481 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692013 0.692013]\n",
      "248 iter 2482 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692014 0.692014]\n",
      "248 iter 2483 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692016 0.692016]\n",
      "248 iter 2484 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692017 0.692017]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "248 iter 2485 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692018 0.692018]\n",
      "248 iter 2486 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692019 0.692019]\n",
      "248 iter 2487 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692020 0.692020]\n",
      "248 iter 2488 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692022 0.692022]\n",
      "248 iter 2489 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692023 0.692023]\n",
      "249 iter 2490 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692024 0.692024]\n",
      "249 iter 2491 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692025 0.692025]\n",
      "249 iter 2492 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692026 0.692026]\n",
      "249 iter 2493 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692028 0.692028]\n",
      "249 iter 2494 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692029 0.692029]\n",
      "249 iter 2495 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692030 0.692030]\n",
      "249 iter 2496 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692031 0.692031]\n",
      "249 iter 2497 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692032 0.692032]\n",
      "249 iter 2498 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692034 0.692034]\n",
      "249 iter 2499 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692035 0.692035]\n",
      "250 iter 2500 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692036 0.692036]\n",
      "250 iter 2501 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692037 0.692037]\n",
      "250 iter 2502 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692038 0.692038]\n",
      "250 iter 2503 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692040 0.692040]\n",
      "250 iter 2504 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692041 0.692041]\n",
      "250 iter 2505 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692042 0.692042]\n",
      "250 iter 2506 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692043 0.692043]\n",
      "250 iter 2507 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692044 0.692044]\n",
      "250 iter 2508 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692046 0.692046]\n",
      "250 iter 2509 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692047 0.692047]\n",
      "Saving model at /srv/osirim/idumeur/trainings/new_model/training_9/checkpoints/ step 250\n",
      "251 iter 2510 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692048 0.692048]\n",
      "251 iter 2511 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692049 0.692049]\n",
      "251 iter 2512 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692050 0.692050]\n",
      "251 iter 2513 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692051 0.692051]\n",
      "251 iter 2514 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692053 0.692053]\n",
      "251 iter 2515 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692054 0.692054]\n",
      "251 iter 2516 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692055 0.692055]\n",
      "251 iter 2517 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692056 0.692056]\n",
      "251 iter 2518 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692057 0.692057]\n",
      "251 iter 2519 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692059 0.692059]\n",
      "252 iter 2520 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692060 0.692060]\n",
      "252 iter 2521 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692061 0.692061]\n",
      "252 iter 2522 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692062 0.692062]\n",
      "252 iter 2523 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692063 0.692063]\n",
      "252 iter 2524 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692064 0.692064]\n",
      "252 iter 2525 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692065 0.692065]\n",
      "252 iter 2526 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692067 0.692067]\n",
      "252 iter 2527 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692068 0.692068]\n",
      "252 iter 2528 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692069 0.692069]\n",
      "252 iter 2529 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692070 0.692070]\n",
      "253 iter 2530 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692071 0.692071]\n",
      "253 iter 2531 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692073 0.692073]\n",
      "253 iter 2532 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692074 0.692074]\n",
      "253 iter 2533 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692075 0.692075]\n",
      "253 iter 2534 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692076 0.692076]\n",
      "253 iter 2535 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692077 0.692077]\n",
      "253 iter 2536 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692078 0.692078]\n",
      "253 iter 2537 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692079 0.692079]\n",
      "253 iter 2538 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692081 0.692081]\n",
      "253 iter 2539 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692082 0.692082]\n",
      "254 iter 2540 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692083 0.692083]\n",
      "254 iter 2541 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692084 0.692084]\n",
      "254 iter 2542 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692085 0.692085]\n",
      "254 iter 2543 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692086 0.692086]\n",
      "254 iter 2544 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692088 0.692088]\n",
      "254 iter 2545 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692089 0.692089]\n",
      "254 iter 2546 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692090 0.692090]\n",
      "254 iter 2547 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692091 0.692091]\n",
      "254 iter 2548 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692092 0.692092]\n",
      "254 iter 2549 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692093 0.692093]\n",
      "255 iter 2550 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692094 0.692094]\n",
      "255 iter 2551 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692095 0.692095]\n",
      "255 iter 2552 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692097 0.692097]\n",
      "255 iter 2553 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692098 0.692098]\n",
      "255 iter 2554 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692099 0.692099]\n",
      "255 iter 2555 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692100 0.692100]\n",
      "255 iter 2556 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692101 0.692101]\n",
      "255 iter 2557 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692102 0.692102]\n",
      "255 iter 2558 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692103 0.692103]\n",
      "255 iter 2559 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692104 0.692104]\n",
      "256 iter 2560 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692106 0.692106]\n",
      "256 iter 2561 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692107 0.692107]\n",
      "256 iter 2562 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692108 0.692108]\n",
      "256 iter 2563 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692109 0.692109]\n",
      "256 iter 2564 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692110 0.692110]\n",
      "256 iter 2565 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692111 0.692111]\n",
      "256 iter 2566 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692112 0.692112]\n",
      "256 iter 2567 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692113 0.692113]\n",
      "256 iter 2568 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692115 0.692115]\n",
      "256 iter 2569 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692116 0.692116]\n",
      "257 iter 2570 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692117 0.692117]\n",
      "257 iter 2571 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692118 0.692118]\n",
      "257 iter 2572 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692119 0.692119]\n",
      "257 iter 2573 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692120 0.692120]\n",
      "257 iter 2574 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692121 0.692121]\n",
      "257 iter 2575 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692122 0.692122]\n",
      "257 iter 2576 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692123 0.692123]\n",
      "257 iter 2577 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692125 0.692125]\n",
      "257 iter 2578 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692126 0.692126]\n",
      "257 iter 2579 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692127 0.692127]\n",
      "258 iter 2580 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692128 0.692128]\n",
      "258 iter 2581 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692129 0.692129]\n",
      "258 iter 2582 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692130 0.692130]\n",
      "258 iter 2583 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692131 0.692131]\n",
      "258 iter 2584 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692132 0.692132]\n",
      "258 iter 2585 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692133 0.692133]\n",
      "258 iter 2586 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692134 0.692134]\n",
      "258 iter 2587 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692136 0.692136]\n",
      "258 iter 2588 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692137 0.692137]\n",
      "258 iter 2589 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692138 0.692138]\n",
      "259 iter 2590 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692139 0.692139]\n",
      "259 iter 2591 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692140 0.692140]\n",
      "259 iter 2592 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692141 0.692141]\n",
      "259 iter 2593 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692142 0.692142]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "259 iter 2594 [D loss: 0.693231, acc.: 50.00%] [G loss: 0.692143 0.692143]\n"
     ]
    }
   ],
   "source": [
    "# Start the training.\n",
    "# NOTE(review): assumes `gan` was constructed in an earlier cell of this\n",
    "# notebook (via models.clean_gan and the yaml configs) — confirm before Run All.\n",
    "# Optionally snapshot the model/training yaml configs next to the checkpoints\n",
    "# by uncommenting the two calls below before launching the run:\n",
    "#saving_yaml(path_model, gan.model_dir)\n",
    "#saving_yaml(path_train, gan.this_training_dir)\n",
    "gan.train()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "training_env",
   "language": "python",
   "name": "training_env"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.11"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
