{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "name": "Copy of Compaire.ipynb",
      "provenance": [],
      "collapsed_sections": [],
      "include_colab_link": true
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    }
  },
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "view-in-github",
        "colab_type": "text"
      },
      "source": [
        "<a href=\"https://colab.research.google.com/github/BenYavor/Comparison_of_communication_Autoencoder_MI_GAN_WGAN/blob/master/Copy_of_Compaire.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "Y3Mvn1V30ejH",
        "colab_type": "code",
        "outputId": "eb374804-5864-4c06-d5ba-b0f779a476ed",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 644
        }
      },
      "source": [
        "# Environment setup: pin TensorFlow, then import everything the notebook needs.\n",
        "# NOTE(review): consider %pip instead of !pip so the install targets the kernel env.\n",
        "!pip install tensorflow==2.0.0\n",
        "from __future__ import absolute_import, division, print_function, unicode_literals  # no-op on Python 3; kept from the original\n",
        "\n",
        "import os\n",
        "import sys\n",
        "import time\n",
        "import warnings\n",
        "\n",
        "import numpy as np\n",
        "import pandas as pd\n",
        "import matplotlib as mpl\n",
        "import matplotlib.pyplot as plt\n",
        "from scipy import special\n",
        "from sklearn.preprocessing import OneHotEncoder\n",
        "\n",
        "# Silence the FutureWarnings TensorFlow emits at import time.\n",
        "with warnings.catch_warnings():\n",
        "    warnings.filterwarnings(\"ignore\", category=FutureWarning)\n",
        "    import tensorflow as tf\n",
        "from tensorflow import keras\n",
        "from tensorflow.keras import layers\n",
        "#from Clustering_Equalgrps.equal_groups import EqualGroupsKMeans\n",
        "\n",
        "%matplotlib inline\n",
        "\n",
        "assert sys.version_info >= (3, 5)\n",
        "\n",
        "mpl.rc('axes', labelsize=14)\n",
        "mpl.rc('xtick', labelsize=12)\n",
        "mpl.rc('ytick', labelsize=12)\n",
        "\n",
        "# Seed NumPy and TensorFlow for reproducibility.\n",
        "np.random.seed(42)\n",
        "tf.random.set_seed(42)\n",
        "tf.__version__"
      ],
      "execution_count": 1,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Requirement already satisfied: tensorflow==2.0.0 in /usr/local/lib/python3.6/dist-packages (2.0.0)\n",
            "Requirement already satisfied: tensorboard<2.1.0,>=2.0.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (2.0.1)\n",
            "Requirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (1.12.0)\n",
            "Requirement already satisfied: keras-applications>=1.0.8 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (1.0.8)\n",
            "Requirement already satisfied: numpy<2.0,>=1.16.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (1.17.3)\n",
            "Requirement already satisfied: opt-einsum>=2.3.2 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (3.1.0)\n",
            "Requirement already satisfied: protobuf>=3.6.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (3.10.0)\n",
            "Requirement already satisfied: tensorflow-estimator<2.1.0,>=2.0.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (2.0.1)\n",
            "Requirement already satisfied: grpcio>=1.8.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (1.15.0)\n",
            "Requirement already satisfied: google-pasta>=0.1.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (0.1.8)\n",
            "Requirement already satisfied: wheel>=0.26 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (0.33.6)\n",
            "Requirement already satisfied: gast==0.2.2 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (0.2.2)\n",
            "Requirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (1.1.0)\n",
            "Requirement already satisfied: astor>=0.6.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (0.8.0)\n",
            "Requirement already satisfied: wrapt>=1.11.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (1.11.2)\n",
            "Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (1.1.0)\n",
            "Requirement already satisfied: absl-py>=0.7.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.0.0) (0.8.1)\n",
            "Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (3.1.1)\n",
            "Requirement already satisfied: google-auth<2,>=1.6.3 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (1.7.0)\n",
            "Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (0.4.1)\n",
            "Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (0.16.0)\n",
            "Requirement already satisfied: setuptools>=41.0.0 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (41.4.0)\n",
            "Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from keras-applications>=1.0.8->tensorflow==2.0.0) (2.8.0)\n",
            "Requirement already satisfied: rsa<4.1,>=3.1.4 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (4.0)\n",
            "Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (0.2.7)\n",
            "Requirement already satisfied: cachetools<3.2,>=2.0.0 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (3.1.1)\n",
            "Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.6/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (1.3.0)\n",
            "Requirement already satisfied: pyasn1>=0.1.3 in /usr/local/lib/python3.6/dist-packages (from rsa<4.1,>=3.1.4->google-auth<2,>=1.6.3->tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (0.4.7)\n",
            "Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.6/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (3.1.0)\n",
            "Requirement already satisfied: requests>=2.0.0 in /usr/local/lib/python3.6/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (2.21.0)\n",
            "Requirement already satisfied: idna<2.9,>=2.5 in /usr/local/lib/python3.6/dist-packages (from requests>=2.0.0->requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (2.8)\n",
            "Requirement already satisfied: urllib3<1.25,>=1.21.1 in /usr/local/lib/python3.6/dist-packages (from requests>=2.0.0->requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (1.24.3)\n",
            "Requirement already satisfied: chardet<3.1.0,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests>=2.0.0->requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (3.0.4)\n",
            "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.6/dist-packages (from requests>=2.0.0->requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.1.0,>=2.0.0->tensorflow==2.0.0) (2019.9.11)\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "1wlZswcMF7Rt",
        "colab_type": "text"
      },
      "source": [
        "#### Vergleich\n",
        "Als erstes für feste $k$ und $n$, was sich ändert ist die Samplesize, Anzahl der Samples und SNR"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "4qpY-gawAf-9",
        "colab_type": "text"
      },
      "source": [
        "### Systemparameter\n",
        "ACHTUNG: CHANNELANZAHL WURDE UNTERSCHIEDLICH VERWENDET \\\\\n",
        "$k$ - die Anzahl der Bits \\\\\n",
        "$M$ - Anzahl der unterschiedlichen Nachrichten \\\\\n",
        "$n$ - channel uses \\\\\n",
        "$N$ - Länge des Rauschvektors"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab_type": "code",
        "id": "czeNNfpY1qc2",
        "outputId": "8ad19581-ba71-4162-c172-f231f03b97ad",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 35
        }
      },
      "source": [
        "# System parameters (see the markdown cell above).\n",
        "k = 4      # Number of information bits per message, i.e., M=2**k\n",
        "M = 2**k   # Number of distinct messages\n",
        "n = 2    # Number of real channel uses per message\n",
        "#k = int(np.log2(M))\n",
        "#n = 2\n",
        "print(M)\n",
        "\n",
        "batch_size = 100\n",
        "\n",
        "SNR = 7  # training Eb/No in dB (converted to a noise std below)\n"
      ],
      "execution_count": 2,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "16\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "BA4TqJBOXXIg",
        "colab_type": "text"
      },
      "source": [
        "## Training Parameter"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "tb-DiBwSN255",
        "colab_type": "text"
      },
      "source": [
        "### Different Layers"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "hFMMLrY0LthL",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "# Layer definitions shared by the models below.\n",
        "randN_initial = keras.initializers.RandomNormal(mean=0.0, stddev=0.05, seed=None)\n",
        "\n",
        "# Autoencoder layers: one-hot message (M,) -> 2*n real symbols.\n",
        "EncIn = tf.keras.layers.Input(shape=(M,))#, dtype= tf.int32)\n",
        "e1 = tf.keras.layers.Dense(2*n, activation=None)\n",
        "e2 = tf.keras.layers.Lambda(lambda x:tf.reshape(x, shape=[-1,int(n/2),2]))\n",
        "# Average-power normalization: after this, E[x^2] = 1/2.\n",
        "EncOut = tf.keras.layers.Lambda(lambda x: x/tf.sqrt(2*tf.reduce_mean(tf.square(x))))\n",
        "GenIn = tf.keras.layers.Lambda(lambda x:tf.reshape(x,(tf.shape(x)[0],-1)))\n",
        "# = tf.keras.layers.Lambda(generator)\n",
        "DecIn = tf.keras.layers.Lambda(lambda x:tf.reshape(x, shape=[-1,int(n/2),2]))\n",
        "d1 = tf.keras.layers.Lambda(lambda x:tf.reshape(x, shape=[-1,n]))\n",
        "d2 = tf.keras.layers.Dense(M, activation='relu')\n",
        "DecOut = tf.keras.layers.Dense(M, activation='softmax')\n",
        "\n",
        "\n",
        "#noise_std = EbNo_to_noise(TRAINING_SNR)\n",
        "# custom functions / layers without weights\n",
        "norm_layer = keras.layers.Lambda(lambda x: tf.divide(x,tf.sqrt(2*tf.reduce_mean(tf.square(x)))))\n",
        "# NOTE(review): shape_layer reshapes to [-1,2,n] while e2/DecIn use [-1,n/2,2]\n",
        "# -- confirm which layout is intended.\n",
        "shape_layer = keras.layers.Lambda(lambda x: tf.reshape(x, shape=[-1,2,n]))\n",
        "shape_layer2 = keras.layers.Lambda(lambda x: tf.reshape(x, shape=[-1,n]))\n",
        "# noise_std is defined in a later cell; the lambda reads it at call time, so\n",
        "# this cell must simply be defined before channel_layer is first invoked.\n",
        "channel_layer = keras.layers.Lambda(lambda x: x + tf.random.normal(tf.shape(x), mean=0.0, stddev=noise_std))"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "7J96hJhKO9VJ",
        "colab_type": "text"
      },
      "source": [
        "### Help functions"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "uV7pjryDv4M4",
        "colab_type": "code",
        "outputId": "6517927a-d362-491c-a9e3-313fccd4c6a6",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 53
        }
      },
      "source": [
        "def EbNo2Sigma(ebnodb):\n",
        "    '''Convert Eb/No in dB to the AWGN noise standard deviation.\n",
        "\n",
        "    With rate 2*k/n bits per two real dimensions this is\n",
        "    sigma = 1/sqrt(2*(2k/n)*EbNo).\n",
        "    '''\n",
        "    ebno = 10**(ebnodb/10)\n",
        "    return 1/np.sqrt(2*(2*k/n)*ebno)\n",
        "\n",
        "def EbNo_to_noise(ebnodb):\n",
        "    '''Alias of EbNo2Sigma, kept for backwards compatibility.'''\n",
        "    return EbNo2Sigma(ebnodb)\n",
        "\n",
        "def real_channel(x, noise_std):\n",
        "    '''Black-box channel: AWGN with standard deviation noise_std.'''\n",
        "    return x + tf.random.normal(tf.shape(x), mean=0.0, stddev=noise_std)\n",
        "\n",
        "    # Alternative channels kept for experimentation:\n",
        "    #Rayleigh\n",
        "    #return x + tf.sqrt(tf.square(tf.random_normal(tf.shape(x), mean=0.0, stddev=noise_std)) + tf.square(tf.random_normal(tf.shape(x), mean=0.0, stddev=noise_std)))\n",
        "    #Uniform U(-3;3)\n",
        "    #return x + tf.random_uniform(tf.shape(x), minval=-2, maxval=2)\n",
        "\n",
        "def B_Ber(input_msg, msg):\n",
        "    '''Batch message-error rate between one-hot labels and softmax outputs.'''\n",
        "    pred_error = tf.not_equal(tf.argmax(msg, 1), tf.argmax(input_msg, 1))\n",
        "    return tf.reduce_mean(tf.cast(pred_error, tf.float32))\n",
        "\n",
        "def B_Ber_m(input_msg, msg):\n",
        "    '''Batch message-error rate when input_msg holds integer message indices.'''\n",
        "    pred_error = tf.not_equal(input_msg, tf.argmax(msg, 1))\n",
        "    return tf.reduce_mean(tf.cast(pred_error, tf.float32))\n",
        "\n",
        "def random_sample(batch_size=32):\n",
        "    '''Draw batch_size random message indices from {0, ..., M-1}.'''\n",
        "    return np.random.randint(M, size=batch_size)\n",
        "\n",
        "def SNR_to_noise(snrdb):\n",
        "    '''Convert an SNR in dB to a noise standard deviation.'''\n",
        "    snr = 10**(snrdb/10)\n",
        "    return 1/np.sqrt(2*snr)\n",
        "\n",
        "\n",
        "noise_std = EbNo2Sigma(SNR)\n",
        "\n",
        "print(EbNo2Sigma(SNR))\n",
        "print(EbNo_to_noise(SNR))"
      ],
      "execution_count": 4,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "0.15792649852735607\n",
            "0.15792649852735607\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "AOoYuK_jR9rH",
        "colab_type": "text"
      },
      "source": [
        "# Models"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-14T06:31:29.973887Z",
          "start_time": "2019-05-14T06:31:29.969185Z"
        },
        "colab_type": "code",
        "id": "WbqrTgB_SYbf",
        "outputId": "e2c6edbb-0462-4d72-965f-0e24a3808c15",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 394
        }
      },
      "source": [
        "### install necessary packages if in colab\n",
        "def run_subprocess_command(cmd):\n",
        "  process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE)\n",
        "  for line in process.stdout:\n",
        "      print(line.decode().strip())\n",
        "      \n",
        "import sys, subprocess\n",
        "IN_COLAB = 'google.colab' in sys.modules\n",
        "colab_requirements = ['pip install tf-nightly-gpu-2.0-preview==2.0.0.dev20190513']\n",
        "if IN_COLAB:\n",
        "  for i in colab_requirements:\n",
        "    run_subprocess_command(i)"
      ],
      "execution_count": 5,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Requirement already satisfied: tf-nightly-gpu-2.0-preview==2.0.0.dev20190513 in /usr/local/lib/python3.6/dist-packages (2.0.0.dev20190513)\n",
            "Requirement already satisfied: wheel>=0.26 in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (0.33.6)\n",
            "Requirement already satisfied: absl-py>=0.7.0 in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (0.8.1)\n",
            "Requirement already satisfied: keras-applications>=1.0.6 in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (1.0.8)\n",
            "Requirement already satisfied: protobuf>=3.6.1 in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (3.10.0)\n",
            "Requirement already satisfied: grpcio>=1.8.6 in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (1.15.0)\n",
            "Requirement already satisfied: google-pasta>=0.1.6 in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (0.1.8)\n",
            "Requirement already satisfied: tensorflow-estimator-2.0-preview in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (2.0.0)\n",
            "Requirement already satisfied: gast>=0.2.0 in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (0.2.2)\n",
            "Requirement already satisfied: numpy<2.0,>=1.14.5 in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (1.17.3)\n",
            "Requirement already satisfied: tb-nightly<1.15.0a0,>=1.14.0a0 in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (1.14.0a20190614)\n",
            "Requirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (1.1.0)\n",
            "Requirement already satisfied: astor>=0.6.0 in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (0.8.0)\n",
            "Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (1.1.0)\n",
            "Requirement already satisfied: six>=1.10.0 in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (1.12.0)\n",
            "Requirement already satisfied: wrapt>=1.11.1 in /usr/local/lib/python3.6/dist-packages (from tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (1.11.2)\n",
            "Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from keras-applications>=1.0.6->tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (2.8.0)\n",
            "Requirement already satisfied: setuptools in /usr/local/lib/python3.6/dist-packages (from protobuf>=3.6.1->tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (41.4.0)\n",
            "Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.6/dist-packages (from tb-nightly<1.15.0a0,>=1.14.0a0->tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (0.16.0)\n",
            "Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.6/dist-packages (from tb-nightly<1.15.0a0,>=1.14.0a0->tf-nightly-gpu-2.0-preview==2.0.0.dev20190513) (3.1.1)\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "colab_type": "text",
        "id": "3eKFKF5HSYbi"
      },
      "source": [
        "### load packages"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-14T06:31:30.061880Z",
          "start_time": "2019-05-14T06:31:29.975587Z"
        },
        "colab_type": "code",
        "id": "at1xYevFSYbl",
        "outputId": "f008d92b-c4ad-4de8-c785-21bd980eb288",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 35
        }
      },
      "source": [
        "# Restrict TensorFlow to GPU index 3 only.\n",
        "%env CUDA_VISIBLE_DEVICES=3"
      ],
      "execution_count": 6,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "env: CUDA_VISIBLE_DEVICES=3\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-14T06:31:33.702580Z",
          "start_time": "2019-05-14T06:31:30.063437Z"
        },
        "colab_type": "code",
        "id": "759gzUFlSYbq",
        "outputId": "ac2ab190-2f7a-4731-9d73-2b8b8c0bb1ff",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 73
        }
      },
      "source": [
        "# Re-imports kept so this section can run stand-alone after a kernel restart.\n",
        "import tensorflow as tf\n",
        "import numpy as np\n",
        "import matplotlib.pyplot as plt\n",
        "from tqdm.auto import tqdm  # tqdm.auto supersedes the experimental tqdm.autonotebook (silences the TqdmExperimentalWarning)\n",
        "%matplotlib inline\n",
        "from IPython import display\n",
        "import pandas as pd"
      ],
      "execution_count": 7,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "/usr/local/lib/python3.6/dist-packages/tqdm/autonotebook/__init__.py:14: TqdmExperimentalWarning: Using `tqdm.autonotebook.tqdm` in notebook mode. Use `tqdm.tqdm` instead to force console mode (e.g. in jupyter console)\n",
            "  \" (e.g. in jupyter console)\", TqdmExperimentalWarning)\n"
          ],
          "name": "stderr"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-14T06:31:33.711214Z",
          "start_time": "2019-05-14T06:31:33.706313Z"
        },
        "colab_type": "code",
        "id": "AxY3I4SfSYbt",
        "outputId": "6de32e6b-ce9f-4e47-cef5-a5a537516721",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 35
        }
      },
      "source": [
        "print(tf.__version__)"
      ],
      "execution_count": 8,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "2.0.0-dev20190513\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-14T06:31:33.803523Z",
          "start_time": "2019-05-14T06:31:33.714599Z"
        },
        "colab_type": "code",
        "id": "Ypym6ZAESYbx",
        "colab": {}
      },
      "source": [
        "# Dataset-size constants from the WGAN-on-MNIST template this notebook was\n",
        "# adapted from (see the fashion_mnist remnants below); left commented out\n",
        "# because the data here is generated, not loaded.\n",
        "#TRAIN_BUF=60000\n",
        "#BATCH_SIZE=512\n",
        "#TEST_BUF=10000\n",
        "#DIMS = (28,28,1)\n",
        "#N_TRAIN_BATCHES =int(TRAIN_BUF/BATCH_SIZE)\n",
        "#N_TEST_BATCHES = int(TEST_BUF/BATCH_SIZE)\n",
        "#print(N_TRAIN_BATCHES)"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-14T06:31:38.044471Z",
          "start_time": "2019-05-14T06:31:33.805821Z"
        },
        "colab_type": "code",
        "id": "xhqU6sqiSYbz",
        "colab": {}
      },
      "source": [
        "# Training data: unit-average-power Gaussian inputs passed through the channel.\n",
        "# (The datasets here are noise vectors and their channel outputs, not images;\n",
        "# the MNIST loading code from the template was removed.)\n",
        "\n",
        "def creat_train_data(length):\n",
        "    '''Build a list of `length` batches of channel observations.\n",
        "\n",
        "    Each batch is a (batch_size, n) Gaussian tensor normalized so that\n",
        "    E[x^2] = 1/2, then passed through real_channel (AWGN with noise_std).\n",
        "    NOTE(review): name keeps the original spelling (\"creat\") so existing\n",
        "    callers keep working.\n",
        "    '''\n",
        "    train_dataset = []\n",
        "    for i in range(length):\n",
        "        x = tf.random.normal((batch_size, n), dtype=tf.dtypes.float32)\n",
        "        x = x/tf.sqrt(2*tf.reduce_mean(tf.square(x)))\n",
        "        train_dataset.append(real_channel(x, noise_std))\n",
        "    return train_dataset\n",
        "\n",
        "train_dataset = creat_train_data(100)\n",
        "test_dataset = creat_train_data(100)\n"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "colab_type": "text",
        "id": "HLxPlL7QSYb1"
      },
      "source": [
        "### Define the network as tf.keras.model object"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "n4gYXZf9hkzY",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "#@tf.function\n",
        "def train(real_data):\n",
        "  '''One WGAN training step: compute and apply both networks' gradients.'''\n",
        "  gen_gradients, disc_gradients = compute_gradients(real_data)\n",
        "  apply_gradients(gen_gradients, disc_gradients)"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "PPsFbJwchvhq",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "def compute_gradients(real_data):\n",
        "  '''Forward pass; returns (generator, discriminator) gradient lists.\n",
        "\n",
        "  Both losses are computed under separate tapes so each network's\n",
        "  gradients can be taken independently from the same forward pass.\n",
        "  '''\n",
        "  with tf.GradientTape() as gen_tape, tf.GradientTape() as disc_tape:\n",
        "    disc_loss, gen_loss = compute_loss(real_data)\n",
        "  gen_gradients = gen_tape.gradient(gen_loss, w_generator.trainable_variables)\n",
        "  disc_gradients = disc_tape.gradient(disc_loss, w_discriminator.trainable_variables)\n",
        "  # Removed the leftover debug print that fired on every training step.\n",
        "  return gen_gradients, disc_gradients"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-14T06:31:38.068468Z",
          "start_time": "2019-05-14T06:31:38.046751Z"
        },
        "colab_type": "code",
        "id": "Wyipg-4oSYb1",
        "colab": {}
      },
      "source": [
        "def compute_loss(real_data):\n",
        "  '''Forward pass; returns (disc_loss, gen_loss) for one batch.\n",
        "\n",
        "  NOTE(review): the sign convention is flipped relative to the standard\n",
        "  WGAN-GP losses (disc = E[D(fake)] - E[D(real)] + gp, gen = -E[D(fake)]);\n",
        "  here the critic minimizes E[D(real)] - E[D(fake)] + gp and the generator\n",
        "  minimizes E[D(fake)], the same adversarial game with D negated.\n",
        "  '''\n",
        "  gradient_penalty_weight = 10\n",
        "\n",
        "  # Generator inputs are drawn from a standard normal (the template this\n",
        "  # cell came from used uniform noise).\n",
        "  x_samp = tf.random.normal((batch_size, n), dtype=tf.dtypes.float32)\n",
        "  x_gen = w_generator(x_samp, training=True)  # fake data\n",
        "\n",
        "  # Critic scores for real and generated batches.\n",
        "  logits_x = w_discriminator(real_data)\n",
        "  logits_x_gen = w_discriminator(x_gen)\n",
        "\n",
        "  # WGAN-GP gradient-penalty regularizer.\n",
        "  d_regularizer = gradient_penalty(real_data, x_gen)\n",
        "  disc_loss = (tf.reduce_mean(logits_x) - tf.reduce_mean(logits_x_gen)\n",
        "               + d_regularizer * gradient_penalty_weight)\n",
        "  gen_loss = tf.reduce_mean(logits_x_gen)\n",
        "  return disc_loss, gen_loss\n",
        "\n",
        "\n",
        "def apply_gradients(gen_gradients, disc_gradients):\n",
        "  '''Apply the given gradients with each network's own optimizer.'''\n",
        "  gen_optimizer.apply_gradients(zip(gen_gradients, w_generator.trainable_variables))\n",
        "  disc_optimizer.apply_gradients(zip(disc_gradients, w_discriminator.trainable_variables))\n",
        "\n",
        "\n",
        "def gradient_penalty(x, x_gen):\n",
        "  '''WGAN-GP penalty E[(||grad_{x_hat} D(x_hat)||_2 - 1)^2].\n",
        "\n",
        "  x and x_gen are (batch_size, n). epsilon is drawn per sample so each\n",
        "  x_hat interpolates one real sample with its generated counterpart.\n",
        "  Fix: the original epsilon shape [B,1,1,1] was sized for rank-4 image\n",
        "  tensors; broadcasting it against the rank-2 data here produced a\n",
        "  (B,1,B,n) tensor that mixed samples across the batch, and the norm was\n",
        "  summed over the wrong axes.\n",
        "  '''\n",
        "  epsilon = tf.random.uniform([tf.shape(x)[0], 1], 0.0, 1.0)\n",
        "  x_hat = epsilon * x + (1 - epsilon) * x_gen\n",
        "  with tf.GradientTape() as t:\n",
        "      t.watch(x_hat)\n",
        "      d_hat = w_discriminator(x_hat)\n",
        "  gradients = t.gradient(d_hat, x_hat)\n",
        "  ddx = tf.sqrt(tf.reduce_sum(gradients ** 2, axis=1))\n",
        "  return tf.reduce_mean((ddx - 1.0) ** 2)\n"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "colab_type": "text",
        "id": "qEVl58nDSYb4"
      },
      "source": [
        "### Define the network architecture\n",
        "\n",
        "## Changes\n",
        "In dem vorherigen GAN-Modell hat der Discriminator side_information erhalten, worin das originale Rauschen enthalten war. Dies ist in dem WGAN-Paper anders gemacht worden."
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-14T06:31:38.219862Z",
          "start_time": "2019-05-14T06:31:38.070570Z"
        },
        "colab_type": "code",
        "id": "dyU21SGbSYb4",
        "colab": {}
      },
      "source": [
        "N_Z = n  # generator latent dimension equals the number of channel uses\n",
        "\n",
        "# Generator: latent noise (n,) -> fake channel observation (n,).\n",
        "w_generator = keras.models.Sequential([\n",
        "  tf.keras.layers.Input(shape=(n,)),\n",
        "  tf.keras.layers.Dense(32, use_bias=True, activation='relu'),\n",
        "  tf.keras.layers.Dense(32, use_bias=True, activation='relu'),\n",
        "  tf.keras.layers.Dense(n, use_bias=True, activation='linear')\n",
        "])\n",
        "\n",
        "# Critic: (n,) -> unbounded real-valued score.\n",
        "# Fix: the output layer previously used a sigmoid, but a WGAN critic must\n",
        "# output an unbounded score (Arjovsky et al. 2017 / Gulrajani et al. 2017);\n",
        "# a sigmoid bounds and saturates it, breaking the Wasserstein loss.\n",
        "w_discriminator = keras.models.Sequential([\n",
        "  tf.keras.layers.Dense(32, use_bias=True, kernel_initializer=randN_initial, activation='relu', input_shape=(n,)),\n",
        "  tf.keras.layers.Dense(1, use_bias=False, activation=None)\n",
        "])\n"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-10T18:40:40.306731Z",
          "start_time": "2019-05-10T18:40:40.292930Z"
        },
        "colab_type": "text",
        "id": "wi_ZuWBdSYb6"
      },
      "source": [
        "### Create Model"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-14T06:31:39.047233Z",
          "start_time": "2019-05-14T06:31:38.222179Z"
        },
        "colab_type": "code",
        "id": "dSYjNRAwSYb7",
        "colab": {}
      },
      "source": [
        "# Optimizers: Adam for the generator, RMSprop for the critic.\n",
        "gen_optimizer = tf.keras.optimizers.Adam(0.0001)#, beta_1=0.5)\n",
        "disc_optimizer = tf.keras.optimizers.RMSprop(0.0005)\n",
        "# Remnant of the template's WGAN model wrapper; training here uses the free\n",
        "# functions train/compute_gradients/compute_loss instead:\n",
        "#model = WGAN(\n",
        "#    gen = w_generator,\n",
        "#    disc = w_discriminator,\n",
        "#    gen_optimizer = gen_optimizer,\n",
        "#    disc_optimizer = disc_optimizer,\n",
        "#    n_Z = N_Z,\n",
        "# NOTE(review): compute_loss hard-codes its own gradient_penalty_weight = 10;\n",
        "# this global is not read there -- confirm which value is intended.\n",
        "gradient_penalty_weight = 10.0\n"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "colab_type": "text",
        "id": "qwBg8NwrSYb9"
      },
      "source": [
        "### Train the model"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-14T06:31:39.056490Z",
          "start_time": "2019-05-14T06:31:39.049635Z"
        },
        "colab_type": "code",
        "id": "47sz8RMeSYb-",
        "colab": {}
      },
      "source": [
        "# Visualize a row of samples drawn from the generator.\n",
        "def plot_reconstruction(model, nex=8, zm=2):\n",
        "    \"\"\"Show `nex` generated examples side by side; `zm` scales panel size.\n",
        "\n",
        "    Relies on notebook globals BATCH_SIZE and N_Z for the latent draw.\n",
        "    \"\"\"\n",
        "    noise = tf.random.normal(shape=(BATCH_SIZE, N_Z))\n",
        "    generated = model.generate(noise).numpy()\n",
        "    fig, axes = plt.subplots(ncols=nex, nrows=1, figsize=(zm * nex, zm))\n",
        "    for idx, ax in enumerate(axes):\n",
        "        ax.matshow(generated[idx].squeeze(), cmap=plt.cm.Greys, vmin=0, vmax=1)\n",
        "        ax.axis('off')\n",
        "    plt.show()"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-14T06:31:39.152670Z",
          "start_time": "2019-05-14T06:31:39.058505Z"
        },
        "colab_type": "code",
        "id": "pKkEX9yBSYcB",
        "colab": {}
      },
      "source": [
        "# a pandas dataframe to save the loss information to\n",
        "losses = pd.DataFrame(columns = ['disc_loss', 'gen_loss'])"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-14T07:04:26.791634Z",
          "start_time": "2019-05-14T07:04:17.126436Z"
        },
        "colab_type": "code",
        "id": "00dI2M4iSYcE",
        "outputId": "ee47418c-83b8-4a0b-c9b8-70603bf3703b",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1000
        }
      },
      "source": [
        "# Train for n_epochs steps. Each step draws a fresh Gaussian batch,\n",
        "# scales it so its mean power is 1/2, pushes it through the real channel,\n",
        "# runs one update, then logs the loss on the same batch.\n",
        "# Assumes train, real_channel, compute_loss, batch_size, n and noise_std\n",
        "# are defined earlier in the notebook.\n",
        "n_epochs = 1200\n",
        "for epoch in range(n_epochs):\n",
        "  raw = tf.random.normal((batch_size, n), dtype=tf.dtypes.float32)\n",
        "  normalized = raw / tf.sqrt(2 * tf.reduce_mean(tf.square(raw)))\n",
        "  real_data = real_channel(normalized, noise_std)\n",
        "  train(real_data)\n",
        "  # evaluate on the batch just used for training\n",
        "  epoch_losses = [compute_loss(real_data)]\n",
        "  losses.loc[len(losses)] = np.mean(epoch_losses, axis=0)\n",
        "  msg = \"Epoch: {} | disc_loss: {} | gen_loss: {}\".format(\n",
        "      epoch, losses.disc_loss.values[-1], losses.gen_loss.values[-1])\n",
        "  print(msg)\n",
        "# plot_reconstruction(model)"
      ],
      "execution_count": 18,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "compute_gradients\n",
            "Epoch: 0 | disc_loss: 7.132716655731201 | gen_loss: 0.4952550530433655\n",
            "compute_gradients\n",
            "Epoch: 1 | disc_loss: 7.126594543457031 | gen_loss: 0.49546435475349426\n",
            "compute_gradients\n",
            "Epoch: 2 | disc_loss: 6.995324611663818 | gen_loss: 0.49522289633750916\n",
            "compute_gradients\n",
            "Epoch: 3 | disc_loss: 6.896092414855957 | gen_loss: 0.49636784195899963\n",
            "compute_gradients\n",
            "Epoch: 4 | disc_loss: 6.754265308380127 | gen_loss: 0.49632158875465393\n",
            "compute_gradients\n",
            "Epoch: 5 | disc_loss: 6.775443077087402 | gen_loss: 0.49565136432647705\n",
            "compute_gradients\n",
            "Epoch: 6 | disc_loss: 6.670928955078125 | gen_loss: 0.4959333837032318\n",
            "compute_gradients\n",
            "Epoch: 7 | disc_loss: 6.6210036277771 | gen_loss: 0.49542129039764404\n",
            "compute_gradients\n",
            "Epoch: 8 | disc_loss: 6.642005920410156 | gen_loss: 0.49623480439186096\n",
            "compute_gradients\n",
            "Epoch: 9 | disc_loss: 6.615437030792236 | gen_loss: 0.4969867467880249\n",
            "compute_gradients\n",
            "Epoch: 10 | disc_loss: 6.524909973144531 | gen_loss: 0.49640992283821106\n",
            "compute_gradients\n",
            "Epoch: 11 | disc_loss: 6.454870700836182 | gen_loss: 0.49683448672294617\n",
            "compute_gradients\n",
            "Epoch: 12 | disc_loss: 6.3614983558654785 | gen_loss: 0.4964454770088196\n",
            "compute_gradients\n",
            "Epoch: 13 | disc_loss: 6.393040657043457 | gen_loss: 0.49725207686424255\n",
            "compute_gradients\n",
            "Epoch: 14 | disc_loss: 6.378749847412109 | gen_loss: 0.4975166618824005\n",
            "compute_gradients\n",
            "Epoch: 15 | disc_loss: 6.264346122741699 | gen_loss: 0.4971620440483093\n",
            "compute_gradients\n",
            "Epoch: 16 | disc_loss: 6.389629364013672 | gen_loss: 0.4975602626800537\n",
            "compute_gradients\n",
            "Epoch: 17 | disc_loss: 6.229983806610107 | gen_loss: 0.497990220785141\n",
            "compute_gradients\n",
            "Epoch: 18 | disc_loss: 5.926950454711914 | gen_loss: 0.4962938725948334\n",
            "compute_gradients\n",
            "Epoch: 19 | disc_loss: 5.912430286407471 | gen_loss: 0.49721771478652954\n",
            "compute_gradients\n",
            "Epoch: 20 | disc_loss: 5.9184441566467285 | gen_loss: 0.4973788857460022\n",
            "compute_gradients\n",
            "Epoch: 21 | disc_loss: 5.972837924957275 | gen_loss: 0.49699142575263977\n",
            "compute_gradients\n",
            "Epoch: 22 | disc_loss: 5.9233903884887695 | gen_loss: 0.49774685502052307\n",
            "compute_gradients\n",
            "Epoch: 23 | disc_loss: 6.016650676727295 | gen_loss: 0.49757272005081177\n",
            "compute_gradients\n",
            "Epoch: 24 | disc_loss: 5.735506534576416 | gen_loss: 0.49787425994873047\n",
            "compute_gradients\n",
            "Epoch: 25 | disc_loss: 5.869600296020508 | gen_loss: 0.49694541096687317\n",
            "compute_gradients\n",
            "Epoch: 26 | disc_loss: 5.884900093078613 | gen_loss: 0.49792712926864624\n",
            "compute_gradients\n",
            "Epoch: 27 | disc_loss: 5.841341018676758 | gen_loss: 0.49903327226638794\n",
            "compute_gradients\n",
            "Epoch: 28 | disc_loss: 5.648868083953857 | gen_loss: 0.4979584217071533\n",
            "compute_gradients\n",
            "Epoch: 29 | disc_loss: 5.709439754486084 | gen_loss: 0.49813225865364075\n",
            "compute_gradients\n",
            "Epoch: 30 | disc_loss: 5.697440147399902 | gen_loss: 0.4977952837944031\n",
            "compute_gradients\n",
            "Epoch: 31 | disc_loss: 5.4621477127075195 | gen_loss: 0.49803391098976135\n",
            "compute_gradients\n",
            "Epoch: 32 | disc_loss: 5.758059978485107 | gen_loss: 0.4993409812450409\n",
            "compute_gradients\n",
            "Epoch: 33 | disc_loss: 5.562182903289795 | gen_loss: 0.4982697665691376\n",
            "compute_gradients\n",
            "Epoch: 34 | disc_loss: 5.438207149505615 | gen_loss: 0.49821406602859497\n",
            "compute_gradients\n",
            "Epoch: 35 | disc_loss: 5.460281848907471 | gen_loss: 0.49878770112991333\n",
            "compute_gradients\n",
            "Epoch: 36 | disc_loss: 5.326774597167969 | gen_loss: 0.4982422888278961\n",
            "compute_gradients\n",
            "Epoch: 37 | disc_loss: 5.375877380371094 | gen_loss: 0.4989360570907593\n",
            "compute_gradients\n",
            "Epoch: 38 | disc_loss: 5.374532222747803 | gen_loss: 0.49834302067756653\n",
            "compute_gradients\n",
            "Epoch: 39 | disc_loss: 5.272761821746826 | gen_loss: 0.49839869141578674\n",
            "compute_gradients\n",
            "Epoch: 40 | disc_loss: 5.29813289642334 | gen_loss: 0.4989682734012604\n",
            "compute_gradients\n",
            "Epoch: 41 | disc_loss: 5.179793357849121 | gen_loss: 0.49882128834724426\n",
            "compute_gradients\n",
            "Epoch: 42 | disc_loss: 5.204394817352295 | gen_loss: 0.4993925392627716\n",
            "compute_gradients\n",
            "Epoch: 43 | disc_loss: 5.0188422203063965 | gen_loss: 0.4989626705646515\n",
            "compute_gradients\n",
            "Epoch: 44 | disc_loss: 5.248038291931152 | gen_loss: 0.5003681778907776\n",
            "compute_gradients\n",
            "Epoch: 45 | disc_loss: 5.223520755767822 | gen_loss: 0.4999016523361206\n",
            "compute_gradients\n",
            "Epoch: 46 | disc_loss: 5.05817174911499 | gen_loss: 0.4987260103225708\n",
            "compute_gradients\n",
            "Epoch: 47 | disc_loss: 4.8641510009765625 | gen_loss: 0.4999854266643524\n",
            "compute_gradients\n",
            "Epoch: 48 | disc_loss: 4.968221187591553 | gen_loss: 0.49864432215690613\n",
            "compute_gradients\n",
            "Epoch: 49 | disc_loss: 4.791744232177734 | gen_loss: 0.49860790371894836\n",
            "compute_gradients\n",
            "Epoch: 50 | disc_loss: 4.993651390075684 | gen_loss: 0.49908873438835144\n",
            "compute_gradients\n",
            "Epoch: 51 | disc_loss: 4.862175941467285 | gen_loss: 0.4993302524089813\n",
            "compute_gradients\n",
            "Epoch: 52 | disc_loss: 4.897275924682617 | gen_loss: 0.4994671940803528\n",
            "compute_gradients\n",
            "Epoch: 53 | disc_loss: 4.96756649017334 | gen_loss: 0.4994577467441559\n",
            "compute_gradients\n",
            "Epoch: 54 | disc_loss: 4.770660400390625 | gen_loss: 0.49880263209342957\n",
            "compute_gradients\n",
            "Epoch: 55 | disc_loss: 4.665704250335693 | gen_loss: 0.4988147020339966\n",
            "compute_gradients\n",
            "Epoch: 56 | disc_loss: 4.5951128005981445 | gen_loss: 0.4985092878341675\n",
            "compute_gradients\n",
            "Epoch: 57 | disc_loss: 4.827287673950195 | gen_loss: 0.4989912509918213\n",
            "compute_gradients\n",
            "Epoch: 58 | disc_loss: 4.513615131378174 | gen_loss: 0.499446839094162\n",
            "compute_gradients\n",
            "Epoch: 59 | disc_loss: 4.5870747566223145 | gen_loss: 0.4996028244495392\n",
            "compute_gradients\n",
            "Epoch: 60 | disc_loss: 4.870170593261719 | gen_loss: 0.5005115270614624\n",
            "compute_gradients\n",
            "Epoch: 61 | disc_loss: 4.803421974182129 | gen_loss: 0.49962130188941956\n",
            "compute_gradients\n",
            "Epoch: 62 | disc_loss: 4.6258864402771 | gen_loss: 0.4994563162326813\n",
            "compute_gradients\n",
            "Epoch: 63 | disc_loss: 4.789246559143066 | gen_loss: 0.4977860748767853\n",
            "compute_gradients\n",
            "Epoch: 64 | disc_loss: 4.645888805389404 | gen_loss: 0.5002809166908264\n",
            "compute_gradients\n",
            "Epoch: 65 | disc_loss: 4.50015926361084 | gen_loss: 0.4984440505504608\n",
            "compute_gradients\n",
            "Epoch: 66 | disc_loss: 4.25400447845459 | gen_loss: 0.4979246258735657\n",
            "compute_gradients\n",
            "Epoch: 67 | disc_loss: 4.197560787200928 | gen_loss: 0.49881964921951294\n",
            "compute_gradients\n",
            "Epoch: 68 | disc_loss: 4.289337158203125 | gen_loss: 0.4997962713241577\n",
            "compute_gradients\n",
            "Epoch: 69 | disc_loss: 4.343480110168457 | gen_loss: 0.49935367703437805\n",
            "compute_gradients\n",
            "Epoch: 70 | disc_loss: 4.193820953369141 | gen_loss: 0.49922293424606323\n",
            "compute_gradients\n",
            "Epoch: 71 | disc_loss: 4.46726131439209 | gen_loss: 0.4975573420524597\n",
            "compute_gradients\n",
            "Epoch: 72 | disc_loss: 4.070745468139648 | gen_loss: 0.4984092712402344\n",
            "compute_gradients\n",
            "Epoch: 73 | disc_loss: 4.32499885559082 | gen_loss: 0.49940916895866394\n",
            "compute_gradients\n",
            "Epoch: 74 | disc_loss: 4.053350925445557 | gen_loss: 0.49678924679756165\n",
            "compute_gradients\n",
            "Epoch: 75 | disc_loss: 4.279631614685059 | gen_loss: 0.5008136034011841\n",
            "compute_gradients\n",
            "Epoch: 76 | disc_loss: 4.196078777313232 | gen_loss: 0.49670517444610596\n",
            "compute_gradients\n",
            "Epoch: 77 | disc_loss: 4.267904758453369 | gen_loss: 0.49960049986839294\n",
            "compute_gradients\n",
            "Epoch: 78 | disc_loss: 4.052550315856934 | gen_loss: 0.49872279167175293\n",
            "compute_gradients\n",
            "Epoch: 79 | disc_loss: 3.9854042530059814 | gen_loss: 0.499826043844223\n",
            "compute_gradients\n",
            "Epoch: 80 | disc_loss: 4.273393630981445 | gen_loss: 0.49876871705055237\n",
            "compute_gradients\n",
            "Epoch: 81 | disc_loss: 3.9314682483673096 | gen_loss: 0.4984064996242523\n",
            "compute_gradients\n",
            "Epoch: 82 | disc_loss: 3.827484369277954 | gen_loss: 0.4985142946243286\n",
            "compute_gradients\n",
            "Epoch: 83 | disc_loss: 3.7809853553771973 | gen_loss: 0.4975873529911041\n",
            "compute_gradients\n",
            "Epoch: 84 | disc_loss: 3.9488964080810547 | gen_loss: 0.4974452257156372\n",
            "compute_gradients\n",
            "Epoch: 85 | disc_loss: 3.6618616580963135 | gen_loss: 0.49744266271591187\n",
            "compute_gradients\n",
            "Epoch: 86 | disc_loss: 3.9591739177703857 | gen_loss: 0.4969123899936676\n",
            "compute_gradients\n",
            "Epoch: 87 | disc_loss: 3.7367537021636963 | gen_loss: 0.49752822518348694\n",
            "compute_gradients\n",
            "Epoch: 88 | disc_loss: 3.5948259830474854 | gen_loss: 0.49769678711891174\n",
            "compute_gradients\n",
            "Epoch: 89 | disc_loss: 3.5267791748046875 | gen_loss: 0.49917566776275635\n",
            "compute_gradients\n",
            "Epoch: 90 | disc_loss: 3.2989730834960938 | gen_loss: 0.49767735600471497\n",
            "compute_gradients\n",
            "Epoch: 91 | disc_loss: 3.3234193325042725 | gen_loss: 0.49568235874176025\n",
            "compute_gradients\n",
            "Epoch: 92 | disc_loss: 3.6118907928466797 | gen_loss: 0.49697816371917725\n",
            "compute_gradients\n",
            "Epoch: 93 | disc_loss: 3.59682297706604 | gen_loss: 0.4963022470474243\n",
            "compute_gradients\n",
            "Epoch: 94 | disc_loss: 3.6836657524108887 | gen_loss: 0.49650904536247253\n",
            "compute_gradients\n",
            "Epoch: 95 | disc_loss: 3.3492698669433594 | gen_loss: 0.49614498019218445\n",
            "compute_gradients\n",
            "Epoch: 96 | disc_loss: 3.571746826171875 | gen_loss: 0.4963279962539673\n",
            "compute_gradients\n",
            "Epoch: 97 | disc_loss: 3.558732271194458 | gen_loss: 0.49722370505332947\n",
            "compute_gradients\n",
            "Epoch: 98 | disc_loss: 3.37727689743042 | gen_loss: 0.4988666772842407\n",
            "compute_gradients\n",
            "Epoch: 99 | disc_loss: 3.3558449745178223 | gen_loss: 0.4939587414264679\n",
            "compute_gradients\n",
            "Epoch: 100 | disc_loss: 3.1525557041168213 | gen_loss: 0.4936906695365906\n",
            "compute_gradients\n",
            "Epoch: 101 | disc_loss: 3.358025312423706 | gen_loss: 0.4946750998497009\n",
            "compute_gradients\n",
            "Epoch: 102 | disc_loss: 3.31133770942688 | gen_loss: 0.4939606785774231\n",
            "compute_gradients\n",
            "Epoch: 103 | disc_loss: 3.190380573272705 | gen_loss: 0.4948104918003082\n",
            "compute_gradients\n",
            "Epoch: 104 | disc_loss: 3.3874666690826416 | gen_loss: 0.49449795484542847\n",
            "compute_gradients\n",
            "Epoch: 105 | disc_loss: 3.0916569232940674 | gen_loss: 0.49050021171569824\n",
            "compute_gradients\n",
            "Epoch: 106 | disc_loss: 3.252941370010376 | gen_loss: 0.49516162276268005\n",
            "compute_gradients\n",
            "Epoch: 107 | disc_loss: 3.0633392333984375 | gen_loss: 0.49208423495292664\n",
            "compute_gradients\n",
            "Epoch: 108 | disc_loss: 2.855144500732422 | gen_loss: 0.4930464029312134\n",
            "compute_gradients\n",
            "Epoch: 109 | disc_loss: 3.035640001296997 | gen_loss: 0.4914602041244507\n",
            "compute_gradients\n",
            "Epoch: 110 | disc_loss: 2.8958945274353027 | gen_loss: 0.49273526668548584\n",
            "compute_gradients\n",
            "Epoch: 111 | disc_loss: 3.088554620742798 | gen_loss: 0.49422329664230347\n",
            "compute_gradients\n",
            "Epoch: 112 | disc_loss: 2.9549055099487305 | gen_loss: 0.49184754490852356\n",
            "compute_gradients\n",
            "Epoch: 113 | disc_loss: 2.986732006072998 | gen_loss: 0.4910375475883484\n",
            "compute_gradients\n",
            "Epoch: 114 | disc_loss: 2.783367156982422 | gen_loss: 0.4903377890586853\n",
            "compute_gradients\n",
            "Epoch: 115 | disc_loss: 2.78922438621521 | gen_loss: 0.4919019341468811\n",
            "compute_gradients\n",
            "Epoch: 116 | disc_loss: 2.938035488128662 | gen_loss: 0.48935467004776\n",
            "compute_gradients\n",
            "Epoch: 117 | disc_loss: 2.6795506477355957 | gen_loss: 0.4908977746963501\n",
            "compute_gradients\n",
            "Epoch: 118 | disc_loss: 2.924940347671509 | gen_loss: 0.4909757673740387\n",
            "compute_gradients\n",
            "Epoch: 119 | disc_loss: 2.6751513481140137 | gen_loss: 0.48834651708602905\n",
            "compute_gradients\n",
            "Epoch: 120 | disc_loss: 2.4388184547424316 | gen_loss: 0.4879437983036041\n",
            "compute_gradients\n",
            "Epoch: 121 | disc_loss: 2.4694504737854004 | gen_loss: 0.48877203464508057\n",
            "compute_gradients\n",
            "Epoch: 122 | disc_loss: 2.291293144226074 | gen_loss: 0.48715370893478394\n",
            "compute_gradients\n",
            "Epoch: 123 | disc_loss: 2.6578876972198486 | gen_loss: 0.4860711395740509\n",
            "compute_gradients\n",
            "Epoch: 124 | disc_loss: 2.69661283493042 | gen_loss: 0.48577624559402466\n",
            "compute_gradients\n",
            "Epoch: 125 | disc_loss: 2.316610097885132 | gen_loss: 0.4826608896255493\n",
            "compute_gradients\n",
            "Epoch: 126 | disc_loss: 2.2970004081726074 | gen_loss: 0.49068066477775574\n",
            "compute_gradients\n",
            "Epoch: 127 | disc_loss: 1.9984780550003052 | gen_loss: 0.48982006311416626\n",
            "compute_gradients\n",
            "Epoch: 128 | disc_loss: 2.3105838298797607 | gen_loss: 0.48742958903312683\n",
            "compute_gradients\n",
            "Epoch: 129 | disc_loss: 2.4287164211273193 | gen_loss: 0.4759635925292969\n",
            "compute_gradients\n",
            "Epoch: 130 | disc_loss: 2.684443712234497 | gen_loss: 0.48868876695632935\n",
            "compute_gradients\n",
            "Epoch: 131 | disc_loss: 2.0201311111450195 | gen_loss: 0.4835435450077057\n",
            "compute_gradients\n",
            "Epoch: 132 | disc_loss: 2.2030081748962402 | gen_loss: 0.48028793931007385\n",
            "compute_gradients\n",
            "Epoch: 133 | disc_loss: 2.2703776359558105 | gen_loss: 0.4800131320953369\n",
            "compute_gradients\n",
            "Epoch: 134 | disc_loss: 2.1937108039855957 | gen_loss: 0.48265236616134644\n",
            "compute_gradients\n",
            "Epoch: 135 | disc_loss: 1.8283066749572754 | gen_loss: 0.48213765025138855\n",
            "compute_gradients\n",
            "Epoch: 136 | disc_loss: 1.7939529418945312 | gen_loss: 0.47919902205467224\n",
            "compute_gradients\n",
            "Epoch: 137 | disc_loss: 1.6787707805633545 | gen_loss: 0.47694510221481323\n",
            "compute_gradients\n",
            "Epoch: 138 | disc_loss: 1.8295735120773315 | gen_loss: 0.48267990350723267\n",
            "compute_gradients\n",
            "Epoch: 139 | disc_loss: 1.905516505241394 | gen_loss: 0.47546035051345825\n",
            "compute_gradients\n",
            "Epoch: 140 | disc_loss: 2.054572582244873 | gen_loss: 0.4724835157394409\n",
            "compute_gradients\n",
            "Epoch: 141 | disc_loss: 1.8566882610321045 | gen_loss: 0.47484642267227173\n",
            "compute_gradients\n",
            "Epoch: 142 | disc_loss: 2.2647078037261963 | gen_loss: 0.48280197381973267\n",
            "compute_gradients\n",
            "Epoch: 143 | disc_loss: 1.9734172821044922 | gen_loss: 0.4738483428955078\n",
            "compute_gradients\n",
            "Epoch: 144 | disc_loss: 1.7129848003387451 | gen_loss: 0.4758041501045227\n",
            "compute_gradients\n",
            "Epoch: 145 | disc_loss: 1.6863312721252441 | gen_loss: 0.47626426815986633\n",
            "compute_gradients\n",
            "Epoch: 146 | disc_loss: 1.7430760860443115 | gen_loss: 0.4711124300956726\n",
            "compute_gradients\n",
            "Epoch: 147 | disc_loss: 1.710903286933899 | gen_loss: 0.476550817489624\n",
            "compute_gradients\n",
            "Epoch: 148 | disc_loss: 1.6514947414398193 | gen_loss: 0.4688117504119873\n",
            "compute_gradients\n",
            "Epoch: 149 | disc_loss: 1.5691325664520264 | gen_loss: 0.47074785828590393\n",
            "compute_gradients\n",
            "Epoch: 150 | disc_loss: 1.3819637298583984 | gen_loss: 0.46550610661506653\n",
            "compute_gradients\n",
            "Epoch: 151 | disc_loss: 1.6698533296585083 | gen_loss: 0.47107574343681335\n",
            "compute_gradients\n",
            "Epoch: 152 | disc_loss: 1.511394739151001 | gen_loss: 0.46551379561424255\n",
            "compute_gradients\n",
            "Epoch: 153 | disc_loss: 1.4921514987945557 | gen_loss: 0.4743155241012573\n",
            "compute_gradients\n",
            "Epoch: 154 | disc_loss: 1.4878740310668945 | gen_loss: 0.4572727084159851\n",
            "compute_gradients\n",
            "Epoch: 155 | disc_loss: 1.7064188718795776 | gen_loss: 0.4710141718387604\n",
            "compute_gradients\n",
            "Epoch: 156 | disc_loss: 1.9146831035614014 | gen_loss: 0.46714305877685547\n",
            "compute_gradients\n",
            "Epoch: 157 | disc_loss: 1.6577258110046387 | gen_loss: 0.47037750482559204\n",
            "compute_gradients\n",
            "Epoch: 158 | disc_loss: 1.6299997568130493 | gen_loss: 0.4678608775138855\n",
            "compute_gradients\n",
            "Epoch: 159 | disc_loss: 1.6464259624481201 | gen_loss: 0.47083279490470886\n",
            "compute_gradients\n",
            "Epoch: 160 | disc_loss: 1.217320442199707 | gen_loss: 0.46134501695632935\n",
            "compute_gradients\n",
            "Epoch: 161 | disc_loss: 1.138379454612732 | gen_loss: 0.4607768952846527\n",
            "compute_gradients\n",
            "Epoch: 162 | disc_loss: 1.1528029441833496 | gen_loss: 0.4626656770706177\n",
            "compute_gradients\n",
            "Epoch: 163 | disc_loss: 1.3887423276901245 | gen_loss: 0.45750483870506287\n",
            "compute_gradients\n",
            "Epoch: 164 | disc_loss: 1.849266767501831 | gen_loss: 0.4548628330230713\n",
            "compute_gradients\n",
            "Epoch: 165 | disc_loss: 1.3734047412872314 | gen_loss: 0.45859265327453613\n",
            "compute_gradients\n",
            "Epoch: 166 | disc_loss: 1.3795591592788696 | gen_loss: 0.4492763876914978\n",
            "compute_gradients\n",
            "Epoch: 167 | disc_loss: 1.4763402938842773 | gen_loss: 0.4632197320461273\n",
            "compute_gradients\n",
            "Epoch: 168 | disc_loss: 1.0898727178573608 | gen_loss: 0.45834892988204956\n",
            "compute_gradients\n",
            "Epoch: 169 | disc_loss: 1.0762706995010376 | gen_loss: 0.4514082670211792\n",
            "compute_gradients\n",
            "Epoch: 170 | disc_loss: 0.972236156463623 | gen_loss: 0.45143523812294006\n",
            "compute_gradients\n",
            "Epoch: 171 | disc_loss: 1.2774007320404053 | gen_loss: 0.4603869318962097\n",
            "compute_gradients\n",
            "Epoch: 172 | disc_loss: 0.7679407000541687 | gen_loss: 0.44730135798454285\n",
            "compute_gradients\n",
            "Epoch: 173 | disc_loss: 1.0462403297424316 | gen_loss: 0.44394075870513916\n",
            "compute_gradients\n",
            "Epoch: 174 | disc_loss: 0.8872660994529724 | gen_loss: 0.44025322794914246\n",
            "compute_gradients\n",
            "Epoch: 175 | disc_loss: 1.0772274732589722 | gen_loss: 0.4482525587081909\n",
            "compute_gradients\n",
            "Epoch: 176 | disc_loss: 0.8578541278839111 | gen_loss: 0.44624242186546326\n",
            "compute_gradients\n",
            "Epoch: 177 | disc_loss: 1.183795690536499 | gen_loss: 0.44852185249328613\n",
            "compute_gradients\n",
            "Epoch: 178 | disc_loss: 0.9899213314056396 | gen_loss: 0.4428810775279999\n",
            "compute_gradients\n",
            "Epoch: 179 | disc_loss: 1.0084844827651978 | gen_loss: 0.44643157720565796\n",
            "compute_gradients\n",
            "Epoch: 180 | disc_loss: 0.7988021969795227 | gen_loss: 0.4443005323410034\n",
            "compute_gradients\n",
            "Epoch: 181 | disc_loss: 0.9038159847259521 | gen_loss: 0.4387788474559784\n",
            "compute_gradients\n",
            "Epoch: 182 | disc_loss: 0.921906590461731 | gen_loss: 0.4357336461544037\n",
            "compute_gradients\n",
            "Epoch: 183 | disc_loss: 1.1111838817596436 | gen_loss: 0.4455934166908264\n",
            "compute_gradients\n",
            "Epoch: 184 | disc_loss: 0.8780786991119385 | gen_loss: 0.4249957799911499\n",
            "compute_gradients\n",
            "Epoch: 185 | disc_loss: 0.5103178024291992 | gen_loss: 0.44353044033050537\n",
            "compute_gradients\n",
            "Epoch: 186 | disc_loss: 0.6223537921905518 | gen_loss: 0.4365242123603821\n",
            "compute_gradients\n",
            "Epoch: 187 | disc_loss: 0.510493278503418 | gen_loss: 0.4371155798435211\n",
            "compute_gradients\n",
            "Epoch: 188 | disc_loss: 0.932117223739624 | gen_loss: 0.43984830379486084\n",
            "compute_gradients\n",
            "Epoch: 189 | disc_loss: 0.8597052693367004 | gen_loss: 0.42919743061065674\n",
            "compute_gradients\n",
            "Epoch: 190 | disc_loss: 0.8348085880279541 | gen_loss: 0.42953187227249146\n",
            "compute_gradients\n",
            "Epoch: 191 | disc_loss: 0.8793867826461792 | gen_loss: 0.44270843267440796\n",
            "compute_gradients\n",
            "Epoch: 192 | disc_loss: 0.4174169600009918 | gen_loss: 0.4369918704032898\n",
            "compute_gradients\n",
            "Epoch: 193 | disc_loss: 0.6406477689743042 | gen_loss: 0.4279245436191559\n",
            "compute_gradients\n",
            "Epoch: 194 | disc_loss: 0.6749979853630066 | gen_loss: 0.42428314685821533\n",
            "compute_gradients\n",
            "Epoch: 195 | disc_loss: 0.8130035400390625 | gen_loss: 0.42418810725212097\n",
            "compute_gradients\n",
            "Epoch: 196 | disc_loss: 0.4359292685985565 | gen_loss: 0.4274096190929413\n",
            "compute_gradients\n",
            "Epoch: 197 | disc_loss: 0.5041238069534302 | gen_loss: 0.41073817014694214\n",
            "compute_gradients\n",
            "Epoch: 198 | disc_loss: 0.6688209772109985 | gen_loss: 0.41716521978378296\n",
            "compute_gradients\n",
            "Epoch: 199 | disc_loss: 0.7024163007736206 | gen_loss: 0.40858379006385803\n",
            "compute_gradients\n",
            "Epoch: 200 | disc_loss: 0.6604310274124146 | gen_loss: 0.4214057922363281\n",
            "compute_gradients\n",
            "Epoch: 201 | disc_loss: 0.6669454574584961 | gen_loss: 0.4127902090549469\n",
            "compute_gradients\n",
            "Epoch: 202 | disc_loss: 0.4866320788860321 | gen_loss: 0.4193612039089203\n",
            "compute_gradients\n",
            "Epoch: 203 | disc_loss: 0.6466078758239746 | gen_loss: 0.4259199798107147\n",
            "compute_gradients\n",
            "Epoch: 204 | disc_loss: 0.5227197408676147 | gen_loss: 0.42071178555488586\n",
            "compute_gradients\n",
            "Epoch: 205 | disc_loss: 0.5654789209365845 | gen_loss: 0.41091275215148926\n",
            "compute_gradients\n",
            "Epoch: 206 | disc_loss: 0.5641266107559204 | gen_loss: 0.4125182628631592\n",
            "compute_gradients\n",
            "Epoch: 207 | disc_loss: 0.5732667446136475 | gen_loss: 0.3939236104488373\n",
            "compute_gradients\n",
            "Epoch: 208 | disc_loss: 0.6346101760864258 | gen_loss: 0.4101592004299164\n",
            "compute_gradients\n",
            "Epoch: 209 | disc_loss: 0.8123637437820435 | gen_loss: 0.41115066409111023\n",
            "compute_gradients\n",
            "Epoch: 210 | disc_loss: 0.4749487340450287 | gen_loss: 0.4078761339187622\n",
            "compute_gradients\n",
            "Epoch: 211 | disc_loss: 0.47997918725013733 | gen_loss: 0.40373504161834717\n",
            "compute_gradients\n",
            "Epoch: 212 | disc_loss: 0.5520706176757812 | gen_loss: 0.3999920189380646\n",
            "compute_gradients\n",
            "Epoch: 213 | disc_loss: 0.591292142868042 | gen_loss: 0.3997929096221924\n",
            "compute_gradients\n",
            "Epoch: 214 | disc_loss: 0.5294192433357239 | gen_loss: 0.41034644842147827\n",
            "compute_gradients\n",
            "Epoch: 215 | disc_loss: 0.5215507745742798 | gen_loss: 0.40007656812667847\n",
            "compute_gradients\n",
            "Epoch: 216 | disc_loss: 0.5568749904632568 | gen_loss: 0.4097718298435211\n",
            "compute_gradients\n",
            "Epoch: 217 | disc_loss: 0.3802288770675659 | gen_loss: 0.40923598408699036\n",
            "compute_gradients\n",
            "Epoch: 218 | disc_loss: 0.46928972005844116 | gen_loss: 0.38172298669815063\n",
            "compute_gradients\n",
            "Epoch: 219 | disc_loss: 0.5125113725662231 | gen_loss: 0.39962077140808105\n",
            "compute_gradients\n",
            "Epoch: 220 | disc_loss: 0.5429418683052063 | gen_loss: 0.39183592796325684\n",
            "compute_gradients\n",
            "Epoch: 221 | disc_loss: 0.7031934261322021 | gen_loss: 0.39083370566368103\n",
            "compute_gradients\n",
            "Epoch: 222 | disc_loss: 0.40523451566696167 | gen_loss: 0.403458833694458\n",
            "compute_gradients\n",
            "Epoch: 223 | disc_loss: 0.39204689860343933 | gen_loss: 0.3912254273891449\n",
            "compute_gradients\n",
            "Epoch: 224 | disc_loss: 0.5343078374862671 | gen_loss: 0.3818058371543884\n",
            "compute_gradients\n",
            "Epoch: 225 | disc_loss: 0.6758339405059814 | gen_loss: 0.38068875670433044\n",
            "compute_gradients\n",
            "Epoch: 226 | disc_loss: 0.5548443794250488 | gen_loss: 0.37524983286857605\n",
            "compute_gradients\n",
            "Epoch: 227 | disc_loss: 0.514694094657898 | gen_loss: 0.3954121768474579\n",
            "compute_gradients\n",
            "Epoch: 228 | disc_loss: 0.5456110239028931 | gen_loss: 0.3798378109931946\n",
            "compute_gradients\n",
            "Epoch: 229 | disc_loss: 0.43777748942375183 | gen_loss: 0.39643964171409607\n",
            "compute_gradients\n",
            "Epoch: 230 | disc_loss: 0.5293598175048828 | gen_loss: 0.3826253414154053\n",
            "compute_gradients\n",
            "Epoch: 231 | disc_loss: 0.5269663333892822 | gen_loss: 0.38728418946266174\n",
            "compute_gradients\n",
            "Epoch: 232 | disc_loss: 0.49893930554389954 | gen_loss: 0.3955579698085785\n",
            "compute_gradients\n",
            "Epoch: 233 | disc_loss: 0.5266458988189697 | gen_loss: 0.3799211084842682\n",
            "compute_gradients\n",
            "Epoch: 234 | disc_loss: 0.5192035436630249 | gen_loss: 0.382223904132843\n",
            "compute_gradients\n",
            "Epoch: 235 | disc_loss: 0.48221954703330994 | gen_loss: 0.3836180567741394\n",
            "compute_gradients\n",
            "Epoch: 236 | disc_loss: 0.6017229557037354 | gen_loss: 0.3722366988658905\n",
            "compute_gradients\n",
            "Epoch: 237 | disc_loss: 0.7528743743896484 | gen_loss: 0.3714217245578766\n",
            "compute_gradients\n",
            "Epoch: 238 | disc_loss: 0.34860849380493164 | gen_loss: 0.375703364610672\n",
            "compute_gradients\n",
            "Epoch: 239 | disc_loss: 0.6216967105865479 | gen_loss: 0.3748590350151062\n",
            "compute_gradients\n",
            "Epoch: 240 | disc_loss: 0.551732063293457 | gen_loss: 0.3813411593437195\n",
            "compute_gradients\n",
            "Epoch: 241 | disc_loss: 0.6742372512817383 | gen_loss: 0.38358113169670105\n",
            "compute_gradients\n",
            "Epoch: 242 | disc_loss: 0.39522454142570496 | gen_loss: 0.384160578250885\n",
            "compute_gradients\n",
            "Epoch: 243 | disc_loss: 0.7203567028045654 | gen_loss: 0.3651035726070404\n",
            "compute_gradients\n",
            "Epoch: 244 | disc_loss: 0.5961066484451294 | gen_loss: 0.37840166687965393\n",
            "compute_gradients\n",
            "Epoch: 245 | disc_loss: 0.4764535427093506 | gen_loss: 0.35620811581611633\n",
            "compute_gradients\n",
            "Epoch: 246 | disc_loss: 0.4787697196006775 | gen_loss: 0.3548203408718109\n",
            "compute_gradients\n",
            "Epoch: 247 | disc_loss: 0.5114248991012573 | gen_loss: 0.3648819029331207\n",
            "compute_gradients\n",
            "Epoch: 248 | disc_loss: 0.3307340741157532 | gen_loss: 0.3667350709438324\n",
            "compute_gradients\n",
            "Epoch: 249 | disc_loss: 0.5597776770591736 | gen_loss: 0.3638263940811157\n",
            "compute_gradients\n",
            "Epoch: 250 | disc_loss: 0.6854749917984009 | gen_loss: 0.37434205412864685\n",
            "compute_gradients\n",
            "Epoch: 251 | disc_loss: 0.4756728708744049 | gen_loss: 0.3643170893192291\n",
            "compute_gradients\n",
            "Epoch: 252 | disc_loss: 0.4867798686027527 | gen_loss: 0.3664701581001282\n",
            "compute_gradients\n",
            "Epoch: 253 | disc_loss: 0.438108891248703 | gen_loss: 0.3450321853160858\n",
            "compute_gradients\n",
            "Epoch: 254 | disc_loss: 0.6891493201255798 | gen_loss: 0.35165292024612427\n",
            "compute_gradients\n",
            "Epoch: 255 | disc_loss: 0.5187574625015259 | gen_loss: 0.34011590480804443\n",
            "compute_gradients\n",
            "Epoch: 256 | disc_loss: 0.5380589365959167 | gen_loss: 0.34736523032188416\n",
            "compute_gradients\n",
            "Epoch: 257 | disc_loss: 0.5573352575302124 | gen_loss: 0.34548458456993103\n",
            "compute_gradients\n",
            "Epoch: 258 | disc_loss: 0.43239617347717285 | gen_loss: 0.3645525872707367\n",
            "compute_gradients\n",
            "Epoch: 259 | disc_loss: 0.5069001913070679 | gen_loss: 0.34220150113105774\n",
            "compute_gradients\n",
            "Epoch: 260 | disc_loss: 0.4202418923377991 | gen_loss: 0.35929009318351746\n",
            "compute_gradients\n",
            "Epoch: 261 | disc_loss: 0.5325943231582642 | gen_loss: 0.3350882828235626\n",
            "compute_gradients\n",
            "Epoch: 262 | disc_loss: 0.518128514289856 | gen_loss: 0.34727245569229126\n",
            "compute_gradients\n",
            "Epoch: 263 | disc_loss: 0.5861598253250122 | gen_loss: 0.3489256799221039\n",
            "compute_gradients\n",
            "Epoch: 264 | disc_loss: 0.5565091967582703 | gen_loss: 0.3617842495441437\n",
            "compute_gradients\n",
            "Epoch: 265 | disc_loss: 0.4602261781692505 | gen_loss: 0.34019893407821655\n",
            "compute_gradients\n",
            "Epoch: 266 | disc_loss: 0.6250528693199158 | gen_loss: 0.35265088081359863\n",
            "compute_gradients\n",
            "Epoch: 267 | disc_loss: 0.5480396747589111 | gen_loss: 0.34325650334358215\n",
            "compute_gradients\n",
            "Epoch: 268 | disc_loss: 0.4121728539466858 | gen_loss: 0.33957019448280334\n",
            "compute_gradients\n",
            "Epoch: 269 | disc_loss: 0.49455147981643677 | gen_loss: 0.3432517349720001\n",
            "compute_gradients\n",
            "Epoch: 270 | disc_loss: 0.5522675514221191 | gen_loss: 0.34374046325683594\n",
            "compute_gradients\n",
            "Epoch: 271 | disc_loss: 0.451840341091156 | gen_loss: 0.3478257656097412\n",
            "compute_gradients\n",
            "Epoch: 272 | disc_loss: 0.5757272243499756 | gen_loss: 0.345031201839447\n",
            "compute_gradients\n",
            "Epoch: 273 | disc_loss: 0.5941087007522583 | gen_loss: 0.3494192361831665\n",
            "compute_gradients\n",
            "Epoch: 274 | disc_loss: 0.4916883111000061 | gen_loss: 0.33495742082595825\n",
            "compute_gradients\n",
            "Epoch: 275 | disc_loss: 0.5863820910453796 | gen_loss: 0.3428030014038086\n",
            "compute_gradients\n",
            "Epoch: 276 | disc_loss: 0.42535024881362915 | gen_loss: 0.3480561375617981\n",
            "compute_gradients\n",
            "Epoch: 277 | disc_loss: 0.5048179030418396 | gen_loss: 0.3357636332511902\n",
            "compute_gradients\n",
            "Epoch: 278 | disc_loss: 0.39920538663864136 | gen_loss: 0.333935022354126\n",
            "compute_gradients\n",
            "Epoch: 279 | disc_loss: 0.5325967669487 | gen_loss: 0.3331899642944336\n",
            "compute_gradients\n",
            "Epoch: 280 | disc_loss: 0.5141719579696655 | gen_loss: 0.3482428789138794\n",
            "compute_gradients\n",
            "Epoch: 281 | disc_loss: 0.40951550006866455 | gen_loss: 0.32277652621269226\n",
            "compute_gradients\n",
            "Epoch: 282 | disc_loss: 0.47688040137290955 | gen_loss: 0.3286300599575043\n",
            "compute_gradients\n",
            "Epoch: 283 | disc_loss: 0.5285645723342896 | gen_loss: 0.33623844385147095\n",
            "compute_gradients\n",
            "Epoch: 284 | disc_loss: 0.816167950630188 | gen_loss: 0.32316914200782776\n",
            "compute_gradients\n",
            "Epoch: 285 | disc_loss: 0.5059751272201538 | gen_loss: 0.3315359055995941\n",
            "compute_gradients\n",
            "Epoch: 286 | disc_loss: 0.40726912021636963 | gen_loss: 0.31603777408599854\n",
            "compute_gradients\n",
            "Epoch: 287 | disc_loss: 0.41134190559387207 | gen_loss: 0.3195757567882538\n",
            "compute_gradients\n",
            "Epoch: 288 | disc_loss: 0.46144425868988037 | gen_loss: 0.3273943364620209\n",
            "compute_gradients\n",
            "Epoch: 289 | disc_loss: 0.344373494386673 | gen_loss: 0.33373451232910156\n",
            "compute_gradients\n",
            "Epoch: 290 | disc_loss: 0.3850824236869812 | gen_loss: 0.31350287795066833\n",
            "compute_gradients\n",
            "Epoch: 291 | disc_loss: 0.5073956847190857 | gen_loss: 0.3376118838787079\n",
            "compute_gradients\n",
            "Epoch: 292 | disc_loss: 0.4687047004699707 | gen_loss: 0.31058573722839355\n",
            "compute_gradients\n",
            "Epoch: 293 | disc_loss: 0.4555152654647827 | gen_loss: 0.32227200269699097\n",
            "compute_gradients\n",
            "Epoch: 294 | disc_loss: 0.3190878927707672 | gen_loss: 0.3091021478176117\n",
            "compute_gradients\n",
            "Epoch: 295 | disc_loss: 0.4634169340133667 | gen_loss: 0.31966185569763184\n",
            "compute_gradients\n",
            "Epoch: 296 | disc_loss: 0.5331969261169434 | gen_loss: 0.3044414818286896\n",
            "compute_gradients\n",
            "Epoch: 297 | disc_loss: 0.46834927797317505 | gen_loss: 0.31676197052001953\n",
            "compute_gradients\n",
            "Epoch: 298 | disc_loss: 0.39071905612945557 | gen_loss: 0.31128546595573425\n",
            "compute_gradients\n",
            "Epoch: 299 | disc_loss: 0.36207467317581177 | gen_loss: 0.3105155825614929\n",
            "compute_gradients\n",
            "Epoch: 300 | disc_loss: 0.4861396253108978 | gen_loss: 0.2990114986896515\n",
            "compute_gradients\n",
            "Epoch: 301 | disc_loss: 0.511059045791626 | gen_loss: 0.2928236126899719\n",
            "compute_gradients\n",
            "Epoch: 302 | disc_loss: 0.6370604038238525 | gen_loss: 0.302598237991333\n",
            "compute_gradients\n",
            "Epoch: 303 | disc_loss: 0.48450130224227905 | gen_loss: 0.31047260761260986\n",
            "compute_gradients\n",
            "Epoch: 304 | disc_loss: 0.5385366678237915 | gen_loss: 0.2952580153942108\n",
            "compute_gradients\n",
            "Epoch: 305 | disc_loss: 0.4047704339027405 | gen_loss: 0.2951033115386963\n",
            "compute_gradients\n",
            "Epoch: 306 | disc_loss: 0.4599390923976898 | gen_loss: 0.30244380235671997\n",
            "compute_gradients\n",
            "Epoch: 307 | disc_loss: 0.46663665771484375 | gen_loss: 0.3200518488883972\n",
            "compute_gradients\n",
            "Epoch: 308 | disc_loss: 0.3680921196937561 | gen_loss: 0.30104801058769226\n",
            "compute_gradients\n",
            "Epoch: 309 | disc_loss: 0.4540637731552124 | gen_loss: 0.2993927597999573\n",
            "compute_gradients\n",
            "Epoch: 310 | disc_loss: 0.4252753257751465 | gen_loss: 0.3096744418144226\n",
            "compute_gradients\n",
            "Epoch: 311 | disc_loss: 0.47751328349113464 | gen_loss: 0.31359347701072693\n",
            "compute_gradients\n",
            "Epoch: 312 | disc_loss: 0.44313788414001465 | gen_loss: 0.3209651708602905\n",
            "compute_gradients\n",
            "Epoch: 313 | disc_loss: 0.4117518365383148 | gen_loss: 0.28873804211616516\n",
            "compute_gradients\n",
            "Epoch: 314 | disc_loss: 0.6646407842636108 | gen_loss: 0.3061926066875458\n",
            "compute_gradients\n",
            "Epoch: 315 | disc_loss: 0.44506901502609253 | gen_loss: 0.3036341071128845\n",
            "compute_gradients\n",
            "Epoch: 316 | disc_loss: 0.4665866494178772 | gen_loss: 0.2850305736064911\n",
            "compute_gradients\n",
            "Epoch: 317 | disc_loss: 0.4219413995742798 | gen_loss: 0.2992821931838989\n",
            "compute_gradients\n",
            "Epoch: 318 | disc_loss: 0.4326767027378082 | gen_loss: 0.292265921831131\n",
            "compute_gradients\n",
            "Epoch: 319 | disc_loss: 0.4048500657081604 | gen_loss: 0.30633628368377686\n",
            "compute_gradients\n",
            "Epoch: 320 | disc_loss: 0.3516201376914978 | gen_loss: 0.3054909110069275\n",
            "compute_gradients\n",
            "Epoch: 321 | disc_loss: 0.4204341769218445 | gen_loss: 0.2839910686016083\n",
            "compute_gradients\n",
            "Epoch: 322 | disc_loss: 0.4281705617904663 | gen_loss: 0.2945360243320465\n",
            "compute_gradients\n",
            "Epoch: 323 | disc_loss: 0.4540095031261444 | gen_loss: 0.2975054979324341\n",
            "compute_gradients\n",
            "Epoch: 324 | disc_loss: 0.37427660822868347 | gen_loss: 0.2664475440979004\n",
            "compute_gradients\n",
            "Epoch: 325 | disc_loss: 0.429110586643219 | gen_loss: 0.3051495850086212\n",
            "compute_gradients\n",
            "Epoch: 326 | disc_loss: 0.48247626423835754 | gen_loss: 0.28280109167099\n",
            "compute_gradients\n",
            "Epoch: 327 | disc_loss: 0.44113779067993164 | gen_loss: 0.2879040539264679\n",
            "compute_gradients\n",
            "Epoch: 328 | disc_loss: 0.4216453731060028 | gen_loss: 0.30397260189056396\n",
            "compute_gradients\n",
            "Epoch: 329 | disc_loss: 0.45154884457588196 | gen_loss: 0.2817280888557434\n",
            "compute_gradients\n",
            "Epoch: 330 | disc_loss: 0.5323899984359741 | gen_loss: 0.2805819809436798\n",
            "compute_gradients\n",
            "Epoch: 331 | disc_loss: 0.3696291446685791 | gen_loss: 0.3021519184112549\n",
            "compute_gradients\n",
            "Epoch: 332 | disc_loss: 0.433976948261261 | gen_loss: 0.2989351153373718\n",
            "compute_gradients\n",
            "Epoch: 333 | disc_loss: 0.4869401454925537 | gen_loss: 0.2697453498840332\n",
            "compute_gradients\n",
            "Epoch: 334 | disc_loss: 0.4194416403770447 | gen_loss: 0.29608792066574097\n",
            "compute_gradients\n",
            "Epoch: 335 | disc_loss: 0.38786008954048157 | gen_loss: 0.27636948227882385\n",
            "compute_gradients\n",
            "Epoch: 336 | disc_loss: 0.34131038188934326 | gen_loss: 0.2961018681526184\n",
            "compute_gradients\n",
            "Epoch: 337 | disc_loss: 0.5197048187255859 | gen_loss: 0.28978070616722107\n",
            "compute_gradients\n",
            "Epoch: 338 | disc_loss: 0.4279699921607971 | gen_loss: 0.286367267370224\n",
            "compute_gradients\n",
            "Epoch: 339 | disc_loss: 0.43759894371032715 | gen_loss: 0.27582594752311707\n",
            "compute_gradients\n",
            "Epoch: 340 | disc_loss: 0.3792070746421814 | gen_loss: 0.286474347114563\n",
            "compute_gradients\n",
            "Epoch: 341 | disc_loss: 0.38535594940185547 | gen_loss: 0.279519259929657\n",
            "compute_gradients\n",
            "Epoch: 342 | disc_loss: 0.4312900900840759 | gen_loss: 0.273440957069397\n",
            "compute_gradients\n",
            "Epoch: 343 | disc_loss: 0.3848934471607208 | gen_loss: 0.28392311930656433\n",
            "compute_gradients\n",
            "Epoch: 344 | disc_loss: 0.3590371906757355 | gen_loss: 0.2607269287109375\n",
            "compute_gradients\n",
            "Epoch: 345 | disc_loss: 0.4325655400753021 | gen_loss: 0.2720460891723633\n",
            "compute_gradients\n",
            "Epoch: 346 | disc_loss: 0.4197852909564972 | gen_loss: 0.2815963327884674\n",
            "compute_gradients\n",
            "Epoch: 347 | disc_loss: 0.3951343297958374 | gen_loss: 0.2623445391654968\n",
            "compute_gradients\n",
            "Epoch: 348 | disc_loss: 0.352949857711792 | gen_loss: 0.2661796808242798\n",
            "compute_gradients\n",
            "Epoch: 349 | disc_loss: 0.36891916394233704 | gen_loss: 0.2593475580215454\n",
            "compute_gradients\n",
            "Epoch: 350 | disc_loss: 0.35298505425453186 | gen_loss: 0.2711993157863617\n",
            "compute_gradients\n",
            "Epoch: 351 | disc_loss: 0.4099750518798828 | gen_loss: 0.2768127918243408\n",
            "compute_gradients\n",
            "Epoch: 352 | disc_loss: 0.4115542769432068 | gen_loss: 0.26621025800704956\n",
            "compute_gradients\n",
            "Epoch: 353 | disc_loss: 0.4033641219139099 | gen_loss: 0.26825636625289917\n",
            "compute_gradients\n",
            "Epoch: 354 | disc_loss: 0.4910903871059418 | gen_loss: 0.2556110918521881\n",
            "compute_gradients\n",
            "Epoch: 355 | disc_loss: 0.44634750485420227 | gen_loss: 0.2473977506160736\n",
            "compute_gradients\n",
            "Epoch: 356 | disc_loss: 0.3996472954750061 | gen_loss: 0.27162352204322815\n",
            "compute_gradients\n",
            "Epoch: 357 | disc_loss: 0.4330355226993561 | gen_loss: 0.2717426121234894\n",
            "compute_gradients\n",
            "Epoch: 358 | disc_loss: 0.5396794676780701 | gen_loss: 0.26254332065582275\n",
            "compute_gradients\n",
            "Epoch: 359 | disc_loss: 0.29391244053840637 | gen_loss: 0.26701611280441284\n",
            "compute_gradients\n",
            "Epoch: 360 | disc_loss: 0.36361032724380493 | gen_loss: 0.2626603841781616\n",
            "compute_gradients\n",
            "Epoch: 361 | disc_loss: 0.4008624851703644 | gen_loss: 0.257725328207016\n",
            "compute_gradients\n",
            "Epoch: 362 | disc_loss: 0.4175611138343811 | gen_loss: 0.2860679030418396\n",
            "compute_gradients\n",
            "Epoch: 363 | disc_loss: 0.3746148943901062 | gen_loss: 0.2887181043624878\n",
            "compute_gradients\n",
            "Epoch: 364 | disc_loss: 0.3470553755760193 | gen_loss: 0.26206734776496887\n",
            "compute_gradients\n",
            "Epoch: 365 | disc_loss: 0.3705931603908539 | gen_loss: 0.2306336909532547\n",
            "compute_gradients\n",
            "Epoch: 366 | disc_loss: 0.35562947392463684 | gen_loss: 0.253756046295166\n",
            "compute_gradients\n",
            "Epoch: 367 | disc_loss: 0.39368414878845215 | gen_loss: 0.28091466426849365\n",
            "compute_gradients\n",
            "Epoch: 368 | disc_loss: 0.34359997510910034 | gen_loss: 0.2632811963558197\n",
            "compute_gradients\n",
            "Epoch: 369 | disc_loss: 0.4189937114715576 | gen_loss: 0.25972115993499756\n",
            "compute_gradients\n",
            "Epoch: 370 | disc_loss: 0.47678661346435547 | gen_loss: 0.23788635432720184\n",
            "compute_gradients\n",
            "Epoch: 371 | disc_loss: 0.41149768233299255 | gen_loss: 0.24851270020008087\n",
            "compute_gradients\n",
            "Epoch: 372 | disc_loss: 0.3779621124267578 | gen_loss: 0.25951823592185974\n",
            "compute_gradients\n",
            "Epoch: 373 | disc_loss: 0.45394957065582275 | gen_loss: 0.23807914555072784\n",
            "compute_gradients\n",
            "Epoch: 374 | disc_loss: 0.3687986135482788 | gen_loss: 0.25029295682907104\n",
            "compute_gradients\n",
            "Epoch: 375 | disc_loss: 0.4187926650047302 | gen_loss: 0.2578989267349243\n",
            "compute_gradients\n",
            "Epoch: 376 | disc_loss: 0.41525354981422424 | gen_loss: 0.2341001331806183\n",
            "compute_gradients\n",
            "Epoch: 377 | disc_loss: 0.32225096225738525 | gen_loss: 0.2584220767021179\n",
            "compute_gradients\n",
            "Epoch: 378 | disc_loss: 0.34489089250564575 | gen_loss: 0.23612633347511292\n",
            "compute_gradients\n",
            "Epoch: 379 | disc_loss: 0.3300710618495941 | gen_loss: 0.24281901121139526\n",
            "compute_gradients\n",
            "Epoch: 380 | disc_loss: 0.3979676365852356 | gen_loss: 0.25422152876853943\n",
            "compute_gradients\n",
            "Epoch: 381 | disc_loss: 0.42508846521377563 | gen_loss: 0.2397826761007309\n",
            "compute_gradients\n",
            "Epoch: 382 | disc_loss: 0.30076172947883606 | gen_loss: 0.2538808584213257\n",
            "compute_gradients\n",
            "Epoch: 383 | disc_loss: 0.4115069806575775 | gen_loss: 0.2422753870487213\n",
            "compute_gradients\n",
            "Epoch: 384 | disc_loss: 0.43658965826034546 | gen_loss: 0.23946088552474976\n",
            "compute_gradients\n",
            "Epoch: 385 | disc_loss: 0.3778095245361328 | gen_loss: 0.2528001368045807\n",
            "compute_gradients\n",
            "Epoch: 386 | disc_loss: 0.40672749280929565 | gen_loss: 0.22876740992069244\n",
            "compute_gradients\n",
            "Epoch: 387 | disc_loss: 0.37787190079689026 | gen_loss: 0.25164559483528137\n",
            "compute_gradients\n",
            "Epoch: 388 | disc_loss: 0.38478273153305054 | gen_loss: 0.24029184877872467\n",
            "compute_gradients\n",
            "Epoch: 389 | disc_loss: 0.3945411443710327 | gen_loss: 0.24701742827892303\n",
            "compute_gradients\n",
            "Epoch: 390 | disc_loss: 0.4039517045021057 | gen_loss: 0.25327372550964355\n",
            "compute_gradients\n",
            "Epoch: 391 | disc_loss: 0.4235506057739258 | gen_loss: 0.2318068891763687\n",
            "compute_gradients\n",
            "Epoch: 392 | disc_loss: 0.3729141056537628 | gen_loss: 0.2366647869348526\n",
            "compute_gradients\n",
            "Epoch: 393 | disc_loss: 0.32787826657295227 | gen_loss: 0.24369929730892181\n",
            "compute_gradients\n",
            "Epoch: 394 | disc_loss: 0.36824649572372437 | gen_loss: 0.24424435198307037\n",
            "compute_gradients\n",
            "Epoch: 395 | disc_loss: 0.38150545954704285 | gen_loss: 0.227618008852005\n",
            "compute_gradients\n",
            "Epoch: 396 | disc_loss: 0.3942069411277771 | gen_loss: 0.2437748908996582\n",
            "compute_gradients\n",
            "Epoch: 397 | disc_loss: 0.3729139566421509 | gen_loss: 0.2562258243560791\n",
            "compute_gradients\n",
            "Epoch: 398 | disc_loss: 0.3868439793586731 | gen_loss: 0.24859675765037537\n",
            "compute_gradients\n",
            "Epoch: 399 | disc_loss: 0.4291779398918152 | gen_loss: 0.2557183504104614\n",
            "compute_gradients\n",
            "Epoch: 400 | disc_loss: 0.38816219568252563 | gen_loss: 0.23799699544906616\n",
            "compute_gradients\n",
            "Epoch: 401 | disc_loss: 0.36592739820480347 | gen_loss: 0.2410491704940796\n",
            "compute_gradients\n",
            "Epoch: 402 | disc_loss: 0.3386616110801697 | gen_loss: 0.227580264210701\n",
            "compute_gradients\n",
            "Epoch: 403 | disc_loss: 0.40032318234443665 | gen_loss: 0.23462718725204468\n",
            "compute_gradients\n",
            "Epoch: 404 | disc_loss: 0.41679415106773376 | gen_loss: 0.2176504284143448\n",
            "compute_gradients\n",
            "Epoch: 405 | disc_loss: 0.40260568261146545 | gen_loss: 0.26207253336906433\n",
            "compute_gradients\n",
            "Epoch: 406 | disc_loss: 0.45494529604911804 | gen_loss: 0.22650228440761566\n",
            "compute_gradients\n",
            "Epoch: 407 | disc_loss: 0.32695072889328003 | gen_loss: 0.24612590670585632\n",
            "compute_gradients\n",
            "Epoch: 408 | disc_loss: 0.3739144206047058 | gen_loss: 0.23502317070960999\n",
            "compute_gradients\n",
            "Epoch: 409 | disc_loss: 0.3280962109565735 | gen_loss: 0.25128036737442017\n",
            "compute_gradients\n",
            "Epoch: 410 | disc_loss: 0.382732093334198 | gen_loss: 0.2129715532064438\n",
            "compute_gradients\n",
            "Epoch: 411 | disc_loss: 0.3855512738227844 | gen_loss: 0.21734899282455444\n",
            "compute_gradients\n",
            "Epoch: 412 | disc_loss: 0.41767174005508423 | gen_loss: 0.21561695635318756\n",
            "compute_gradients\n",
            "Epoch: 413 | disc_loss: 0.37643396854400635 | gen_loss: 0.2220887988805771\n",
            "compute_gradients\n",
            "Epoch: 414 | disc_loss: 0.3997419476509094 | gen_loss: 0.2497102916240692\n",
            "compute_gradients\n",
            "Epoch: 415 | disc_loss: 0.36295223236083984 | gen_loss: 0.22957511246204376\n",
            "compute_gradients\n",
            "Epoch: 416 | disc_loss: 0.3752845823764801 | gen_loss: 0.21628692746162415\n",
            "compute_gradients\n",
            "Epoch: 417 | disc_loss: 0.36179113388061523 | gen_loss: 0.2324122190475464\n",
            "compute_gradients\n",
            "Epoch: 418 | disc_loss: 0.4464474320411682 | gen_loss: 0.22555139660835266\n",
            "compute_gradients\n",
            "Epoch: 419 | disc_loss: 0.3928179442882538 | gen_loss: 0.23299579322338104\n",
            "compute_gradients\n",
            "Epoch: 420 | disc_loss: 0.36871957778930664 | gen_loss: 0.21579742431640625\n",
            "compute_gradients\n",
            "Epoch: 421 | disc_loss: 0.37846022844314575 | gen_loss: 0.2252364605665207\n",
            "compute_gradients\n",
            "Epoch: 422 | disc_loss: 0.39507097005844116 | gen_loss: 0.23176290094852448\n",
            "compute_gradients\n",
            "Epoch: 423 | disc_loss: 0.3605816960334778 | gen_loss: 0.2555864453315735\n",
            "compute_gradients\n",
            "Epoch: 424 | disc_loss: 0.4134403169155121 | gen_loss: 0.21133935451507568\n",
            "compute_gradients\n",
            "Epoch: 425 | disc_loss: 0.368967205286026 | gen_loss: 0.228226438164711\n",
            "compute_gradients\n",
            "Epoch: 426 | disc_loss: 0.4251384139060974 | gen_loss: 0.23367370665073395\n",
            "compute_gradients\n",
            "Epoch: 427 | disc_loss: 0.39844027161598206 | gen_loss: 0.23564785718917847\n",
            "compute_gradients\n",
            "Epoch: 428 | disc_loss: 0.35952553153038025 | gen_loss: 0.21121081709861755\n",
            "compute_gradients\n",
            "Epoch: 429 | disc_loss: 0.3477093577384949 | gen_loss: 0.21533361077308655\n",
            "compute_gradients\n",
            "Epoch: 430 | disc_loss: 0.3649419844150543 | gen_loss: 0.21066057682037354\n",
            "compute_gradients\n",
            "Epoch: 431 | disc_loss: 0.40933793783187866 | gen_loss: 0.2174062877893448\n",
            "compute_gradients\n",
            "Epoch: 432 | disc_loss: 0.38237860798835754 | gen_loss: 0.2283514142036438\n",
            "compute_gradients\n",
            "Epoch: 433 | disc_loss: 0.40057849884033203 | gen_loss: 0.24127280712127686\n",
            "compute_gradients\n",
            "Epoch: 434 | disc_loss: 0.4610779285430908 | gen_loss: 0.2195219099521637\n",
            "compute_gradients\n",
            "Epoch: 435 | disc_loss: 0.40172716975212097 | gen_loss: 0.2256021350622177\n",
            "compute_gradients\n",
            "Epoch: 436 | disc_loss: 0.3713610768318176 | gen_loss: 0.23190084099769592\n",
            "compute_gradients\n",
            "Epoch: 437 | disc_loss: 0.3170728385448456 | gen_loss: 0.22337885200977325\n",
            "compute_gradients\n",
            "Epoch: 438 | disc_loss: 0.3696276545524597 | gen_loss: 0.20315055549144745\n",
            "compute_gradients\n",
            "Epoch: 439 | disc_loss: 0.3302297592163086 | gen_loss: 0.21763141453266144\n",
            "compute_gradients\n",
            "Epoch: 440 | disc_loss: 0.4087541699409485 | gen_loss: 0.23368847370147705\n",
            "compute_gradients\n",
            "Epoch: 441 | disc_loss: 0.3732185959815979 | gen_loss: 0.2087095081806183\n",
            "compute_gradients\n",
            "Epoch: 442 | disc_loss: 0.369981050491333 | gen_loss: 0.22269287705421448\n",
            "compute_gradients\n",
            "Epoch: 443 | disc_loss: 0.3944060802459717 | gen_loss: 0.21631981432437897\n",
            "compute_gradients\n",
            "Epoch: 444 | disc_loss: 0.39735183119773865 | gen_loss: 0.22418594360351562\n",
            "compute_gradients\n",
            "Epoch: 445 | disc_loss: 0.36574020981788635 | gen_loss: 0.20834802091121674\n",
            "compute_gradients\n",
            "Epoch: 446 | disc_loss: 0.3579822778701782 | gen_loss: 0.2391599714756012\n",
            "compute_gradients\n",
            "Epoch: 447 | disc_loss: 0.4723132252693176 | gen_loss: 0.2381451427936554\n",
            "compute_gradients\n",
            "Epoch: 448 | disc_loss: 0.3786863684654236 | gen_loss: 0.21186628937721252\n",
            "compute_gradients\n",
            "Epoch: 449 | disc_loss: 0.396282434463501 | gen_loss: 0.21717756986618042\n",
            "compute_gradients\n",
            "Epoch: 450 | disc_loss: 0.41678282618522644 | gen_loss: 0.2217119336128235\n",
            "compute_gradients\n",
            "Epoch: 451 | disc_loss: 0.3617461323738098 | gen_loss: 0.21537308394908905\n",
            "compute_gradients\n",
            "Epoch: 452 | disc_loss: 0.42319801449775696 | gen_loss: 0.21291498839855194\n",
            "compute_gradients\n",
            "Epoch: 453 | disc_loss: 0.41839027404785156 | gen_loss: 0.24581120908260345\n",
            "compute_gradients\n",
            "Epoch: 454 | disc_loss: 0.40973329544067383 | gen_loss: 0.20376254618167877\n",
            "compute_gradients\n",
            "Epoch: 455 | disc_loss: 0.36976754665374756 | gen_loss: 0.21604545414447784\n",
            "compute_gradients\n",
            "Epoch: 456 | disc_loss: 0.3948974609375 | gen_loss: 0.21655502915382385\n",
            "compute_gradients\n",
            "Epoch: 457 | disc_loss: 0.4348701536655426 | gen_loss: 0.20303098857402802\n",
            "compute_gradients\n",
            "Epoch: 458 | disc_loss: 0.3758295178413391 | gen_loss: 0.20756632089614868\n",
            "compute_gradients\n",
            "Epoch: 459 | disc_loss: 0.4293339252471924 | gen_loss: 0.2069828063249588\n",
            "compute_gradients\n",
            "Epoch: 460 | disc_loss: 0.449872225522995 | gen_loss: 0.20530682802200317\n",
            "compute_gradients\n",
            "Epoch: 461 | disc_loss: 0.3889584541320801 | gen_loss: 0.20580612123012543\n",
            "compute_gradients\n",
            "Epoch: 462 | disc_loss: 0.45413947105407715 | gen_loss: 0.18834751844406128\n",
            "compute_gradients\n",
            "Epoch: 463 | disc_loss: 0.3984069526195526 | gen_loss: 0.18817561864852905\n",
            "compute_gradients\n",
            "Epoch: 464 | disc_loss: 0.41872233152389526 | gen_loss: 0.1964951902627945\n",
            "compute_gradients\n",
            "Epoch: 465 | disc_loss: 0.384592205286026 | gen_loss: 0.22958292067050934\n",
            "compute_gradients\n",
            "Epoch: 466 | disc_loss: 0.43313226103782654 | gen_loss: 0.20392431318759918\n",
            "compute_gradients\n",
            "Epoch: 467 | disc_loss: 0.47891461849212646 | gen_loss: 0.19986850023269653\n",
            "compute_gradients\n",
            "Epoch: 468 | disc_loss: 0.37188124656677246 | gen_loss: 0.21336308121681213\n",
            "compute_gradients\n",
            "Epoch: 469 | disc_loss: 0.44342470169067383 | gen_loss: 0.1955442875623703\n",
            "compute_gradients\n",
            "Epoch: 470 | disc_loss: 0.3954848051071167 | gen_loss: 0.21320585906505585\n",
            "compute_gradients\n",
            "Epoch: 471 | disc_loss: 0.39720648527145386 | gen_loss: 0.21604131162166595\n",
            "compute_gradients\n",
            "Epoch: 472 | disc_loss: 0.3941829204559326 | gen_loss: 0.20918148756027222\n",
            "compute_gradients\n",
            "Epoch: 473 | disc_loss: 0.46420854330062866 | gen_loss: 0.20447058975696564\n",
            "compute_gradients\n",
            "Epoch: 474 | disc_loss: 0.4328823685646057 | gen_loss: 0.20359502732753754\n",
            "compute_gradients\n",
            "Epoch: 475 | disc_loss: 0.40748345851898193 | gen_loss: 0.1884726583957672\n",
            "compute_gradients\n",
            "Epoch: 476 | disc_loss: 0.3729401230812073 | gen_loss: 0.2180304378271103\n",
            "compute_gradients\n",
            "Epoch: 477 | disc_loss: 0.45754992961883545 | gen_loss: 0.1825629025697708\n",
            "compute_gradients\n",
            "Epoch: 478 | disc_loss: 0.40786758065223694 | gen_loss: 0.1916331648826599\n",
            "compute_gradients\n",
            "Epoch: 479 | disc_loss: 0.4094159007072449 | gen_loss: 0.18077269196510315\n",
            "compute_gradients\n",
            "Epoch: 480 | disc_loss: 0.41167736053466797 | gen_loss: 0.2075173407793045\n",
            "compute_gradients\n",
            "Epoch: 481 | disc_loss: 0.4106065332889557 | gen_loss: 0.18798916041851044\n",
            "compute_gradients\n",
            "Epoch: 482 | disc_loss: 0.3956514298915863 | gen_loss: 0.1816556602716446\n",
            "compute_gradients\n",
            "Epoch: 483 | disc_loss: 0.41849982738494873 | gen_loss: 0.1965216100215912\n",
            "compute_gradients\n",
            "Epoch: 484 | disc_loss: 0.4672185778617859 | gen_loss: 0.17764125764369965\n",
            "compute_gradients\n",
            "Epoch: 485 | disc_loss: 0.3884372413158417 | gen_loss: 0.1964966058731079\n",
            "compute_gradients\n",
            "Epoch: 486 | disc_loss: 0.43698573112487793 | gen_loss: 0.1866786926984787\n",
            "compute_gradients\n",
            "Epoch: 487 | disc_loss: 0.4040100574493408 | gen_loss: 0.20575442910194397\n",
            "compute_gradients\n",
            "Epoch: 488 | disc_loss: 0.47988462448120117 | gen_loss: 0.1759135127067566\n",
            "compute_gradients\n",
            "Epoch: 489 | disc_loss: 0.4182366132736206 | gen_loss: 0.1927364468574524\n",
            "compute_gradients\n",
            "Epoch: 490 | disc_loss: 0.4014933705329895 | gen_loss: 0.19365006685256958\n",
            "compute_gradients\n",
            "Epoch: 491 | disc_loss: 0.420068621635437 | gen_loss: 0.2104889154434204\n",
            "compute_gradients\n",
            "Epoch: 492 | disc_loss: 0.4089168906211853 | gen_loss: 0.19580866396427155\n",
            "compute_gradients\n",
            "Epoch: 493 | disc_loss: 0.4343566298484802 | gen_loss: 0.19584770500659943\n",
            "compute_gradients\n",
            "Epoch: 494 | disc_loss: 0.4425208568572998 | gen_loss: 0.20058588683605194\n",
            "compute_gradients\n",
            "Epoch: 495 | disc_loss: 0.5043143033981323 | gen_loss: 0.18034763634204865\n",
            "compute_gradients\n",
            "Epoch: 496 | disc_loss: 0.5387506484985352 | gen_loss: 0.1926635503768921\n",
            "compute_gradients\n",
            "Epoch: 497 | disc_loss: 0.46046650409698486 | gen_loss: 0.1723359227180481\n",
            "compute_gradients\n",
            "Epoch: 498 | disc_loss: 0.42897430062294006 | gen_loss: 0.1988103836774826\n",
            "compute_gradients\n",
            "Epoch: 499 | disc_loss: 0.5034342408180237 | gen_loss: 0.1891707479953766\n",
            "compute_gradients\n",
            "Epoch: 500 | disc_loss: 0.3859272599220276 | gen_loss: 0.2162543535232544\n",
            "compute_gradients\n",
            "Epoch: 501 | disc_loss: 0.38866615295410156 | gen_loss: 0.2003614753484726\n",
            "compute_gradients\n",
            "Epoch: 502 | disc_loss: 0.5020297169685364 | gen_loss: 0.16548877954483032\n",
            "compute_gradients\n",
            "Epoch: 503 | disc_loss: 0.44128328561782837 | gen_loss: 0.185500368475914\n",
            "compute_gradients\n",
            "Epoch: 504 | disc_loss: 0.5265843868255615 | gen_loss: 0.19152764976024628\n",
            "compute_gradients\n",
            "Epoch: 505 | disc_loss: 0.4267122745513916 | gen_loss: 0.20444047451019287\n",
            "compute_gradients\n",
            "Epoch: 506 | disc_loss: 0.551681637763977 | gen_loss: 0.17820951342582703\n",
            "compute_gradients\n",
            "Epoch: 507 | disc_loss: 0.41636669635772705 | gen_loss: 0.20358730852603912\n",
            "compute_gradients\n",
            "Epoch: 508 | disc_loss: 0.4706762135028839 | gen_loss: 0.19384130835533142\n",
            "compute_gradients\n",
            "Epoch: 509 | disc_loss: 0.4211856722831726 | gen_loss: 0.18244151771068573\n",
            "compute_gradients\n",
            "Epoch: 510 | disc_loss: 0.4442007541656494 | gen_loss: 0.18736718595027924\n",
            "compute_gradients\n",
            "Epoch: 511 | disc_loss: 0.4093284010887146 | gen_loss: 0.21552085876464844\n",
            "compute_gradients\n",
            "Epoch: 512 | disc_loss: 0.5074141025543213 | gen_loss: 0.18108658492565155\n",
            "compute_gradients\n",
            "Epoch: 513 | disc_loss: 0.44012582302093506 | gen_loss: 0.1822940856218338\n",
            "compute_gradients\n",
            "Epoch: 514 | disc_loss: 0.41458144783973694 | gen_loss: 0.18698802590370178\n",
            "compute_gradients\n",
            "Epoch: 515 | disc_loss: 0.4694119393825531 | gen_loss: 0.17489676177501678\n",
            "compute_gradients\n",
            "Epoch: 516 | disc_loss: 0.47906771302223206 | gen_loss: 0.19203823804855347\n",
            "compute_gradients\n",
            "Epoch: 517 | disc_loss: 0.48339277505874634 | gen_loss: 0.16111499071121216\n",
            "compute_gradients\n",
            "Epoch: 518 | disc_loss: 0.4388234317302704 | gen_loss: 0.19480420649051666\n",
            "compute_gradients\n",
            "Epoch: 519 | disc_loss: 0.3917454779148102 | gen_loss: 0.20239947736263275\n",
            "compute_gradients\n",
            "Epoch: 520 | disc_loss: 0.4747374951839447 | gen_loss: 0.1990976333618164\n",
            "compute_gradients\n",
            "Epoch: 521 | disc_loss: 0.4431976079940796 | gen_loss: 0.19313377141952515\n",
            "compute_gradients\n",
            "Epoch: 522 | disc_loss: 0.5065691471099854 | gen_loss: 0.16909107565879822\n",
            "compute_gradients\n",
            "Epoch: 523 | disc_loss: 0.4587702453136444 | gen_loss: 0.18255861103534698\n",
            "compute_gradients\n",
            "Epoch: 524 | disc_loss: 0.5581056475639343 | gen_loss: 0.17245177924633026\n",
            "compute_gradients\n",
            "Epoch: 525 | disc_loss: 0.43447253108024597 | gen_loss: 0.18962731957435608\n",
            "compute_gradients\n",
            "Epoch: 526 | disc_loss: 0.4933064579963684 | gen_loss: 0.17777924239635468\n",
            "compute_gradients\n",
            "Epoch: 527 | disc_loss: 0.5321133136749268 | gen_loss: 0.18883799016475677\n",
            "compute_gradients\n",
            "Epoch: 528 | disc_loss: 0.5532867908477783 | gen_loss: 0.18397754430770874\n",
            "compute_gradients\n",
            "Epoch: 529 | disc_loss: 0.5650520324707031 | gen_loss: 0.18266025185585022\n",
            "compute_gradients\n",
            "Epoch: 530 | disc_loss: 0.46073663234710693 | gen_loss: 0.18629392981529236\n",
            "compute_gradients\n",
            "Epoch: 531 | disc_loss: 0.4632386565208435 | gen_loss: 0.19608810544013977\n",
            "compute_gradients\n",
            "Epoch: 532 | disc_loss: 0.46757373213768005 | gen_loss: 0.1948089450597763\n",
            "compute_gradients\n",
            "Epoch: 533 | disc_loss: 0.5324741005897522 | gen_loss: 0.1801936775445938\n",
            "compute_gradients\n",
            "Epoch: 534 | disc_loss: 0.4906885027885437 | gen_loss: 0.17207954823970795\n",
            "compute_gradients\n",
            "Epoch: 535 | disc_loss: 0.48764002323150635 | gen_loss: 0.17835506796836853\n",
            "compute_gradients\n",
            "Epoch: 536 | disc_loss: 0.4778543710708618 | gen_loss: 0.18454109132289886\n",
            "compute_gradients\n",
            "Epoch: 537 | disc_loss: 0.4653542935848236 | gen_loss: 0.19384878873825073\n",
            "compute_gradients\n",
            "Epoch: 538 | disc_loss: 0.537926971912384 | gen_loss: 0.1800943911075592\n",
            "compute_gradients\n",
            "Epoch: 539 | disc_loss: 0.5016982555389404 | gen_loss: 0.16568905115127563\n",
            "compute_gradients\n",
            "Epoch: 540 | disc_loss: 0.4235278069972992 | gen_loss: 0.19598853588104248\n",
            "compute_gradients\n",
            "Epoch: 541 | disc_loss: 0.5340673923492432 | gen_loss: 0.16134779155254364\n",
            "compute_gradients\n",
            "Epoch: 542 | disc_loss: 0.5731601715087891 | gen_loss: 0.18838219344615936\n",
            "compute_gradients\n",
            "Epoch: 543 | disc_loss: 0.5141856670379639 | gen_loss: 0.1785293072462082\n",
            "compute_gradients\n",
            "Epoch: 544 | disc_loss: 0.6067960262298584 | gen_loss: 0.18013593554496765\n",
            "compute_gradients\n",
            "Epoch: 545 | disc_loss: 0.6822822093963623 | gen_loss: 0.16685213148593903\n",
            "compute_gradients\n",
            "Epoch: 546 | disc_loss: 0.5025913119316101 | gen_loss: 0.17052778601646423\n",
            "compute_gradients\n",
            "Epoch: 547 | disc_loss: 0.6379161477088928 | gen_loss: 0.18073469400405884\n",
            "compute_gradients\n",
            "Epoch: 548 | disc_loss: 0.5168322920799255 | gen_loss: 0.1776721030473709\n",
            "compute_gradients\n",
            "Epoch: 549 | disc_loss: 0.4953431487083435 | gen_loss: 0.16947601735591888\n",
            "compute_gradients\n",
            "Epoch: 550 | disc_loss: 0.4614885747432709 | gen_loss: 0.1837838590145111\n",
            "compute_gradients\n",
            "Epoch: 551 | disc_loss: 0.48731499910354614 | gen_loss: 0.17843519151210785\n",
            "compute_gradients\n",
            "Epoch: 552 | disc_loss: 0.5130470991134644 | gen_loss: 0.16844436526298523\n",
            "compute_gradients\n",
            "Epoch: 553 | disc_loss: 0.47415903210639954 | gen_loss: 0.20124191045761108\n",
            "compute_gradients\n",
            "Epoch: 554 | disc_loss: 0.5088770389556885 | gen_loss: 0.17188760638237\n",
            "compute_gradients\n",
            "Epoch: 555 | disc_loss: 0.6072447299957275 | gen_loss: 0.1747162938117981\n",
            "compute_gradients\n",
            "Epoch: 556 | disc_loss: 0.5383780002593994 | gen_loss: 0.1639368236064911\n",
            "compute_gradients\n",
            "Epoch: 557 | disc_loss: 0.6018301248550415 | gen_loss: 0.17662450671195984\n",
            "compute_gradients\n",
            "Epoch: 558 | disc_loss: 0.5515282154083252 | gen_loss: 0.1730552613735199\n",
            "compute_gradients\n",
            "Epoch: 559 | disc_loss: 0.650600254535675 | gen_loss: 0.17097440361976624\n",
            "compute_gradients\n",
            "Epoch: 560 | disc_loss: 0.5811521410942078 | gen_loss: 0.16317366063594818\n",
            "compute_gradients\n",
            "Epoch: 561 | disc_loss: 0.47588521242141724 | gen_loss: 0.18512161076068878\n",
            "compute_gradients\n",
            "Epoch: 562 | disc_loss: 0.6284581422805786 | gen_loss: 0.15908153355121613\n",
            "compute_gradients\n",
            "Epoch: 563 | disc_loss: 0.5533273816108704 | gen_loss: 0.16780905425548553\n",
            "compute_gradients\n",
            "Epoch: 564 | disc_loss: 0.5171449780464172 | gen_loss: 0.18619151413440704\n",
            "compute_gradients\n",
            "Epoch: 565 | disc_loss: 0.613286554813385 | gen_loss: 0.1632450819015503\n",
            "compute_gradients\n",
            "Epoch: 566 | disc_loss: 0.5604041218757629 | gen_loss: 0.16876693069934845\n",
            "compute_gradients\n",
            "Epoch: 567 | disc_loss: 0.604504406452179 | gen_loss: 0.1655849814414978\n",
            "compute_gradients\n",
            "Epoch: 568 | disc_loss: 0.5022132992744446 | gen_loss: 0.1829928606748581\n",
            "compute_gradients\n",
            "Epoch: 569 | disc_loss: 0.5498635768890381 | gen_loss: 0.18031345307826996\n",
            "compute_gradients\n",
            "Epoch: 570 | disc_loss: 0.583631157875061 | gen_loss: 0.1847844123840332\n",
            "compute_gradients\n",
            "Epoch: 571 | disc_loss: 0.6663910746574402 | gen_loss: 0.1949053257703781\n",
            "compute_gradients\n",
            "Epoch: 572 | disc_loss: 0.6425197124481201 | gen_loss: 0.1708637773990631\n",
            "compute_gradients\n",
            "Epoch: 573 | disc_loss: 0.622584342956543 | gen_loss: 0.15742552280426025\n",
            "compute_gradients\n",
            "Epoch: 574 | disc_loss: 0.5235443711280823 | gen_loss: 0.1895543932914734\n",
            "compute_gradients\n",
            "Epoch: 575 | disc_loss: 0.5861597657203674 | gen_loss: 0.18073177337646484\n",
            "compute_gradients\n",
            "Epoch: 576 | disc_loss: 0.5514013767242432 | gen_loss: 0.177814781665802\n",
            "compute_gradients\n",
            "Epoch: 577 | disc_loss: 0.6191753149032593 | gen_loss: 0.16294361650943756\n",
            "compute_gradients\n",
            "Epoch: 578 | disc_loss: 0.5036818981170654 | gen_loss: 0.18662774562835693\n",
            "compute_gradients\n",
            "Epoch: 579 | disc_loss: 0.6782961487770081 | gen_loss: 0.1679447591304779\n",
            "compute_gradients\n",
            "Epoch: 580 | disc_loss: 0.6045194864273071 | gen_loss: 0.17585952579975128\n",
            "compute_gradients\n",
            "Epoch: 581 | disc_loss: 0.6701038479804993 | gen_loss: 0.1800439953804016\n",
            "compute_gradients\n",
            "Epoch: 582 | disc_loss: 0.6655727624893188 | gen_loss: 0.1701289713382721\n",
            "compute_gradients\n",
            "Epoch: 583 | disc_loss: 0.5312603712081909 | gen_loss: 0.16836310923099518\n",
            "compute_gradients\n",
            "Epoch: 584 | disc_loss: 0.5527380704879761 | gen_loss: 0.17253057658672333\n",
            "compute_gradients\n",
            "Epoch: 585 | disc_loss: 0.5862072706222534 | gen_loss: 0.16804301738739014\n",
            "compute_gradients\n",
            "Epoch: 586 | disc_loss: 0.5795103311538696 | gen_loss: 0.16636638343334198\n",
            "compute_gradients\n",
            "Epoch: 587 | disc_loss: 0.584528923034668 | gen_loss: 0.1705353558063507\n",
            "compute_gradients\n",
            "Epoch: 588 | disc_loss: 0.6181654334068298 | gen_loss: 0.173914834856987\n",
            "compute_gradients\n",
            "Epoch: 589 | disc_loss: 0.5888493061065674 | gen_loss: 0.16525036096572876\n",
            "compute_gradients\n",
            "Epoch: 590 | disc_loss: 0.6833216547966003 | gen_loss: 0.1680692732334137\n",
            "compute_gradients\n",
            "Epoch: 591 | disc_loss: 0.5618170499801636 | gen_loss: 0.17612531781196594\n",
            "compute_gradients\n",
            "Epoch: 592 | disc_loss: 0.6139100790023804 | gen_loss: 0.1672508269548416\n",
            "compute_gradients\n",
            "Epoch: 593 | disc_loss: 0.5949468612670898 | gen_loss: 0.1701096147298813\n",
            "compute_gradients\n",
            "Epoch: 594 | disc_loss: 0.5422827005386353 | gen_loss: 0.17700506746768951\n",
            "compute_gradients\n",
            "Epoch: 595 | disc_loss: 0.5989044308662415 | gen_loss: 0.16734090447425842\n",
            "compute_gradients\n",
            "Epoch: 596 | disc_loss: 0.6182725429534912 | gen_loss: 0.16749538481235504\n",
            "compute_gradients\n",
            "Epoch: 597 | disc_loss: 0.7186044454574585 | gen_loss: 0.15783175826072693\n",
            "compute_gradients\n",
            "Epoch: 598 | disc_loss: 0.6202232837677002 | gen_loss: 0.150778666138649\n",
            "compute_gradients\n",
            "Epoch: 599 | disc_loss: 0.5620617270469666 | gen_loss: 0.1706092804670334\n",
            "compute_gradients\n",
            "Epoch: 600 | disc_loss: 0.7709808945655823 | gen_loss: 0.15107496082782745\n",
            "compute_gradients\n",
            "Epoch: 601 | disc_loss: 0.6526898741722107 | gen_loss: 0.1622990369796753\n",
            "compute_gradients\n",
            "Epoch: 602 | disc_loss: 0.6325801610946655 | gen_loss: 0.16138075292110443\n",
            "compute_gradients\n",
            "Epoch: 603 | disc_loss: 0.7476761937141418 | gen_loss: 0.15214337408542633\n",
            "compute_gradients\n",
            "Epoch: 604 | disc_loss: 0.6677950620651245 | gen_loss: 0.15599983930587769\n",
            "compute_gradients\n",
            "Epoch: 605 | disc_loss: 0.6193512678146362 | gen_loss: 0.1728930026292801\n",
            "compute_gradients\n",
            "Epoch: 606 | disc_loss: 0.6099752187728882 | gen_loss: 0.1637166440486908\n",
            "compute_gradients\n",
            "Epoch: 607 | disc_loss: 0.7251870632171631 | gen_loss: 0.17149217426776886\n",
            "compute_gradients\n",
            "Epoch: 608 | disc_loss: 0.7072046995162964 | gen_loss: 0.17213407158851624\n",
            "compute_gradients\n",
            "Epoch: 609 | disc_loss: 0.7572282552719116 | gen_loss: 0.1586807668209076\n",
            "compute_gradients\n",
            "Epoch: 610 | disc_loss: 0.6223195791244507 | gen_loss: 0.17173312604427338\n",
            "compute_gradients\n",
            "Epoch: 611 | disc_loss: 0.5775254964828491 | gen_loss: 0.1698904186487198\n",
            "compute_gradients\n",
            "Epoch: 612 | disc_loss: 0.5990047454833984 | gen_loss: 0.1689964234828949\n",
            "compute_gradients\n",
            "Epoch: 613 | disc_loss: 0.6398417353630066 | gen_loss: 0.16613321006298065\n",
            "compute_gradients\n",
            "Epoch: 614 | disc_loss: 0.6036742925643921 | gen_loss: 0.17050530016422272\n",
            "compute_gradients\n",
            "Epoch: 615 | disc_loss: 0.6956988573074341 | gen_loss: 0.15133219957351685\n",
            "compute_gradients\n",
            "Epoch: 616 | disc_loss: 0.5676537752151489 | gen_loss: 0.1673801839351654\n",
            "compute_gradients\n",
            "Epoch: 617 | disc_loss: 0.6284629106521606 | gen_loss: 0.16505375504493713\n",
            "compute_gradients\n",
            "Epoch: 618 | disc_loss: 0.6314851641654968 | gen_loss: 0.18055570125579834\n",
            "compute_gradients\n",
            "Epoch: 619 | disc_loss: 0.6423742175102234 | gen_loss: 0.15162533521652222\n",
            "compute_gradients\n",
            "Epoch: 620 | disc_loss: 0.6682603359222412 | gen_loss: 0.17948263883590698\n",
            "compute_gradients\n",
            "Epoch: 621 | disc_loss: 0.6926443576812744 | gen_loss: 0.1767117828130722\n",
            "compute_gradients\n",
            "Epoch: 622 | disc_loss: 0.5923495292663574 | gen_loss: 0.18339680135250092\n",
            "compute_gradients\n",
            "Epoch: 623 | disc_loss: 0.716536283493042 | gen_loss: 0.15970319509506226\n",
            "compute_gradients\n",
            "Epoch: 624 | disc_loss: 0.8464699983596802 | gen_loss: 0.1429273784160614\n",
            "compute_gradients\n",
            "Epoch: 625 | disc_loss: 0.7032099962234497 | gen_loss: 0.16904373466968536\n",
            "compute_gradients\n",
            "Epoch: 626 | disc_loss: 0.612823486328125 | gen_loss: 0.16418595612049103\n",
            "compute_gradients\n",
            "Epoch: 627 | disc_loss: 0.5923585891723633 | gen_loss: 0.16942502558231354\n",
            "compute_gradients\n",
            "Epoch: 628 | disc_loss: 0.7216581106185913 | gen_loss: 0.14421477913856506\n",
            "compute_gradients\n",
            "Epoch: 629 | disc_loss: 0.7189342975616455 | gen_loss: 0.15996910631656647\n",
            "compute_gradients\n",
            "Epoch: 630 | disc_loss: 0.6525804996490479 | gen_loss: 0.15394222736358643\n",
            "compute_gradients\n",
            "Epoch: 631 | disc_loss: 0.5750372409820557 | gen_loss: 0.17060640454292297\n",
            "compute_gradients\n",
            "Epoch: 632 | disc_loss: 0.7092597484588623 | gen_loss: 0.1582491248846054\n",
            "compute_gradients\n",
            "Epoch: 633 | disc_loss: 0.7981235980987549 | gen_loss: 0.1481996774673462\n",
            "compute_gradients\n",
            "Epoch: 634 | disc_loss: 0.6885486841201782 | gen_loss: 0.16962595283985138\n",
            "compute_gradients\n",
            "Epoch: 635 | disc_loss: 0.7162990570068359 | gen_loss: 0.16445249319076538\n",
            "compute_gradients\n",
            "Epoch: 636 | disc_loss: 0.6783567667007446 | gen_loss: 0.16443198919296265\n",
            "compute_gradients\n",
            "Epoch: 637 | disc_loss: 0.8178447484970093 | gen_loss: 0.156810462474823\n",
            "compute_gradients\n",
            "Epoch: 638 | disc_loss: 0.7692816257476807 | gen_loss: 0.1539863497018814\n",
            "compute_gradients\n",
            "Epoch: 639 | disc_loss: 0.7384908199310303 | gen_loss: 0.16251806914806366\n",
            "compute_gradients\n",
            "Epoch: 640 | disc_loss: 0.7938659191131592 | gen_loss: 0.15231594443321228\n",
            "compute_gradients\n",
            "Epoch: 641 | disc_loss: 0.7775691747665405 | gen_loss: 0.15273882448673248\n",
            "compute_gradients\n",
            "Epoch: 642 | disc_loss: 0.8364819288253784 | gen_loss: 0.15418575704097748\n",
            "compute_gradients\n",
            "Epoch: 643 | disc_loss: 0.7784135341644287 | gen_loss: 0.15439607203006744\n",
            "compute_gradients\n",
            "Epoch: 644 | disc_loss: 0.7042025923728943 | gen_loss: 0.1647471785545349\n",
            "compute_gradients\n",
            "Epoch: 645 | disc_loss: 0.9019322395324707 | gen_loss: 0.15593518316745758\n",
            "compute_gradients\n",
            "Epoch: 646 | disc_loss: 0.6828198432922363 | gen_loss: 0.1643734574317932\n",
            "compute_gradients\n",
            "Epoch: 647 | disc_loss: 0.8052253723144531 | gen_loss: 0.14767326414585114\n",
            "compute_gradients\n",
            "Epoch: 648 | disc_loss: 0.7911722660064697 | gen_loss: 0.16584981977939606\n",
            "compute_gradients\n",
            "Epoch: 649 | disc_loss: 0.9019432067871094 | gen_loss: 0.13420571386814117\n",
            "compute_gradients\n",
            "Epoch: 650 | disc_loss: 0.8339575529098511 | gen_loss: 0.14729060232639313\n",
            "compute_gradients\n",
            "Epoch: 651 | disc_loss: 0.7792339324951172 | gen_loss: 0.16983041167259216\n",
            "compute_gradients\n",
            "Epoch: 652 | disc_loss: 0.7740155458450317 | gen_loss: 0.15402594208717346\n",
            "compute_gradients\n",
            "Epoch: 653 | disc_loss: 0.8423426747322083 | gen_loss: 0.15522529184818268\n",
            "compute_gradients\n",
            "Epoch: 654 | disc_loss: 0.7048841714859009 | gen_loss: 0.16710717976093292\n",
            "compute_gradients\n",
            "Epoch: 655 | disc_loss: 0.7931084632873535 | gen_loss: 0.1594519466161728\n",
            "compute_gradients\n",
            "Epoch: 656 | disc_loss: 0.8858774900436401 | gen_loss: 0.16890782117843628\n",
            "compute_gradients\n",
            "Epoch: 657 | disc_loss: 0.8551408052444458 | gen_loss: 0.15304477512836456\n",
            "compute_gradients\n",
            "Epoch: 658 | disc_loss: 0.8298531770706177 | gen_loss: 0.15254150331020355\n",
            "compute_gradients\n",
            "Epoch: 659 | disc_loss: 0.8192727565765381 | gen_loss: 0.15701860189437866\n",
            "compute_gradients\n",
            "Epoch: 660 | disc_loss: 0.8179290294647217 | gen_loss: 0.16667985916137695\n",
            "compute_gradients\n",
            "Epoch: 661 | disc_loss: 0.8556033372879028 | gen_loss: 0.16213145852088928\n",
            "compute_gradients\n",
            "Epoch: 662 | disc_loss: 0.8648594617843628 | gen_loss: 0.16063445806503296\n",
            "compute_gradients\n",
            "Epoch: 663 | disc_loss: 0.7322434186935425 | gen_loss: 0.17262814939022064\n",
            "compute_gradients\n",
            "Epoch: 664 | disc_loss: 0.8890702128410339 | gen_loss: 0.1615544557571411\n",
            "compute_gradients\n",
            "Epoch: 665 | disc_loss: 0.9334860444068909 | gen_loss: 0.17146164178848267\n",
            "compute_gradients\n",
            "Epoch: 666 | disc_loss: 0.6404426097869873 | gen_loss: 0.1720232218503952\n",
            "compute_gradients\n",
            "Epoch: 667 | disc_loss: 0.8953590989112854 | gen_loss: 0.15888747572898865\n",
            "compute_gradients\n",
            "Epoch: 668 | disc_loss: 0.9435720443725586 | gen_loss: 0.16273687779903412\n",
            "compute_gradients\n",
            "Epoch: 669 | disc_loss: 0.7914614677429199 | gen_loss: 0.15504732728004456\n",
            "compute_gradients\n",
            "Epoch: 670 | disc_loss: 0.699553370475769 | gen_loss: 0.17148056626319885\n",
            "compute_gradients\n",
            "Epoch: 671 | disc_loss: 0.806995689868927 | gen_loss: 0.1663517951965332\n",
            "compute_gradients\n",
            "Epoch: 672 | disc_loss: 0.9607731103897095 | gen_loss: 0.1494864523410797\n",
            "compute_gradients\n",
            "Epoch: 673 | disc_loss: 0.9511188864707947 | gen_loss: 0.1504301279783249\n",
            "compute_gradients\n",
            "Epoch: 674 | disc_loss: 0.8309158086776733 | gen_loss: 0.17210808396339417\n",
            "compute_gradients\n",
            "Epoch: 675 | disc_loss: 0.8979049324989319 | gen_loss: 0.14776074886322021\n",
            "compute_gradients\n",
            "Epoch: 676 | disc_loss: 0.8663889169692993 | gen_loss: 0.169796422123909\n",
            "compute_gradients\n",
            "Epoch: 677 | disc_loss: 0.9452071785926819 | gen_loss: 0.14194318652153015\n",
            "compute_gradients\n",
            "Epoch: 678 | disc_loss: 0.826371967792511 | gen_loss: 0.16184230148792267\n",
            "compute_gradients\n",
            "Epoch: 679 | disc_loss: 0.876031756401062 | gen_loss: 0.16781623661518097\n",
            "compute_gradients\n",
            "Epoch: 680 | disc_loss: 0.8605323433876038 | gen_loss: 0.14995557069778442\n",
            "compute_gradients\n",
            "Epoch: 681 | disc_loss: 0.9992166757583618 | gen_loss: 0.13939671218395233\n",
            "compute_gradients\n",
            "Epoch: 682 | disc_loss: 0.904026985168457 | gen_loss: 0.16010883450508118\n",
            "compute_gradients\n",
            "Epoch: 683 | disc_loss: 0.8010989427566528 | gen_loss: 0.16554075479507446\n",
            "compute_gradients\n",
            "Epoch: 684 | disc_loss: 0.9404382705688477 | gen_loss: 0.14712348580360413\n",
            "compute_gradients\n",
            "Epoch: 685 | disc_loss: 0.971529483795166 | gen_loss: 0.14962682127952576\n",
            "compute_gradients\n",
            "Epoch: 686 | disc_loss: 0.8427021503448486 | gen_loss: 0.1577976644039154\n",
            "compute_gradients\n",
            "Epoch: 687 | disc_loss: 0.808201014995575 | gen_loss: 0.17527762055397034\n",
            "compute_gradients\n",
            "Epoch: 688 | disc_loss: 0.9824150800704956 | gen_loss: 0.1445470005273819\n",
            "compute_gradients\n",
            "Epoch: 689 | disc_loss: 0.9543225765228271 | gen_loss: 0.14342404901981354\n",
            "compute_gradients\n",
            "Epoch: 690 | disc_loss: 0.7627965211868286 | gen_loss: 0.17703357338905334\n",
            "compute_gradients\n",
            "Epoch: 691 | disc_loss: 1.0681800842285156 | gen_loss: 0.15053321421146393\n",
            "compute_gradients\n",
            "Epoch: 692 | disc_loss: 0.8162733316421509 | gen_loss: 0.16485439240932465\n",
            "compute_gradients\n",
            "Epoch: 693 | disc_loss: 0.7828713059425354 | gen_loss: 0.17546260356903076\n",
            "compute_gradients\n",
            "Epoch: 694 | disc_loss: 1.0359883308410645 | gen_loss: 0.13850685954093933\n",
            "compute_gradients\n",
            "Epoch: 695 | disc_loss: 0.9964484572410583 | gen_loss: 0.14034205675125122\n",
            "compute_gradients\n",
            "Epoch: 696 | disc_loss: 0.8629046678543091 | gen_loss: 0.16341234743595123\n",
            "compute_gradients\n",
            "Epoch: 697 | disc_loss: 1.0698883533477783 | gen_loss: 0.14799228310585022\n",
            "compute_gradients\n",
            "Epoch: 698 | disc_loss: 0.9138328433036804 | gen_loss: 0.16568320989608765\n",
            "compute_gradients\n",
            "Epoch: 699 | disc_loss: 0.8826652765274048 | gen_loss: 0.16734734177589417\n",
            "compute_gradients\n",
            "Epoch: 700 | disc_loss: 1.2213984727859497 | gen_loss: 0.13257184624671936\n",
            "compute_gradients\n",
            "Epoch: 701 | disc_loss: 1.0405093431472778 | gen_loss: 0.1613014042377472\n",
            "compute_gradients\n",
            "Epoch: 702 | disc_loss: 0.9704176783561707 | gen_loss: 0.15873533487319946\n",
            "compute_gradients\n",
            "Epoch: 703 | disc_loss: 0.9546998739242554 | gen_loss: 0.15815454721450806\n",
            "compute_gradients\n",
            "Epoch: 704 | disc_loss: 0.9308276176452637 | gen_loss: 0.14778968691825867\n",
            "compute_gradients\n",
            "Epoch: 705 | disc_loss: 0.8896691203117371 | gen_loss: 0.14291100203990936\n",
            "compute_gradients\n",
            "Epoch: 706 | disc_loss: 0.9570464491844177 | gen_loss: 0.1633864939212799\n",
            "compute_gradients\n",
            "Epoch: 707 | disc_loss: 1.0535578727722168 | gen_loss: 0.1527087390422821\n",
            "compute_gradients\n",
            "Epoch: 708 | disc_loss: 0.9919471740722656 | gen_loss: 0.1604384481906891\n",
            "compute_gradients\n",
            "Epoch: 709 | disc_loss: 0.9182434678077698 | gen_loss: 0.14416342973709106\n",
            "compute_gradients\n",
            "Epoch: 710 | disc_loss: 1.0237913131713867 | gen_loss: 0.13366083800792694\n",
            "compute_gradients\n",
            "Epoch: 711 | disc_loss: 1.0675735473632812 | gen_loss: 0.1331355720758438\n",
            "compute_gradients\n",
            "Epoch: 712 | disc_loss: 0.8686298131942749 | gen_loss: 0.17171841859817505\n",
            "compute_gradients\n",
            "Epoch: 713 | disc_loss: 0.9660758972167969 | gen_loss: 0.15892477333545685\n",
            "compute_gradients\n",
            "Epoch: 714 | disc_loss: 0.8913166522979736 | gen_loss: 0.17200542986392975\n",
            "compute_gradients\n",
            "Epoch: 715 | disc_loss: 1.0322387218475342 | gen_loss: 0.15729384124279022\n",
            "compute_gradients\n",
            "Epoch: 716 | disc_loss: 1.0063042640686035 | gen_loss: 0.14957666397094727\n",
            "compute_gradients\n",
            "Epoch: 717 | disc_loss: 0.9492769241333008 | gen_loss: 0.1502540558576584\n",
            "compute_gradients\n",
            "Epoch: 718 | disc_loss: 0.972636878490448 | gen_loss: 0.15950456261634827\n",
            "compute_gradients\n",
            "Epoch: 719 | disc_loss: 1.022468090057373 | gen_loss: 0.14553095400333405\n",
            "compute_gradients\n",
            "Epoch: 720 | disc_loss: 0.9751096963882446 | gen_loss: 0.1643250584602356\n",
            "compute_gradients\n",
            "Epoch: 721 | disc_loss: 0.9422047734260559 | gen_loss: 0.14268453419208527\n",
            "compute_gradients\n",
            "Epoch: 722 | disc_loss: 0.9315171241760254 | gen_loss: 0.17249150574207306\n",
            "compute_gradients\n",
            "Epoch: 723 | disc_loss: 1.084876298904419 | gen_loss: 0.14638303220272064\n",
            "compute_gradients\n",
            "Epoch: 724 | disc_loss: 0.9573590755462646 | gen_loss: 0.14648260176181793\n",
            "compute_gradients\n",
            "Epoch: 725 | disc_loss: 0.9313638210296631 | gen_loss: 0.17245644330978394\n",
            "compute_gradients\n",
            "Epoch: 726 | disc_loss: 0.9959936738014221 | gen_loss: 0.15470312535762787\n",
            "compute_gradients\n",
            "Epoch: 727 | disc_loss: 1.0369620323181152 | gen_loss: 0.13772380352020264\n",
            "compute_gradients\n",
            "Epoch: 728 | disc_loss: 1.1131675243377686 | gen_loss: 0.15868878364562988\n",
            "compute_gradients\n",
            "Epoch: 729 | disc_loss: 1.0542649030685425 | gen_loss: 0.17084068059921265\n",
            "compute_gradients\n",
            "Epoch: 730 | disc_loss: 0.9920003414154053 | gen_loss: 0.1552169919013977\n",
            "compute_gradients\n",
            "Epoch: 731 | disc_loss: 1.1990196704864502 | gen_loss: 0.1469159573316574\n",
            "compute_gradients\n",
            "Epoch: 732 | disc_loss: 0.9771742820739746 | gen_loss: 0.14735344052314758\n",
            "compute_gradients\n",
            "Epoch: 733 | disc_loss: 1.1933965682983398 | gen_loss: 0.1557752937078476\n",
            "compute_gradients\n",
            "Epoch: 734 | disc_loss: 0.8087043762207031 | gen_loss: 0.1692478358745575\n",
            "compute_gradients\n",
            "Epoch: 735 | disc_loss: 1.1190752983093262 | gen_loss: 0.16142164170742035\n",
            "compute_gradients\n",
            "Epoch: 736 | disc_loss: 1.0618600845336914 | gen_loss: 0.14946740865707397\n",
            "compute_gradients\n",
            "Epoch: 737 | disc_loss: 1.1502115726470947 | gen_loss: 0.1325553059577942\n",
            "compute_gradients\n",
            "Epoch: 738 | disc_loss: 0.9844022989273071 | gen_loss: 0.15247182548046112\n",
            "compute_gradients\n",
            "Epoch: 739 | disc_loss: 1.0031514167785645 | gen_loss: 0.17196817696094513\n",
            "compute_gradients\n",
            "Epoch: 740 | disc_loss: 0.9773216247558594 | gen_loss: 0.15289077162742615\n",
            "compute_gradients\n",
            "Epoch: 741 | disc_loss: 1.181854486465454 | gen_loss: 0.14165188372135162\n",
            "compute_gradients\n",
            "Epoch: 742 | disc_loss: 1.2449856996536255 | gen_loss: 0.14170585572719574\n",
            "compute_gradients\n",
            "Epoch: 743 | disc_loss: 1.21706223487854 | gen_loss: 0.1460386961698532\n",
            "compute_gradients\n",
            "Epoch: 744 | disc_loss: 1.0754776000976562 | gen_loss: 0.16439689695835114\n",
            "compute_gradients\n",
            "Epoch: 745 | disc_loss: 1.0241446495056152 | gen_loss: 0.14736565947532654\n",
            "compute_gradients\n",
            "Epoch: 746 | disc_loss: 1.1529550552368164 | gen_loss: 0.13581730425357819\n",
            "compute_gradients\n",
            "Epoch: 747 | disc_loss: 1.0766797065734863 | gen_loss: 0.15233100950717926\n",
            "compute_gradients\n",
            "Epoch: 748 | disc_loss: 1.1211020946502686 | gen_loss: 0.16784407198429108\n",
            "compute_gradients\n",
            "Epoch: 749 | disc_loss: 1.310937762260437 | gen_loss: 0.15221557021141052\n",
            "compute_gradients\n",
            "Epoch: 750 | disc_loss: 1.4055285453796387 | gen_loss: 0.14833006262779236\n",
            "compute_gradients\n",
            "Epoch: 751 | disc_loss: 1.0852023363113403 | gen_loss: 0.15338976681232452\n",
            "compute_gradients\n",
            "Epoch: 752 | disc_loss: 1.1885885000228882 | gen_loss: 0.14833088219165802\n",
            "compute_gradients\n",
            "Epoch: 753 | disc_loss: 1.0865979194641113 | gen_loss: 0.1724136471748352\n",
            "compute_gradients\n",
            "Epoch: 754 | disc_loss: 1.070603847503662 | gen_loss: 0.15574060380458832\n",
            "compute_gradients\n",
            "Epoch: 755 | disc_loss: 1.1544034481048584 | gen_loss: 0.1606431007385254\n",
            "compute_gradients\n",
            "Epoch: 756 | disc_loss: 1.1100454330444336 | gen_loss: 0.16006805002689362\n",
            "compute_gradients\n",
            "Epoch: 757 | disc_loss: 1.438288688659668 | gen_loss: 0.14136038720607758\n",
            "compute_gradients\n",
            "Epoch: 758 | disc_loss: 1.266679286956787 | gen_loss: 0.14687471091747284\n",
            "compute_gradients\n",
            "Epoch: 759 | disc_loss: 1.2477219104766846 | gen_loss: 0.13037651777267456\n",
            "compute_gradients\n",
            "Epoch: 760 | disc_loss: 1.1206002235412598 | gen_loss: 0.1566031277179718\n",
            "compute_gradients\n",
            "Epoch: 761 | disc_loss: 1.1043168306350708 | gen_loss: 0.15088699758052826\n",
            "compute_gradients\n",
            "Epoch: 762 | disc_loss: 1.0941627025604248 | gen_loss: 0.15131624042987823\n",
            "compute_gradients\n",
            "Epoch: 763 | disc_loss: 1.1422606706619263 | gen_loss: 0.15204206109046936\n",
            "compute_gradients\n",
            "Epoch: 764 | disc_loss: 1.2769290208816528 | gen_loss: 0.14943721890449524\n",
            "compute_gradients\n",
            "Epoch: 765 | disc_loss: 1.2573503255844116 | gen_loss: 0.1501537561416626\n",
            "compute_gradients\n",
            "Epoch: 766 | disc_loss: 1.513340711593628 | gen_loss: 0.1295887976884842\n",
            "compute_gradients\n",
            "Epoch: 767 | disc_loss: 1.171802043914795 | gen_loss: 0.1458793431520462\n",
            "compute_gradients\n",
            "Epoch: 768 | disc_loss: 1.2660564184188843 | gen_loss: 0.14362066984176636\n",
            "compute_gradients\n",
            "Epoch: 769 | disc_loss: 1.3051128387451172 | gen_loss: 0.14280182123184204\n",
            "compute_gradients\n",
            "Epoch: 770 | disc_loss: 1.027116060256958 | gen_loss: 0.1558143049478531\n",
            "compute_gradients\n",
            "Epoch: 771 | disc_loss: 1.250178337097168 | gen_loss: 0.14297188818454742\n",
            "compute_gradients\n",
            "Epoch: 772 | disc_loss: 1.3116356134414673 | gen_loss: 0.1292228102684021\n",
            "compute_gradients\n",
            "Epoch: 773 | disc_loss: 1.0901930332183838 | gen_loss: 0.14664039015769958\n",
            "compute_gradients\n",
            "Epoch: 774 | disc_loss: 1.184564232826233 | gen_loss: 0.13960173726081848\n",
            "compute_gradients\n",
            "Epoch: 775 | disc_loss: 1.2736985683441162 | gen_loss: 0.1403658092021942\n",
            "compute_gradients\n",
            "Epoch: 776 | disc_loss: 1.0823308229446411 | gen_loss: 0.1367095410823822\n",
            "compute_gradients\n",
            "Epoch: 777 | disc_loss: 1.397275447845459 | gen_loss: 0.13322517275810242\n",
            "compute_gradients\n",
            "Epoch: 778 | disc_loss: 1.3345091342926025 | gen_loss: 0.13676564395427704\n",
            "compute_gradients\n",
            "Epoch: 779 | disc_loss: 1.4073820114135742 | gen_loss: 0.14918971061706543\n",
            "compute_gradients\n",
            "Epoch: 780 | disc_loss: 1.0682032108306885 | gen_loss: 0.14183177053928375\n",
            "compute_gradients\n",
            "Epoch: 781 | disc_loss: 1.1908822059631348 | gen_loss: 0.13843032717704773\n",
            "compute_gradients\n",
            "Epoch: 782 | disc_loss: 1.278534173965454 | gen_loss: 0.14929282665252686\n",
            "compute_gradients\n",
            "Epoch: 783 | disc_loss: 1.2473944425582886 | gen_loss: 0.1517384648323059\n",
            "compute_gradients\n",
            "Epoch: 784 | disc_loss: 1.408395767211914 | gen_loss: 0.14414797723293304\n",
            "compute_gradients\n",
            "Epoch: 785 | disc_loss: 1.236255407333374 | gen_loss: 0.1527443677186966\n",
            "compute_gradients\n",
            "Epoch: 786 | disc_loss: 1.3621782064437866 | gen_loss: 0.13809913396835327\n",
            "compute_gradients\n",
            "Epoch: 787 | disc_loss: 1.3332412242889404 | gen_loss: 0.14530602097511292\n",
            "compute_gradients\n",
            "Epoch: 788 | disc_loss: 1.2213125228881836 | gen_loss: 0.15476742386817932\n",
            "compute_gradients\n",
            "Epoch: 789 | disc_loss: 1.0532639026641846 | gen_loss: 0.15483610332012177\n",
            "compute_gradients\n",
            "Epoch: 790 | disc_loss: 1.4571608304977417 | gen_loss: 0.1340329349040985\n",
            "compute_gradients\n",
            "Epoch: 791 | disc_loss: 1.4009654521942139 | gen_loss: 0.12776058912277222\n",
            "compute_gradients\n",
            "Epoch: 792 | disc_loss: 1.289624571800232 | gen_loss: 0.12938882410526276\n",
            "compute_gradients\n",
            "Epoch: 793 | disc_loss: 1.3447794914245605 | gen_loss: 0.1349783092737198\n",
            "compute_gradients\n",
            "Epoch: 794 | disc_loss: 1.4294297695159912 | gen_loss: 0.14421948790550232\n",
            "compute_gradients\n",
            "Epoch: 795 | disc_loss: 1.2780697345733643 | gen_loss: 0.14538432657718658\n",
            "compute_gradients\n",
            "Epoch: 796 | disc_loss: 1.5023612976074219 | gen_loss: 0.1300262212753296\n",
            "compute_gradients\n",
            "Epoch: 797 | disc_loss: 1.3176673650741577 | gen_loss: 0.14202280342578888\n",
            "compute_gradients\n",
            "Epoch: 798 | disc_loss: 1.316359043121338 | gen_loss: 0.1332308053970337\n",
            "compute_gradients\n",
            "Epoch: 799 | disc_loss: 1.1566097736358643 | gen_loss: 0.1395001858472824\n",
            "compute_gradients\n",
            "Epoch: 800 | disc_loss: 1.5477027893066406 | gen_loss: 0.13606764376163483\n",
            "compute_gradients\n",
            "Epoch: 801 | disc_loss: 1.3252668380737305 | gen_loss: 0.14079181849956512\n",
            "compute_gradients\n",
            "Epoch: 802 | disc_loss: 1.564049482345581 | gen_loss: 0.13086648285388947\n",
            "compute_gradients\n",
            "Epoch: 803 | disc_loss: 1.4636447429656982 | gen_loss: 0.12555037438869476\n",
            "compute_gradients\n",
            "Epoch: 804 | disc_loss: 1.242738962173462 | gen_loss: 0.1473187506198883\n",
            "compute_gradients\n",
            "Epoch: 805 | disc_loss: 1.492621660232544 | gen_loss: 0.12818942964076996\n",
            "compute_gradients\n",
            "Epoch: 806 | disc_loss: 1.5131964683532715 | gen_loss: 0.13978548347949982\n",
            "compute_gradients\n",
            "Epoch: 807 | disc_loss: 1.6562738418579102 | gen_loss: 0.1189127191901207\n",
            "compute_gradients\n",
            "Epoch: 808 | disc_loss: 1.2928857803344727 | gen_loss: 0.146102175116539\n",
            "compute_gradients\n",
            "Epoch: 809 | disc_loss: 1.3025702238082886 | gen_loss: 0.1351873129606247\n",
            "compute_gradients\n",
            "Epoch: 810 | disc_loss: 1.3483537435531616 | gen_loss: 0.1366458535194397\n",
            "compute_gradients\n",
            "Epoch: 811 | disc_loss: 1.1419289112091064 | gen_loss: 0.1550757735967636\n",
            "compute_gradients\n",
            "Epoch: 812 | disc_loss: 1.4678999185562134 | gen_loss: 0.1422787308692932\n",
            "compute_gradients\n",
            "Epoch: 813 | disc_loss: 1.4880650043487549 | gen_loss: 0.1349472552537918\n",
            "compute_gradients\n",
            "Epoch: 814 | disc_loss: 1.4459397792816162 | gen_loss: 0.14686569571495056\n",
            "compute_gradients\n",
            "Epoch: 815 | disc_loss: 1.4843835830688477 | gen_loss: 0.1264634132385254\n",
            "compute_gradients\n",
            "Epoch: 816 | disc_loss: 1.3051798343658447 | gen_loss: 0.12596650421619415\n",
            "compute_gradients\n",
            "Epoch: 817 | disc_loss: 1.608400583267212 | gen_loss: 0.1282057911157608\n",
            "compute_gradients\n",
            "Epoch: 818 | disc_loss: 1.58876633644104 | gen_loss: 0.1468430608510971\n",
            "compute_gradients\n",
            "Epoch: 819 | disc_loss: 1.5924065113067627 | gen_loss: 0.12748590111732483\n",
            "compute_gradients\n",
            "Epoch: 820 | disc_loss: 1.5224213600158691 | gen_loss: 0.11985810101032257\n",
            "compute_gradients\n",
            "Epoch: 821 | disc_loss: 1.263909101486206 | gen_loss: 0.14619745314121246\n",
            "compute_gradients\n",
            "Epoch: 822 | disc_loss: 1.4570345878601074 | gen_loss: 0.14667962491512299\n",
            "compute_gradients\n",
            "Epoch: 823 | disc_loss: 1.3181273937225342 | gen_loss: 0.13690483570098877\n",
            "compute_gradients\n",
            "Epoch: 824 | disc_loss: 1.4843047857284546 | gen_loss: 0.12505319714546204\n",
            "compute_gradients\n",
            "Epoch: 825 | disc_loss: 1.7110810279846191 | gen_loss: 0.11934977769851685\n",
            "compute_gradients\n",
            "Epoch: 826 | disc_loss: 1.280239462852478 | gen_loss: 0.1318213790655136\n",
            "compute_gradients\n",
            "Epoch: 827 | disc_loss: 1.4400687217712402 | gen_loss: 0.12885509431362152\n",
            "compute_gradients\n",
            "Epoch: 828 | disc_loss: 1.5516078472137451 | gen_loss: 0.12195659428834915\n",
            "compute_gradients\n",
            "Epoch: 829 | disc_loss: 1.2561460733413696 | gen_loss: 0.14379310607910156\n",
            "compute_gradients\n",
            "Epoch: 830 | disc_loss: 1.6664668321609497 | gen_loss: 0.13684695959091187\n",
            "compute_gradients\n",
            "Epoch: 831 | disc_loss: 1.3670222759246826 | gen_loss: 0.12396535277366638\n",
            "compute_gradients\n",
            "Epoch: 832 | disc_loss: 1.427337646484375 | gen_loss: 0.13725923001766205\n",
            "compute_gradients\n",
            "Epoch: 833 | disc_loss: 1.473150372505188 | gen_loss: 0.13693684339523315\n",
            "compute_gradients\n",
            "Epoch: 834 | disc_loss: 1.5953468084335327 | gen_loss: 0.12684302031993866\n",
            "compute_gradients\n",
            "Epoch: 835 | disc_loss: 1.6413640975952148 | gen_loss: 0.11956395953893661\n",
            "compute_gradients\n",
            "Epoch: 836 | disc_loss: 1.1777939796447754 | gen_loss: 0.15160974860191345\n",
            "compute_gradients\n",
            "Epoch: 837 | disc_loss: 1.523305058479309 | gen_loss: 0.14070600271224976\n",
            "compute_gradients\n",
            "Epoch: 838 | disc_loss: 1.382777214050293 | gen_loss: 0.14227832853794098\n",
            "compute_gradients\n",
            "Epoch: 839 | disc_loss: 1.483603835105896 | gen_loss: 0.12779684364795685\n",
            "compute_gradients\n",
            "Epoch: 840 | disc_loss: 1.5619287490844727 | gen_loss: 0.1367248296737671\n",
            "compute_gradients\n",
            "Epoch: 841 | disc_loss: 1.2636452913284302 | gen_loss: 0.13542389869689941\n",
            "compute_gradients\n",
            "Epoch: 842 | disc_loss: 1.7216306924819946 | gen_loss: 0.13318969309329987\n",
            "compute_gradients\n",
            "Epoch: 843 | disc_loss: 1.4400949478149414 | gen_loss: 0.12861238420009613\n",
            "compute_gradients\n",
            "Epoch: 844 | disc_loss: 1.075282096862793 | gen_loss: 0.13391397893428802\n",
            "compute_gradients\n",
            "Epoch: 845 | disc_loss: 1.7113959789276123 | gen_loss: 0.12185842543840408\n",
            "compute_gradients\n",
            "Epoch: 846 | disc_loss: 1.5349055528640747 | gen_loss: 0.13780301809310913\n",
            "compute_gradients\n",
            "Epoch: 847 | disc_loss: 1.708082914352417 | gen_loss: 0.1384875327348709\n",
            "compute_gradients\n",
            "Epoch: 848 | disc_loss: 1.4291577339172363 | gen_loss: 0.11206422746181488\n",
            "compute_gradients\n",
            "Epoch: 849 | disc_loss: 1.5656132698059082 | gen_loss: 0.13019663095474243\n",
            "compute_gradients\n",
            "Epoch: 850 | disc_loss: 1.4195148944854736 | gen_loss: 0.12174608558416367\n",
            "compute_gradients\n",
            "Epoch: 851 | disc_loss: 1.5387895107269287 | gen_loss: 0.1287417858839035\n",
            "compute_gradients\n",
            "Epoch: 852 | disc_loss: 1.2633384466171265 | gen_loss: 0.12948226928710938\n",
            "compute_gradients\n",
            "Epoch: 853 | disc_loss: 1.6046061515808105 | gen_loss: 0.11752521246671677\n",
            "compute_gradients\n",
            "Epoch: 854 | disc_loss: 1.6421910524368286 | gen_loss: 0.11607957631349564\n",
            "compute_gradients\n",
            "Epoch: 855 | disc_loss: 1.2731006145477295 | gen_loss: 0.1353583037853241\n",
            "compute_gradients\n",
            "Epoch: 856 | disc_loss: 1.717033863067627 | gen_loss: 0.1351333111524582\n",
            "compute_gradients\n",
            "Epoch: 857 | disc_loss: 1.5093104839324951 | gen_loss: 0.11415998637676239\n",
            "compute_gradients\n",
            "Epoch: 858 | disc_loss: 1.6870043277740479 | gen_loss: 0.12130049616098404\n",
            "compute_gradients\n",
            "Epoch: 859 | disc_loss: 1.4890832901000977 | gen_loss: 0.142777681350708\n",
            "compute_gradients\n",
            "Epoch: 860 | disc_loss: 1.5024136304855347 | gen_loss: 0.1493314504623413\n",
            "compute_gradients\n",
            "Epoch: 861 | disc_loss: 1.4745103120803833 | gen_loss: 0.139757439494133\n",
            "compute_gradients\n",
            "Epoch: 862 | disc_loss: 1.5994131565093994 | gen_loss: 0.10931781679391861\n",
            "compute_gradients\n",
            "Epoch: 863 | disc_loss: 1.4996438026428223 | gen_loss: 0.13124604523181915\n",
            "compute_gradients\n",
            "Epoch: 864 | disc_loss: 1.4439212083816528 | gen_loss: 0.13315744698047638\n",
            "compute_gradients\n",
            "Epoch: 865 | disc_loss: 1.6665525436401367 | gen_loss: 0.12815751135349274\n",
            "compute_gradients\n",
            "Epoch: 866 | disc_loss: 1.6466703414916992 | gen_loss: 0.12489534169435501\n",
            "compute_gradients\n",
            "Epoch: 867 | disc_loss: 1.4536750316619873 | gen_loss: 0.12251584231853485\n",
            "compute_gradients\n",
            "Epoch: 868 | disc_loss: 1.4749308824539185 | gen_loss: 0.11913824826478958\n",
            "compute_gradients\n",
            "Epoch: 869 | disc_loss: 1.7028846740722656 | gen_loss: 0.120445616543293\n",
            "compute_gradients\n",
            "Epoch: 870 | disc_loss: 1.7150648832321167 | gen_loss: 0.11713633686304092\n",
            "compute_gradients\n",
            "Epoch: 871 | disc_loss: 1.6127078533172607 | gen_loss: 0.12456226348876953\n",
            "compute_gradients\n",
            "Epoch: 872 | disc_loss: 1.4885250329971313 | gen_loss: 0.1290341317653656\n",
            "compute_gradients\n",
            "Epoch: 873 | disc_loss: 1.7766891717910767 | gen_loss: 0.12912683188915253\n",
            "compute_gradients\n",
            "Epoch: 874 | disc_loss: 1.553889513015747 | gen_loss: 0.1230873093008995\n",
            "compute_gradients\n",
            "Epoch: 875 | disc_loss: 1.6567769050598145 | gen_loss: 0.1238318607211113\n",
            "compute_gradients\n",
            "Epoch: 876 | disc_loss: 1.6219701766967773 | gen_loss: 0.12811750173568726\n",
            "compute_gradients\n",
            "Epoch: 877 | disc_loss: 1.561428189277649 | gen_loss: 0.13160234689712524\n",
            "compute_gradients\n",
            "Epoch: 878 | disc_loss: 1.6164132356643677 | gen_loss: 0.11957281827926636\n",
            "compute_gradients\n",
            "Epoch: 879 | disc_loss: 1.8799378871917725 | gen_loss: 0.12045489996671677\n",
            "compute_gradients\n",
            "Epoch: 880 | disc_loss: 1.7864336967468262 | gen_loss: 0.12343742698431015\n",
            "compute_gradients\n",
            "Epoch: 881 | disc_loss: 1.5605614185333252 | gen_loss: 0.11332345753908157\n",
            "compute_gradients\n",
            "Epoch: 882 | disc_loss: 1.8343756198883057 | gen_loss: 0.12297295778989792\n",
            "compute_gradients\n",
            "Epoch: 883 | disc_loss: 1.7071423530578613 | gen_loss: 0.1154259741306305\n",
            "compute_gradients\n",
            "Epoch: 884 | disc_loss: 1.8967843055725098 | gen_loss: 0.11653975397348404\n",
            "compute_gradients\n",
            "Epoch: 885 | disc_loss: 1.8274861574172974 | gen_loss: 0.11490769684314728\n",
            "compute_gradients\n",
            "Epoch: 886 | disc_loss: 1.6140269041061401 | gen_loss: 0.13403671979904175\n",
            "compute_gradients\n",
            "Epoch: 887 | disc_loss: 1.88870108127594 | gen_loss: 0.10857779532670975\n",
            "compute_gradients\n",
            "Epoch: 888 | disc_loss: 1.59812331199646 | gen_loss: 0.12242521345615387\n",
            "compute_gradients\n",
            "Epoch: 889 | disc_loss: 1.8944249153137207 | gen_loss: 0.11100591719150543\n",
            "compute_gradients\n",
            "Epoch: 890 | disc_loss: 1.9246220588684082 | gen_loss: 0.11152472347021103\n",
            "compute_gradients\n",
            "Epoch: 891 | disc_loss: 1.7233529090881348 | gen_loss: 0.11300739645957947\n",
            "compute_gradients\n",
            "Epoch: 892 | disc_loss: 1.8589547872543335 | gen_loss: 0.09992937743663788\n",
            "compute_gradients\n",
            "Epoch: 893 | disc_loss: 1.8324389457702637 | gen_loss: 0.12428831309080124\n",
            "compute_gradients\n",
            "Epoch: 894 | disc_loss: 1.8248579502105713 | gen_loss: 0.1216178610920906\n",
            "compute_gradients\n",
            "Epoch: 895 | disc_loss: 1.7233707904815674 | gen_loss: 0.11555536091327667\n",
            "compute_gradients\n",
            "Epoch: 896 | disc_loss: 1.6679363250732422 | gen_loss: 0.11955533176660538\n",
            "compute_gradients\n",
            "Epoch: 897 | disc_loss: 1.9011882543563843 | gen_loss: 0.11697059869766235\n",
            "compute_gradients\n",
            "Epoch: 898 | disc_loss: 1.7164785861968994 | gen_loss: 0.119727723300457\n",
            "compute_gradients\n",
            "Epoch: 899 | disc_loss: 1.9574214220046997 | gen_loss: 0.11154051870107651\n",
            "compute_gradients\n",
            "Epoch: 900 | disc_loss: 1.8066166639328003 | gen_loss: 0.10908306390047073\n",
            "compute_gradients\n",
            "Epoch: 901 | disc_loss: 1.7131119966506958 | gen_loss: 0.11130346357822418\n",
            "compute_gradients\n",
            "Epoch: 902 | disc_loss: 1.8139088153839111 | gen_loss: 0.11971689015626907\n",
            "compute_gradients\n",
            "Epoch: 903 | disc_loss: 1.7702021598815918 | gen_loss: 0.12130223214626312\n",
            "compute_gradients\n",
            "Epoch: 904 | disc_loss: 1.7804464101791382 | gen_loss: 0.13173705339431763\n",
            "compute_gradients\n",
            "Epoch: 905 | disc_loss: 1.5690710544586182 | gen_loss: 0.13084784150123596\n",
            "compute_gradients\n",
            "Epoch: 906 | disc_loss: 1.8613662719726562 | gen_loss: 0.10447148978710175\n",
            "compute_gradients\n",
            "Epoch: 907 | disc_loss: 1.7766273021697998 | gen_loss: 0.11598730832338333\n",
            "compute_gradients\n",
            "Epoch: 908 | disc_loss: 1.9488937854766846 | gen_loss: 0.10812034457921982\n",
            "compute_gradients\n",
            "Epoch: 909 | disc_loss: 1.6568455696105957 | gen_loss: 0.11430571228265762\n",
            "compute_gradients\n",
            "Epoch: 910 | disc_loss: 1.8601391315460205 | gen_loss: 0.11577816307544708\n",
            "compute_gradients\n",
            "Epoch: 911 | disc_loss: 1.732614517211914 | gen_loss: 0.11974252015352249\n",
            "compute_gradients\n",
            "Epoch: 912 | disc_loss: 2.061546802520752 | gen_loss: 0.1070539578795433\n",
            "compute_gradients\n",
            "Epoch: 913 | disc_loss: 1.901258111000061 | gen_loss: 0.1238696277141571\n",
            "compute_gradients\n",
            "Epoch: 914 | disc_loss: 1.7415072917938232 | gen_loss: 0.11892424523830414\n",
            "compute_gradients\n",
            "Epoch: 915 | disc_loss: 1.932602882385254 | gen_loss: 0.11698424071073532\n",
            "compute_gradients\n",
            "Epoch: 916 | disc_loss: 1.8882231712341309 | gen_loss: 0.11741601675748825\n",
            "compute_gradients\n",
            "Epoch: 917 | disc_loss: 1.7407708168029785 | gen_loss: 0.11433222889900208\n",
            "compute_gradients\n",
            "Epoch: 918 | disc_loss: 1.924597144126892 | gen_loss: 0.09983985126018524\n",
            "compute_gradients\n",
            "Epoch: 919 | disc_loss: 2.0266776084899902 | gen_loss: 0.11783000081777573\n",
            "compute_gradients\n",
            "Epoch: 920 | disc_loss: 1.76620352268219 | gen_loss: 0.10610301792621613\n",
            "compute_gradients\n",
            "Epoch: 921 | disc_loss: 1.8661320209503174 | gen_loss: 0.11675973236560822\n",
            "compute_gradients\n",
            "Epoch: 922 | disc_loss: 1.8204007148742676 | gen_loss: 0.12032337486743927\n",
            "compute_gradients\n",
            "Epoch: 923 | disc_loss: 2.2639684677124023 | gen_loss: 0.1001674085855484\n",
            "compute_gradients\n",
            "Epoch: 924 | disc_loss: 1.8367161750793457 | gen_loss: 0.11457217484712601\n",
            "compute_gradients\n",
            "Epoch: 925 | disc_loss: 1.907069444656372 | gen_loss: 0.10765333473682404\n",
            "compute_gradients\n",
            "Epoch: 926 | disc_loss: 2.278158664703369 | gen_loss: 0.11188908666372299\n",
            "compute_gradients\n",
            "Epoch: 927 | disc_loss: 1.7982187271118164 | gen_loss: 0.10846418142318726\n",
            "compute_gradients\n",
            "Epoch: 928 | disc_loss: 1.582128643989563 | gen_loss: 0.1297290027141571\n",
            "compute_gradients\n",
            "Epoch: 929 | disc_loss: 1.8475292921066284 | gen_loss: 0.11628395318984985\n",
            "compute_gradients\n",
            "Epoch: 930 | disc_loss: 1.8864682912826538 | gen_loss: 0.1160341277718544\n",
            "compute_gradients\n",
            "Epoch: 931 | disc_loss: 1.92018723487854 | gen_loss: 0.11220162361860275\n",
            "compute_gradients\n",
            "Epoch: 932 | disc_loss: 2.3046762943267822 | gen_loss: 0.10366321355104446\n",
            "compute_gradients\n",
            "Epoch: 933 | disc_loss: 2.070150375366211 | gen_loss: 0.09979096055030823\n",
            "compute_gradients\n",
            "Epoch: 934 | disc_loss: 1.7758662700653076 | gen_loss: 0.11656863987445831\n",
            "compute_gradients\n",
            "Epoch: 935 | disc_loss: 1.9095056056976318 | gen_loss: 0.11987975984811783\n",
            "compute_gradients\n",
            "Epoch: 936 | disc_loss: 1.9326519966125488 | gen_loss: 0.10620630532503128\n",
            "compute_gradients\n",
            "Epoch: 937 | disc_loss: 1.8850840330123901 | gen_loss: 0.09173335880041122\n",
            "compute_gradients\n",
            "Epoch: 938 | disc_loss: 2.1701619625091553 | gen_loss: 0.1081918254494667\n",
            "compute_gradients\n",
            "Epoch: 939 | disc_loss: 2.131033182144165 | gen_loss: 0.11111950129270554\n",
            "compute_gradients\n",
            "Epoch: 940 | disc_loss: 1.9622981548309326 | gen_loss: 0.10870619863271713\n",
            "compute_gradients\n",
            "Epoch: 941 | disc_loss: 1.7599273920059204 | gen_loss: 0.1215740516781807\n",
            "compute_gradients\n",
            "Epoch: 942 | disc_loss: 2.1470556259155273 | gen_loss: 0.10392284393310547\n",
            "compute_gradients\n",
            "Epoch: 943 | disc_loss: 1.9330015182495117 | gen_loss: 0.10319443047046661\n",
            "compute_gradients\n",
            "Epoch: 944 | disc_loss: 2.089139461517334 | gen_loss: 0.11076358705759048\n",
            "compute_gradients\n",
            "Epoch: 945 | disc_loss: 2.1754844188690186 | gen_loss: 0.10213486850261688\n",
            "compute_gradients\n",
            "Epoch: 946 | disc_loss: 1.9450502395629883 | gen_loss: 0.1229536160826683\n",
            "compute_gradients\n",
            "Epoch: 947 | disc_loss: 1.9792897701263428 | gen_loss: 0.10078192502260208\n",
            "compute_gradients\n",
            "Epoch: 948 | disc_loss: 1.6601691246032715 | gen_loss: 0.11994844675064087\n",
            "compute_gradients\n",
            "Epoch: 949 | disc_loss: 1.9302293062210083 | gen_loss: 0.11324772238731384\n",
            "compute_gradients\n",
            "Epoch: 950 | disc_loss: 1.888214111328125 | gen_loss: 0.11077655851840973\n",
            "compute_gradients\n",
            "Epoch: 951 | disc_loss: 2.020282030105591 | gen_loss: 0.1043434739112854\n",
            "compute_gradients\n",
            "Epoch: 952 | disc_loss: 1.9635982513427734 | gen_loss: 0.10620511323213577\n",
            "compute_gradients\n",
            "Epoch: 953 | disc_loss: 2.061415433883667 | gen_loss: 0.11577603965997696\n",
            "compute_gradients\n",
            "Epoch: 954 | disc_loss: 1.923661470413208 | gen_loss: 0.12514694035053253\n",
            "compute_gradients\n",
            "Epoch: 955 | disc_loss: 2.282897472381592 | gen_loss: 0.09977470338344574\n",
            "compute_gradients\n",
            "Epoch: 956 | disc_loss: 1.841255784034729 | gen_loss: 0.11505188047885895\n",
            "compute_gradients\n",
            "Epoch: 957 | disc_loss: 2.1892805099487305 | gen_loss: 0.10559340566396713\n",
            "compute_gradients\n",
            "Epoch: 958 | disc_loss: 2.3367459774017334 | gen_loss: 0.11016301810741425\n",
            "compute_gradients\n",
            "Epoch: 959 | disc_loss: 2.263420820236206 | gen_loss: 0.11510007083415985\n",
            "compute_gradients\n",
            "Epoch: 960 | disc_loss: 2.324803352355957 | gen_loss: 0.11031358689069748\n",
            "compute_gradients\n",
            "Epoch: 961 | disc_loss: 1.9725182056427002 | gen_loss: 0.1087975725531578\n",
            "compute_gradients\n",
            "Epoch: 962 | disc_loss: 2.3928375244140625 | gen_loss: 0.09186641871929169\n",
            "compute_gradients\n",
            "Epoch: 963 | disc_loss: 2.2283570766448975 | gen_loss: 0.11455574631690979\n",
            "compute_gradients\n",
            "Epoch: 964 | disc_loss: 1.9374308586120605 | gen_loss: 0.11591709405183792\n",
            "compute_gradients\n",
            "Epoch: 965 | disc_loss: 2.376408815383911 | gen_loss: 0.09418893605470657\n",
            "compute_gradients\n",
            "Epoch: 966 | disc_loss: 2.0600640773773193 | gen_loss: 0.09804469347000122\n",
            "compute_gradients\n",
            "Epoch: 967 | disc_loss: 2.067688465118408 | gen_loss: 0.1017221137881279\n",
            "compute_gradients\n",
            "Epoch: 968 | disc_loss: 2.0277674198150635 | gen_loss: 0.0988796055316925\n",
            "compute_gradients\n",
            "Epoch: 969 | disc_loss: 1.8524250984191895 | gen_loss: 0.10422752052545547\n",
            "compute_gradients\n",
            "Epoch: 970 | disc_loss: 2.4282760620117188 | gen_loss: 0.10786907374858856\n",
            "compute_gradients\n",
            "Epoch: 971 | disc_loss: 2.3848817348480225 | gen_loss: 0.10867207497358322\n",
            "compute_gradients\n",
            "Epoch: 972 | disc_loss: 2.3117642402648926 | gen_loss: 0.10943065583705902\n",
            "compute_gradients\n",
            "Epoch: 973 | disc_loss: 2.217238426208496 | gen_loss: 0.11140797287225723\n",
            "compute_gradients\n",
            "Epoch: 974 | disc_loss: 2.143054723739624 | gen_loss: 0.1125309094786644\n",
            "compute_gradients\n",
            "Epoch: 975 | disc_loss: 2.223564624786377 | gen_loss: 0.11327989399433136\n",
            "compute_gradients\n",
            "Epoch: 976 | disc_loss: 2.207062005996704 | gen_loss: 0.1176675409078598\n",
            "compute_gradients\n",
            "Epoch: 977 | disc_loss: 2.0232372283935547 | gen_loss: 0.10267749428749084\n",
            "compute_gradients\n",
            "Epoch: 978 | disc_loss: 1.9254522323608398 | gen_loss: 0.11083754897117615\n",
            "compute_gradients\n",
            "Epoch: 979 | disc_loss: 1.7535643577575684 | gen_loss: 0.12131136655807495\n",
            "compute_gradients\n",
            "Epoch: 980 | disc_loss: 2.361349105834961 | gen_loss: 0.10035619884729385\n",
            "compute_gradients\n",
            "Epoch: 981 | disc_loss: 2.3861329555511475 | gen_loss: 0.10374607145786285\n",
            "compute_gradients\n",
            "Epoch: 982 | disc_loss: 2.211331844329834 | gen_loss: 0.1043873205780983\n",
            "compute_gradients\n",
            "Epoch: 983 | disc_loss: 2.3356246948242188 | gen_loss: 0.09912406653165817\n",
            "compute_gradients\n",
            "Epoch: 984 | disc_loss: 2.3030452728271484 | gen_loss: 0.10953865945339203\n",
            "compute_gradients\n",
            "Epoch: 985 | disc_loss: 2.2468488216400146 | gen_loss: 0.11319876462221146\n",
            "compute_gradients\n",
            "Epoch: 986 | disc_loss: 2.1246931552886963 | gen_loss: 0.11114160716533661\n",
            "compute_gradients\n",
            "Epoch: 987 | disc_loss: 2.223064422607422 | gen_loss: 0.10014959424734116\n",
            "compute_gradients\n",
            "Epoch: 988 | disc_loss: 2.2186267375946045 | gen_loss: 0.11088264733552933\n",
            "compute_gradients\n",
            "Epoch: 989 | disc_loss: 2.1835076808929443 | gen_loss: 0.10386449843645096\n",
            "compute_gradients\n",
            "Epoch: 990 | disc_loss: 2.289417266845703 | gen_loss: 0.10720911622047424\n",
            "compute_gradients\n",
            "Epoch: 991 | disc_loss: 2.1492505073547363 | gen_loss: 0.1066967025399208\n",
            "compute_gradients\n",
            "Epoch: 992 | disc_loss: 2.5169472694396973 | gen_loss: 0.09900366514921188\n",
            "compute_gradients\n",
            "Epoch: 993 | disc_loss: 2.2399353981018066 | gen_loss: 0.10492607206106186\n",
            "compute_gradients\n",
            "Epoch: 994 | disc_loss: 2.4846408367156982 | gen_loss: 0.10306558758020401\n",
            "compute_gradients\n",
            "Epoch: 995 | disc_loss: 2.3219711780548096 | gen_loss: 0.0929759293794632\n",
            "compute_gradients\n",
            "Epoch: 996 | disc_loss: 2.327683210372925 | gen_loss: 0.10361161082983017\n",
            "compute_gradients\n",
            "Epoch: 997 | disc_loss: 2.008546829223633 | gen_loss: 0.09775710850954056\n",
            "compute_gradients\n",
            "Epoch: 998 | disc_loss: 2.2948710918426514 | gen_loss: 0.09968478232622147\n",
            "compute_gradients\n",
            "Epoch: 999 | disc_loss: 2.282830238342285 | gen_loss: 0.09136958420276642\n",
            "compute_gradients\n",
            "Epoch: 1000 | disc_loss: 2.4166433811187744 | gen_loss: 0.09934978187084198\n",
            "compute_gradients\n",
            "Epoch: 1001 | disc_loss: 2.0439248085021973 | gen_loss: 0.10044349730014801\n",
            "compute_gradients\n",
            "Epoch: 1002 | disc_loss: 2.1149046421051025 | gen_loss: 0.09332266449928284\n",
            "compute_gradients\n",
            "Epoch: 1003 | disc_loss: 2.5052778720855713 | gen_loss: 0.09151505678892136\n",
            "compute_gradients\n",
            "Epoch: 1004 | disc_loss: 2.3249759674072266 | gen_loss: 0.1071961373090744\n",
            "compute_gradients\n",
            "Epoch: 1005 | disc_loss: 2.5944690704345703 | gen_loss: 0.10259485244750977\n",
            "compute_gradients\n",
            "Epoch: 1006 | disc_loss: 2.1591482162475586 | gen_loss: 0.10014572739601135\n",
            "compute_gradients\n",
            "Epoch: 1007 | disc_loss: 2.4486873149871826 | gen_loss: 0.10244111716747284\n",
            "compute_gradients\n",
            "Epoch: 1008 | disc_loss: 2.424220085144043 | gen_loss: 0.09583310037851334\n",
            "compute_gradients\n",
            "Epoch: 1009 | disc_loss: 2.249063491821289 | gen_loss: 0.10236344486474991\n",
            "compute_gradients\n",
            "Epoch: 1010 | disc_loss: 2.4961657524108887 | gen_loss: 0.08688678592443466\n",
            "compute_gradients\n",
            "Epoch: 1011 | disc_loss: 2.4380924701690674 | gen_loss: 0.09921043366193771\n",
            "compute_gradients\n",
            "Epoch: 1012 | disc_loss: 2.535083293914795 | gen_loss: 0.09556557983160019\n",
            "compute_gradients\n",
            "Epoch: 1013 | disc_loss: 2.1813924312591553 | gen_loss: 0.09670543670654297\n",
            "compute_gradients\n",
            "Epoch: 1014 | disc_loss: 1.976588487625122 | gen_loss: 0.09739211946725845\n",
            "compute_gradients\n",
            "Epoch: 1015 | disc_loss: 2.30549693107605 | gen_loss: 0.09362027794122696\n",
            "compute_gradients\n",
            "Epoch: 1016 | disc_loss: 2.34904408454895 | gen_loss: 0.09227807074785233\n",
            "compute_gradients\n",
            "Epoch: 1017 | disc_loss: 2.420654296875 | gen_loss: 0.0925840362906456\n",
            "compute_gradients\n",
            "Epoch: 1018 | disc_loss: 2.0611155033111572 | gen_loss: 0.10820964723825455\n",
            "compute_gradients\n",
            "Epoch: 1019 | disc_loss: 2.49094557762146 | gen_loss: 0.10675536841154099\n",
            "compute_gradients\n",
            "Epoch: 1020 | disc_loss: 2.3408026695251465 | gen_loss: 0.105866439640522\n",
            "compute_gradients\n",
            "Epoch: 1021 | disc_loss: 2.7780587673187256 | gen_loss: 0.09068386256694794\n",
            "compute_gradients\n",
            "Epoch: 1022 | disc_loss: 2.211353302001953 | gen_loss: 0.09950003772974014\n",
            "compute_gradients\n",
            "Epoch: 1023 | disc_loss: 2.325435161590576 | gen_loss: 0.09791377931833267\n",
            "compute_gradients\n",
            "Epoch: 1024 | disc_loss: 2.3684208393096924 | gen_loss: 0.08947239816188812\n",
            "compute_gradients\n",
            "Epoch: 1025 | disc_loss: 2.296863079071045 | gen_loss: 0.08741017431020737\n",
            "compute_gradients\n",
            "Epoch: 1026 | disc_loss: 2.369832992553711 | gen_loss: 0.09310027211904526\n",
            "compute_gradients\n",
            "Epoch: 1027 | disc_loss: 2.4311721324920654 | gen_loss: 0.09576834738254547\n",
            "compute_gradients\n",
            "Epoch: 1028 | disc_loss: 2.2261593341827393 | gen_loss: 0.09256952255964279\n",
            "compute_gradients\n",
            "Epoch: 1029 | disc_loss: 2.525841236114502 | gen_loss: 0.09137482941150665\n",
            "compute_gradients\n",
            "Epoch: 1030 | disc_loss: 2.2450859546661377 | gen_loss: 0.1041286438703537\n",
            "compute_gradients\n",
            "Epoch: 1031 | disc_loss: 2.470343828201294 | gen_loss: 0.09528125822544098\n",
            "compute_gradients\n",
            "Epoch: 1032 | disc_loss: 2.2547075748443604 | gen_loss: 0.09473887085914612\n",
            "compute_gradients\n",
            "Epoch: 1033 | disc_loss: 2.695242404937744 | gen_loss: 0.08577565103769302\n",
            "compute_gradients\n",
            "Epoch: 1034 | disc_loss: 2.5123462677001953 | gen_loss: 0.07622915506362915\n",
            "compute_gradients\n",
            "Epoch: 1035 | disc_loss: 2.408632278442383 | gen_loss: 0.09915066510438919\n",
            "compute_gradients\n",
            "Epoch: 1036 | disc_loss: 2.597886085510254 | gen_loss: 0.09234225004911423\n",
            "compute_gradients\n",
            "Epoch: 1037 | disc_loss: 2.534074068069458 | gen_loss: 0.10230064392089844\n",
            "compute_gradients\n",
            "Epoch: 1038 | disc_loss: 2.722874164581299 | gen_loss: 0.08616241812705994\n",
            "compute_gradients\n",
            "Epoch: 1039 | disc_loss: 2.263956308364868 | gen_loss: 0.09604907035827637\n",
            "compute_gradients\n",
            "Epoch: 1040 | disc_loss: 2.6701889038085938 | gen_loss: 0.08699176460504532\n",
            "compute_gradients\n",
            "Epoch: 1041 | disc_loss: 2.4552512168884277 | gen_loss: 0.09013568609952927\n",
            "compute_gradients\n",
            "Epoch: 1042 | disc_loss: 2.5526680946350098 | gen_loss: 0.08295723795890808\n",
            "compute_gradients\n",
            "Epoch: 1043 | disc_loss: 2.6570823192596436 | gen_loss: 0.08953031897544861\n",
            "compute_gradients\n",
            "Epoch: 1044 | disc_loss: 2.569542407989502 | gen_loss: 0.08654952794313431\n",
            "compute_gradients\n",
            "Epoch: 1045 | disc_loss: 2.4846367835998535 | gen_loss: 0.09196250140666962\n",
            "compute_gradients\n",
            "Epoch: 1046 | disc_loss: 2.1399519443511963 | gen_loss: 0.09830281883478165\n",
            "compute_gradients\n",
            "Epoch: 1047 | disc_loss: 2.3753294944763184 | gen_loss: 0.08899369835853577\n",
            "compute_gradients\n",
            "Epoch: 1048 | disc_loss: 2.3046727180480957 | gen_loss: 0.08370056748390198\n",
            "compute_gradients\n",
            "Epoch: 1049 | disc_loss: 2.708038330078125 | gen_loss: 0.08989822119474411\n",
            "compute_gradients\n",
            "Epoch: 1050 | disc_loss: 2.291919231414795 | gen_loss: 0.0927378386259079\n",
            "compute_gradients\n",
            "Epoch: 1051 | disc_loss: 2.5190048217773438 | gen_loss: 0.0842994898557663\n",
            "compute_gradients\n",
            "Epoch: 1052 | disc_loss: 2.502117872238159 | gen_loss: 0.09488306939601898\n",
            "compute_gradients\n",
            "Epoch: 1053 | disc_loss: 2.661334991455078 | gen_loss: 0.07951737195253372\n",
            "compute_gradients\n",
            "Epoch: 1054 | disc_loss: 2.4370651245117188 | gen_loss: 0.09196101129055023\n",
            "compute_gradients\n",
            "Epoch: 1055 | disc_loss: 2.40008544921875 | gen_loss: 0.09238814562559128\n",
            "compute_gradients\n",
            "Epoch: 1056 | disc_loss: 2.3649282455444336 | gen_loss: 0.08811532706022263\n",
            "compute_gradients\n",
            "Epoch: 1057 | disc_loss: 2.140763521194458 | gen_loss: 0.09483134001493454\n",
            "compute_gradients\n",
            "Epoch: 1058 | disc_loss: 2.2000882625579834 | gen_loss: 0.09774384647607803\n",
            "compute_gradients\n",
            "Epoch: 1059 | disc_loss: 2.241044759750366 | gen_loss: 0.09085073322057724\n",
            "compute_gradients\n",
            "Epoch: 1060 | disc_loss: 2.467526435852051 | gen_loss: 0.08625466376543045\n",
            "compute_gradients\n",
            "Epoch: 1061 | disc_loss: 2.1228647232055664 | gen_loss: 0.09362571686506271\n",
            "compute_gradients\n",
            "Epoch: 1062 | disc_loss: 2.485887289047241 | gen_loss: 0.0861988440155983\n",
            "compute_gradients\n",
            "Epoch: 1063 | disc_loss: 2.3003904819488525 | gen_loss: 0.08153820037841797\n",
            "compute_gradients\n",
            "Epoch: 1064 | disc_loss: 2.4618923664093018 | gen_loss: 0.08958402276039124\n",
            "compute_gradients\n",
            "Epoch: 1065 | disc_loss: 2.444389581680298 | gen_loss: 0.08660561591386795\n",
            "compute_gradients\n",
            "Epoch: 1066 | disc_loss: 2.33023738861084 | gen_loss: 0.09121054410934448\n",
            "compute_gradients\n",
            "Epoch: 1067 | disc_loss: 2.63759183883667 | gen_loss: 0.08427823334932327\n",
            "compute_gradients\n",
            "Epoch: 1068 | disc_loss: 2.300967216491699 | gen_loss: 0.0900932028889656\n",
            "compute_gradients\n",
            "Epoch: 1069 | disc_loss: 2.2796058654785156 | gen_loss: 0.08300570398569107\n",
            "compute_gradients\n",
            "Epoch: 1070 | disc_loss: 2.447416305541992 | gen_loss: 0.08576343208551407\n",
            "compute_gradients\n",
            "Epoch: 1071 | disc_loss: 2.7259273529052734 | gen_loss: 0.08156751841306686\n",
            "compute_gradients\n",
            "Epoch: 1072 | disc_loss: 2.7002198696136475 | gen_loss: 0.08939669281244278\n",
            "compute_gradients\n",
            "Epoch: 1073 | disc_loss: 2.1456847190856934 | gen_loss: 0.08921431750059128\n",
            "compute_gradients\n",
            "Epoch: 1074 | disc_loss: 2.199240207672119 | gen_loss: 0.09739243239164352\n",
            "compute_gradients\n",
            "Epoch: 1075 | disc_loss: 3.0678234100341797 | gen_loss: 0.07662764936685562\n",
            "compute_gradients\n",
            "Epoch: 1076 | disc_loss: 2.4946725368499756 | gen_loss: 0.07976926118135452\n",
            "compute_gradients\n",
            "Epoch: 1077 | disc_loss: 2.73376727104187 | gen_loss: 0.08619117736816406\n",
            "compute_gradients\n",
            "Epoch: 1078 | disc_loss: 2.5463292598724365 | gen_loss: 0.08570274710655212\n",
            "compute_gradients\n",
            "Epoch: 1079 | disc_loss: 2.865788698196411 | gen_loss: 0.07376623898744583\n",
            "compute_gradients\n",
            "Epoch: 1080 | disc_loss: 2.5388691425323486 | gen_loss: 0.08477336168289185\n",
            "compute_gradients\n",
            "Epoch: 1081 | disc_loss: 2.707948684692383 | gen_loss: 0.08095277100801468\n",
            "compute_gradients\n",
            "Epoch: 1082 | disc_loss: 2.6353776454925537 | gen_loss: 0.08180367201566696\n",
            "compute_gradients\n",
            "Epoch: 1083 | disc_loss: 2.607492446899414 | gen_loss: 0.08044452965259552\n",
            "compute_gradients\n",
            "Epoch: 1084 | disc_loss: 2.6704893112182617 | gen_loss: 0.07880871742963791\n",
            "compute_gradients\n",
            "Epoch: 1085 | disc_loss: 2.1258723735809326 | gen_loss: 0.08448590338230133\n",
            "compute_gradients\n",
            "Epoch: 1086 | disc_loss: 2.5110080242156982 | gen_loss: 0.08184078335762024\n",
            "compute_gradients\n",
            "Epoch: 1087 | disc_loss: 2.793536901473999 | gen_loss: 0.08098742365837097\n",
            "compute_gradients\n",
            "Epoch: 1088 | disc_loss: 2.4550156593322754 | gen_loss: 0.08388103544712067\n",
            "compute_gradients\n",
            "Epoch: 1089 | disc_loss: 2.4118199348449707 | gen_loss: 0.08022430539131165\n",
            "compute_gradients\n",
            "Epoch: 1090 | disc_loss: 3.015834331512451 | gen_loss: 0.07276477664709091\n",
            "compute_gradients\n",
            "Epoch: 1091 | disc_loss: 2.638949394226074 | gen_loss: 0.06829352676868439\n",
            "compute_gradients\n",
            "Epoch: 1092 | disc_loss: 2.5185256004333496 | gen_loss: 0.07703780382871628\n",
            "compute_gradients\n",
            "Epoch: 1093 | disc_loss: 2.4844255447387695 | gen_loss: 0.09358169883489609\n",
            "compute_gradients\n",
            "Epoch: 1094 | disc_loss: 2.0943245887756348 | gen_loss: 0.08353029191493988\n",
            "compute_gradients\n",
            "Epoch: 1095 | disc_loss: 2.6489956378936768 | gen_loss: 0.07625497877597809\n",
            "compute_gradients\n",
            "Epoch: 1096 | disc_loss: 2.880408763885498 | gen_loss: 0.07341918349266052\n",
            "compute_gradients\n",
            "Epoch: 1097 | disc_loss: 2.765962839126587 | gen_loss: 0.0804269090294838\n",
            "compute_gradients\n",
            "Epoch: 1098 | disc_loss: 2.6464672088623047 | gen_loss: 0.08363375812768936\n",
            "compute_gradients\n",
            "Epoch: 1099 | disc_loss: 2.811617136001587 | gen_loss: 0.08349506556987762\n",
            "compute_gradients\n",
            "Epoch: 1100 | disc_loss: 2.201935291290283 | gen_loss: 0.0847572311758995\n",
            "compute_gradients\n",
            "Epoch: 1101 | disc_loss: 2.202627420425415 | gen_loss: 0.08544932305812836\n",
            "compute_gradients\n",
            "Epoch: 1102 | disc_loss: 2.632930278778076 | gen_loss: 0.0757230743765831\n",
            "compute_gradients\n",
            "Epoch: 1103 | disc_loss: 2.456495523452759 | gen_loss: 0.08617827296257019\n",
            "compute_gradients\n",
            "Epoch: 1104 | disc_loss: 2.587820529937744 | gen_loss: 0.07819479703903198\n",
            "compute_gradients\n",
            "Epoch: 1105 | disc_loss: 2.4230730533599854 | gen_loss: 0.09578962624073029\n",
            "compute_gradients\n",
            "Epoch: 1106 | disc_loss: 2.605847120285034 | gen_loss: 0.07536144554615021\n",
            "compute_gradients\n",
            "Epoch: 1107 | disc_loss: 2.508310556411743 | gen_loss: 0.09190740436315536\n",
            "compute_gradients\n",
            "Epoch: 1108 | disc_loss: 2.3507049083709717 | gen_loss: 0.08266551047563553\n",
            "compute_gradients\n",
            "Epoch: 1109 | disc_loss: 2.8894755840301514 | gen_loss: 0.08250290900468826\n",
            "compute_gradients\n",
            "Epoch: 1110 | disc_loss: 2.462801694869995 | gen_loss: 0.08693353831768036\n",
            "compute_gradients\n",
            "Epoch: 1111 | disc_loss: 2.626777410507202 | gen_loss: 0.08173605054616928\n",
            "compute_gradients\n",
            "Epoch: 1112 | disc_loss: 2.610771656036377 | gen_loss: 0.08355756103992462\n",
            "compute_gradients\n",
            "Epoch: 1113 | disc_loss: 2.8856935501098633 | gen_loss: 0.0700087770819664\n",
            "compute_gradients\n",
            "Epoch: 1114 | disc_loss: 3.0047428607940674 | gen_loss: 0.06784042716026306\n",
            "compute_gradients\n",
            "Epoch: 1115 | disc_loss: 2.728242874145508 | gen_loss: 0.07662581652402878\n",
            "compute_gradients\n",
            "Epoch: 1116 | disc_loss: 2.5897903442382812 | gen_loss: 0.07916805893182755\n",
            "compute_gradients\n",
            "Epoch: 1117 | disc_loss: 2.5955522060394287 | gen_loss: 0.08854976296424866\n",
            "compute_gradients\n",
            "Epoch: 1118 | disc_loss: 2.6215391159057617 | gen_loss: 0.08031485229730606\n",
            "compute_gradients\n",
            "Epoch: 1119 | disc_loss: 2.616638422012329 | gen_loss: 0.07381980866193771\n",
            "compute_gradients\n",
            "Epoch: 1120 | disc_loss: 2.3285608291625977 | gen_loss: 0.07733830064535141\n",
            "compute_gradients\n",
            "Epoch: 1121 | disc_loss: 2.5007143020629883 | gen_loss: 0.07854001224040985\n",
            "compute_gradients\n",
            "Epoch: 1122 | disc_loss: 2.681943893432617 | gen_loss: 0.07718256115913391\n",
            "compute_gradients\n",
            "Epoch: 1123 | disc_loss: 2.457273483276367 | gen_loss: 0.0841297060251236\n",
            "compute_gradients\n",
            "Epoch: 1124 | disc_loss: 2.937016248703003 | gen_loss: 0.08681647479534149\n",
            "compute_gradients\n",
            "Epoch: 1125 | disc_loss: 2.8843820095062256 | gen_loss: 0.07914627343416214\n",
            "compute_gradients\n",
            "Epoch: 1126 | disc_loss: 2.4813220500946045 | gen_loss: 0.07948644459247589\n",
            "compute_gradients\n",
            "Epoch: 1127 | disc_loss: 2.6523733139038086 | gen_loss: 0.07392559200525284\n",
            "compute_gradients\n",
            "Epoch: 1128 | disc_loss: 2.610661506652832 | gen_loss: 0.08116906881332397\n",
            "compute_gradients\n",
            "Epoch: 1129 | disc_loss: 2.281869888305664 | gen_loss: 0.08446760475635529\n",
            "compute_gradients\n",
            "Epoch: 1130 | disc_loss: 2.3151185512542725 | gen_loss: 0.0974743440747261\n",
            "compute_gradients\n",
            "Epoch: 1131 | disc_loss: 2.7174224853515625 | gen_loss: 0.08371341973543167\n",
            "compute_gradients\n",
            "Epoch: 1132 | disc_loss: 2.550028085708618 | gen_loss: 0.08500894904136658\n",
            "compute_gradients\n",
            "Epoch: 1133 | disc_loss: 2.44380521774292 | gen_loss: 0.08148109167814255\n",
            "compute_gradients\n",
            "Epoch: 1134 | disc_loss: 2.652247428894043 | gen_loss: 0.08932986110448837\n",
            "compute_gradients\n",
            "Epoch: 1135 | disc_loss: 2.1628291606903076 | gen_loss: 0.0933598130941391\n",
            "compute_gradients\n",
            "Epoch: 1136 | disc_loss: 2.4818899631500244 | gen_loss: 0.08570807427167892\n",
            "compute_gradients\n",
            "Epoch: 1137 | disc_loss: 2.730654716491699 | gen_loss: 0.07338432967662811\n",
            "compute_gradients\n",
            "Epoch: 1138 | disc_loss: 2.758807420730591 | gen_loss: 0.07759639620780945\n",
            "compute_gradients\n",
            "Epoch: 1139 | disc_loss: 2.3620715141296387 | gen_loss: 0.08385369926691055\n",
            "compute_gradients\n",
            "Epoch: 1140 | disc_loss: 2.138298511505127 | gen_loss: 0.09126357734203339\n",
            "compute_gradients\n",
            "Epoch: 1141 | disc_loss: 2.669022560119629 | gen_loss: 0.07592529058456421\n",
            "compute_gradients\n",
            "Epoch: 1142 | disc_loss: 2.692160129547119 | gen_loss: 0.07538943737745285\n",
            "compute_gradients\n",
            "Epoch: 1143 | disc_loss: 2.7229788303375244 | gen_loss: 0.08153855055570602\n",
            "compute_gradients\n",
            "Epoch: 1144 | disc_loss: 2.4997940063476562 | gen_loss: 0.08203929662704468\n",
            "compute_gradients\n",
            "Epoch: 1145 | disc_loss: 2.5080888271331787 | gen_loss: 0.0791935846209526\n",
            "compute_gradients\n",
            "Epoch: 1146 | disc_loss: 2.4543278217315674 | gen_loss: 0.08506113290786743\n",
            "compute_gradients\n",
            "Epoch: 1147 | disc_loss: 2.343485116958618 | gen_loss: 0.0939927026629448\n",
            "compute_gradients\n",
            "Epoch: 1148 | disc_loss: 2.806600570678711 | gen_loss: 0.07753462344408035\n",
            "compute_gradients\n",
            "Epoch: 1149 | disc_loss: 2.647507905960083 | gen_loss: 0.08904337882995605\n",
            "compute_gradients\n",
            "Epoch: 1150 | disc_loss: 2.387622356414795 | gen_loss: 0.08148684352636337\n",
            "compute_gradients\n",
            "Epoch: 1151 | disc_loss: 2.7266323566436768 | gen_loss: 0.080967016518116\n",
            "compute_gradients\n",
            "Epoch: 1152 | disc_loss: 2.8239166736602783 | gen_loss: 0.08697931468486786\n",
            "compute_gradients\n",
            "Epoch: 1153 | disc_loss: 2.5868818759918213 | gen_loss: 0.08447473496198654\n",
            "compute_gradients\n",
            "Epoch: 1154 | disc_loss: 2.934734344482422 | gen_loss: 0.07443852722644806\n",
            "compute_gradients\n",
            "Epoch: 1155 | disc_loss: 2.8160715103149414 | gen_loss: 0.07192225009202957\n",
            "compute_gradients\n",
            "Epoch: 1156 | disc_loss: 2.6786110401153564 | gen_loss: 0.07505469769239426\n",
            "compute_gradients\n",
            "Epoch: 1157 | disc_loss: 2.4954464435577393 | gen_loss: 0.07879472523927689\n",
            "compute_gradients\n",
            "Epoch: 1158 | disc_loss: 2.6867501735687256 | gen_loss: 0.08080999553203583\n",
            "compute_gradients\n",
            "Epoch: 1159 | disc_loss: 2.867994546890259 | gen_loss: 0.08421646803617477\n",
            "compute_gradients\n",
            "Epoch: 1160 | disc_loss: 3.063499927520752 | gen_loss: 0.06830066442489624\n",
            "compute_gradients\n",
            "Epoch: 1161 | disc_loss: 2.8053503036499023 | gen_loss: 0.07698836922645569\n",
            "compute_gradients\n",
            "Epoch: 1162 | disc_loss: 2.2120893001556396 | gen_loss: 0.08222425729036331\n",
            "compute_gradients\n",
            "Epoch: 1163 | disc_loss: 2.4458987712860107 | gen_loss: 0.08642996847629547\n",
            "compute_gradients\n",
            "Epoch: 1164 | disc_loss: 2.6208245754241943 | gen_loss: 0.08311241865158081\n",
            "compute_gradients\n",
            "Epoch: 1165 | disc_loss: 2.8264548778533936 | gen_loss: 0.08048558980226517\n",
            "compute_gradients\n",
            "Epoch: 1166 | disc_loss: 2.460465431213379 | gen_loss: 0.0840703696012497\n",
            "compute_gradients\n",
            "Epoch: 1167 | disc_loss: 2.518792152404785 | gen_loss: 0.08572939038276672\n",
            "compute_gradients\n",
            "Epoch: 1168 | disc_loss: 2.5465846061706543 | gen_loss: 0.08271243423223495\n",
            "compute_gradients\n",
            "Epoch: 1169 | disc_loss: 2.773297071456909 | gen_loss: 0.07819531112909317\n",
            "compute_gradients\n",
            "Epoch: 1170 | disc_loss: 2.6375811100006104 | gen_loss: 0.08078557997941971\n",
            "compute_gradients\n",
            "Epoch: 1171 | disc_loss: 2.769036293029785 | gen_loss: 0.07865583151578903\n",
            "compute_gradients\n",
            "Epoch: 1172 | disc_loss: 2.2708637714385986 | gen_loss: 0.08361781388521194\n",
            "compute_gradients\n",
            "Epoch: 1173 | disc_loss: 2.810934066772461 | gen_loss: 0.07825011014938354\n",
            "compute_gradients\n",
            "Epoch: 1174 | disc_loss: 2.9874813556671143 | gen_loss: 0.07729876041412354\n",
            "compute_gradients\n",
            "Epoch: 1175 | disc_loss: 2.575920343399048 | gen_loss: 0.08279610425233841\n",
            "compute_gradients\n",
            "Epoch: 1176 | disc_loss: 2.616509437561035 | gen_loss: 0.09095967561006546\n",
            "compute_gradients\n",
            "Epoch: 1177 | disc_loss: 2.905130386352539 | gen_loss: 0.07456650584936142\n",
            "compute_gradients\n",
            "Epoch: 1178 | disc_loss: 2.6423468589782715 | gen_loss: 0.08246642351150513\n",
            "compute_gradients\n",
            "Epoch: 1179 | disc_loss: 2.9367034435272217 | gen_loss: 0.0692274272441864\n",
            "compute_gradients\n",
            "Epoch: 1180 | disc_loss: 2.79384708404541 | gen_loss: 0.08018029481172562\n",
            "compute_gradients\n",
            "Epoch: 1181 | disc_loss: 3.077702283859253 | gen_loss: 0.0747784972190857\n",
            "compute_gradients\n",
            "Epoch: 1182 | disc_loss: 2.9652979373931885 | gen_loss: 0.07390324771404266\n",
            "compute_gradients\n",
            "Epoch: 1183 | disc_loss: 2.442051649093628 | gen_loss: 0.08383236825466156\n",
            "compute_gradients\n",
            "Epoch: 1184 | disc_loss: 2.97823429107666 | gen_loss: 0.07058300077915192\n",
            "compute_gradients\n",
            "Epoch: 1185 | disc_loss: 2.4043526649475098 | gen_loss: 0.08489678055047989\n",
            "compute_gradients\n",
            "Epoch: 1186 | disc_loss: 2.5547704696655273 | gen_loss: 0.08737754821777344\n",
            "compute_gradients\n",
            "Epoch: 1187 | disc_loss: 2.4919965267181396 | gen_loss: 0.08066649734973907\n",
            "compute_gradients\n",
            "Epoch: 1188 | disc_loss: 2.5241992473602295 | gen_loss: 0.07658714056015015\n",
            "compute_gradients\n",
            "Epoch: 1189 | disc_loss: 2.608764171600342 | gen_loss: 0.07976052165031433\n",
            "compute_gradients\n",
            "Epoch: 1190 | disc_loss: 2.424326181411743 | gen_loss: 0.08799834549427032\n",
            "compute_gradients\n",
            "Epoch: 1191 | disc_loss: 2.5631768703460693 | gen_loss: 0.07532724738121033\n",
            "compute_gradients\n",
            "Epoch: 1192 | disc_loss: 2.726808786392212 | gen_loss: 0.07453037053346634\n",
            "compute_gradients\n",
            "Epoch: 1193 | disc_loss: 2.6410539150238037 | gen_loss: 0.08362402021884918\n",
            "compute_gradients\n",
            "Epoch: 1194 | disc_loss: 2.3866591453552246 | gen_loss: 0.09032002091407776\n",
            "compute_gradients\n",
            "Epoch: 1195 | disc_loss: 2.8776357173919678 | gen_loss: 0.06904757767915726\n",
            "compute_gradients\n",
            "Epoch: 1196 | disc_loss: 2.5829596519470215 | gen_loss: 0.0826701745390892\n",
            "compute_gradients\n",
            "Epoch: 1197 | disc_loss: 2.501957893371582 | gen_loss: 0.07378269731998444\n",
            "compute_gradients\n",
            "Epoch: 1198 | disc_loss: 2.821291208267212 | gen_loss: 0.08016958087682724\n",
            "compute_gradients\n",
            "Epoch: 1199 | disc_loss: 2.8228683471679688 | gen_loss: 0.070558100938797\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-14T06:46:24.425722Z",
          "start_time": "2019-05-14T06:46:24.188266Z"
        },
        "colab_type": "code",
        "id": "XZbwB70ESYcH",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 286
        },
        "outputId": "06dc4844-67ef-4644-aabb-1d5c1523136d"
      },
      "source": [
        "plt.plot(losses.gen_loss.values)\n"
      ],
      "execution_count": 19,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "[<matplotlib.lines.Line2D at 0x7fba64fdcf98>]"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 19
        },
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAD7CAYAAACRxdTpAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0\ndHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3dd3hUVfrA8e+b3kggEHoJHUGkGBRF\nBBQRxLbiroptXVksq9tsiPoTsaGuba2LdS3o6iqrCLpYQEAWMKBU6b2Hkp6QMuf3x0wmUzOTZJIp\neT/PMw8z5547897c8M7JueeeI8YYlFJKRZaoYAeglFIq8DS5K6VUBNLkrpRSEUiTu1JKRSBN7kop\nFYFigh0AQKtWrUxmZmaww1BKqbCycuXKI8aYDE/bQiK5Z2Zmkp2dHewwlFIqrIjILm/btFtGKaUi\nkCZ3pZSKQJrclVIqAmlyV0qpCKTJXSmlIpBfyV1E0kVktogUicguEZnopd40ESkXkUKHR7fAhqyU\nUsoXf4dCvgSUAW2AgcBcEVltjFnvoe6/jDHXBCpApZRSteez5S4iycAE4AFjTKExZgnwOXBtQwcX\nSMYY1u3Lo+hEBQAWiyF75zG2HCrAGENucRmf/bwPnQJZKRUJ/Gm59wIqjDGbHcpWAyO81L9IRI4B\nB4AXjTGv1DPGWjmQV0KLpDjmrN7Pe8t3c7TwBPkl5eSXVtjrdGmZxKH8UkrLLW77/+nDn3n9uizW\n7sujZ5sUOrVIYuPBfK4Y0rkxD0MpperFn+SeAuS7lOUBzTzU/QiYCRwCTgc+EZFcY8wHrhVFZDIw\nGaBz57onztV7cvn3yr18t/Ew+3JL/NqnotJgcc/rdpPecb9b9jdZnRCRuoaplFKNyp/kXgikupSl\nAgWuFY0xGxxeLhWR54HLAbfkboyZifWLgKysrDr1hby2aDuPzvvFrTw6SmjTLJ5rzujCaZnpdEpP\nIjUhlsS4aHsdi8VQXF7Jki05dGyRxJRP17BuXz7/mjyUz1fv5/3lu53es+u982iRFMs9Y/tw+akd\niYnWgUZKqdAlvvqYbX3ux4F+xpgttrJ3gP3GmCk+9r0HON0Yc1lN9bKyskxd5pb5eU8un6zcS0y0\nMO7kdpzWNd3eZ17fVnZFpYWf9+RSVFbJ9W+ucNo2/pR2vHDlIKKitCWvlAoeEVlpjMnyuM2fC4gi\n8iFggElYR8vMA850HS0jIpcAi4BcYAgwG5hqjPlnTe9f1+TeWLblFHLu09+7lb99wxD+8f12Xrs+\ni5T4kJiDTSnVhAQiuacDbwLnAUeBKcaYWSIyHPjSGJNiq/cBMAaIB/YCLxtj/u7r/UM9uQN89OMe\nyi0WLBbDA5+5jwDd9MhY4mOiPeyplFINo6bk7ldz0xhzDLjUQ/lirBdcq15fVdcgQ91vhnSyPz+9\nW0vGPLvIafve4yV0z0hx3U0ppYJCrwrWQa82zVj1wHlOZT/vzmVfbgk7jhQFKSqllKqmHcV1lJ4c\n5/T6jo9X258vn3oubVITGjskpZSy05Z7Pbi23qscLy5r5EiUUsqZJvd6cG29V3lzyQ5OVFQ2cjRK\nKVVNk3uAjO3X1v78o+y93PnxGtbtywtiREqppkz73Otp+dRzqbAYUuJi+Gr9QXv5nNX7mbN6Pztn\njA9idEqppkpb7vXUJjWBDs0TSUuKZfRJbdy2Z06Zyy8HXKfmUUqphqXJPYBevz6LnzxcZH1ojqdp\n75VSquFocg+wFh4usnZtpTc3KaUalyb3BvDEhP5OrxNio1i3L4/Sch1Bo5RqHJrcG8AVQzpzRreW\n9tdv/bCTC19Ywl3/XhPEqJRSTYkm9wYSG+P+o52zej9v/bAjCNEopZoaTe4NxGKxzrZ577g+TuWv\nLdoejHCUUk2MJvcGcs/YPvRp
24zfZHVyKo+O1gU+lFINT29iaiD9O6bx1Z/PdiuPjdLvU6VUw9NM\n0wgu6F89NUGsrr2qlGoEmmkaQbu0RPvzTYcKeHr+piBGo5RqCjS5N4K7zu/N45f1J7NlEgAvfLeV\nL9bs13HvSqkGo8m9ESTERnPVaZ1JSai+xHHbrJ8Y/Yz7ottKKRUImtwbUcvkeKfXe4+XMOCh+cxc\ntC1IESmlIpUm90ZU1S3jKK+knMfmbQxCNEqpSKZDIRvRnef3Jjk+hnX781m0OSfY4SilIpi23BtR\ns4RY7h7bh+4ZycEORSkV4TS5B8GfR/cKdghKqQinyT0I0hJjgx2CUirCaXIPEcaYYIeglIogmtxD\nxK9f/R87jxSRX1oe7FCUUhFAk3uQvXz1YACydx1n5N8W8utX/hfkiJRSkUCHQgbJuzeeBkCP1s7r\nq246VBCMcJRSEUaTe5AM75lhf/7rUzvy8cq9QYxGKRVptFsmBCTERgc7BKVUhNHkHgJW7joe7BCU\nUhFGk3sI2HO82Om1DotUStWXJvcQEB3lvK7qRS8uIafghCZ5pVSdaXIPAb8b1tXp9bp9+Qx59Bu6\n3juPikpLkKJSSoUzTe4h4PZzegAwvGcrt237cksaOxylVATwK7mLSLqIzBaRIhHZJSITfdSPE5Ff\nRETH9/lBRNj+2AU8d8VAt23lldo1o5SqPX9b7i8BZUAb4GrgFRHpV0P9uwCdsLwWoqKEeA9DIkc/\n8z0fZ+8JQkRKqXDmM7mLSDIwAXjAGFNojFkCfA5c66V+V+Aa4PFABtoUxMd4Ph3v/G9XI0eilAp3\n/rTcewEVxpjNDmWrAW8t9xeAqUCNncUiMllEskUkOydHG/kAMS6jZqpYdNSMUqqW/EnuKUC+S1ke\n0My1ooj8Cog2xsz29abGmJnGmCxjTFZGRoav6k2CSHVyf/O3Wfbn6/fnkzllLt9sOBSMsJRSYcif\n5F4IpLqUpQJOM1zZum+eBP4YmNCatqQ492l/Jr2THYRIlFLhyJ+JwzYDMSLS0xizxVY2AFjvUq8n\nkAkstrVA44A0ETkIDDXG7AxIxBHuphHdyOqSTmy0jlJVStWdz+RujCkSkU+B6SIyCRgIXAKc6VJ1\nHdDJ4fWZwIvAYHTkjN/uHXcSAGv25nrcXmkxbne0KqWUK3+bh7cCicBh4APgFmPMehEZLiKFAMaY\nCmPMwaoHcAyw2F5XNkj0EczbNdTScv1RKqV882s+d2PMMeBSD+WLsV5w9bTPQqBjfYJryiq9ZPfS\n8kqOFZXRJjWBOC9DJ5VSSrNDiLJYPCf3C19YwvAnF/DuMh37rpTyTpN7iGqeFOex/EBeKQA7jhQ2\nZjhKqTCjyT1E9Widwie3nMmTl5/icfsXaw40ckRKqXCia6iGsFO7tLBfQO3VJoUWSXEs33EMgNzi\n8mCGppQKcdpyD3Gd05MAGNajFamJsU7bHpqzXpfoU0p5pMk9xHVKT+KrPw9nyrg+HCsqc9r21g87\nmfDK0iBFppQKZZrcw0CftqnEx0Rz/ZmZwQ5FKRUmNLmHkYsHtGfbYxcEOwylVBjQ5B5mPE09sOto\nEcVlFUGIRikVqjS5R4ARTy3k+jdXBDsMpVQI0eQehl64apBb2Y87j5M5Za7OPaOUAjS5h6Wa5pTJ\nK9Hx70opTe5hKa6Gud51RT6lFGhyD0sZzeK9biuvtDRiJEqpUKXJPQz1a5/KXef35k/n9nTb9uGP\nu4MQkVIq1GhyD0Miwh9G9eCkdm5rlPPSgm0s3XZEW/BKNXE6cVgYKzrheWTMxNeWA9C/Qxof3XQG\niXHRjRmWUioEaMs9jBWU1jwyZu2+PNbvz2ukaJRSoUSTexgb1LmFzzq6mLZSTZMm9zA2oFNzn3Vi\novQUK9UU6f/8CGfQge9KNUWa3MPc0inn8MdzegCQlhjL1Av6OG3feKCAx+b9wqkPfx2M8JRSQa
LJ\nPcy1b55IJ9tqTb8a1IHJZ3d32n73J2uYuWg7R10W+lBKRTZN7hHgkoEduGVkd+4Y0wuA+8efFOSI\nlFLBpuPcI0BcTBT3jK3ujpk0vBsJsdHc/591QYxKKRVM2nKPUAP9GEmjlIpcmtwjVEy0+/j2SouO\nnFGqqdDkHqEqKt0Tuc43o1TTock9QjVLcL+c8umqffbnxhi+35yD0QnglYpImtwjVJeWyXx3xwin\nsqmz1wIwc9E2ut47j+vfXMGcNQeCEZ5SqoFpco9g3TJS3Mryist5bN5G++vcYh3/rlQk0uTexNz8\n3kqn1566b5RS4U+Te4Rz7Zr5ac9xp9fJcZrclYpEmtwjXLeMFFo7rLlaWu48YkYvpyoVmTS5NwEf\nTh7qdZunIZNKqfCnyb0J6JaRQofmiR63PfnfjUz7fD15xeVU6Dh4pSKGX8ldRNJFZLaIFInILhGZ\n6KXeX0Rku4jki8h+EXlWRLRTNwSM7J3hsXzX0WLeXrqTAdPn0+O+L30u3aeUCg/+ttxfAsqANsDV\nwCsi0s9Dvc+BwcaYVOBkYADwx0AEqurn7rF9aJUS57Ne/2nz9U5WpSKAz+QuIsnABOABY0yhMWYJ\n1iR+rWtdY8w2Y0xu1a6ABegRwHhVHaUlxvLAhX39qrtmry6qrVS486fl3guoMMZsdihbDXhquSMi\nE0UkHziCteX+Dy/1JotItohk5+Tk1DJsVRf+jmnfn1vSwJEopRqaP8k9Bch3KcsDmnmqbIyZZeuW\n6QW8ChzyUm+mMSbLGJOVkeG5P1gFVsvk6iGRQzJbeK13+wc/8c0Gj6dNKRUm/EnuhUCqS1kqUFDT\nTsaYLcB64OW6haYCrUVSdZ/7DcO61lj3nWW7GjocpVQD8ie5bwZiRKSnQ9kArInblxigu89aqlG0\nSYunZXIcL00czDl9WtdYV0fNKBXefCZ3Y0wR8CkwXUSSRWQYcAnwrmtdEZkkIq1tz/sC9wLfBjZk\nVVfxMdGsfOA8xp/Sjugo98U8HOUWuyf3Y0VlPPLFBsoqdDSNUqHO36GQtwKJwGHgA+AWY8x6ERku\nIoUO9YYBa0WkCJhne0wNZMAqMGJ8JPcdR4o4WnjCqeyeT9bw+pIdLNt+tCFDU0oFgF/J3RhzzBhz\nqTEm2RjT2Rgzy1a+2BiT4lDvBmNMG1u9TGPMXcaY0oYKXtWdiPDIpSfXWOfUR75xer3tsPV7PEVn\nklQq5On0A03YVad15okJ/Wus8+mqvfbnVbPQnCjXbhmlQp0m9yYsOkq4Ykhnpl7Qx2udv3602q3s\nqteWsX6/3uikVCjT5K4QrP3vAzqmedy+93ixW9nKXcc91FRKhQpN7gqxXVs9tUu6x+1nPbGAbTmF\nTotpWyw6VbBSoUyTu7IzNSzdkVPgPHJGp4FXKrRpcleIrenu0DDnyiGdnOpcOXMZO49Wd884tuKV\nUqFHk7vCdcR7XHQUlw3uWOM+Fk3uSoU0HbCs7H3uAO9POp3O6UkccbmByZVO+a5UaNOWu2Jkb+s8\nM5cO6sCwHq3olJ5ElNR8B6u23JUKbdpyV3RtlczOGeOdysp8NM19rda05VABR4vKGNqtZb3jU0rV\nnrbclUc1t9thwaYcissqvG4/79lFXDlzWWCDUkr5TZO78ujULi2YMs77naur9+TS9//+y9PzNzVi\nVEopf2lyVx6JCDeP6M6qB86jc3qS13ovfLfV6fVbP+wgc8pcj3U/yt7jNtOkUqphaHJXNUpPjqNF\nUiwAs35/usc6mVPmsvVwAV+tO8hDczZ4rLPraBF3/3sNt3/wU4PFqpSqphdUlU9xMdY2QE0jaEY/\ns8hj+fz1BxnTry0l5ZUAPodYKqUCQ1vuyqf4mGiAOq3ANPndlYx9bhEFpdaLr76GWCqlAkOTu/Kp\nmW1xjso6Tha28WABK3YcAzS5K9VYNLkrnx6+9GRuPKsrw3
u2qvN7VLX6va3d+vw3W8icMpcTFZV1\n/gylVDVN7sqnVinxPHBhX2Ki6/7rUnXTU5SX5P76ku0AlJbpvAZKBYImd9Uo7C13gafnb6LbvdXD\nJY0x1Wv4aa+NUgGho2VUozjh0C3jODZ+19EiRjy10P5apxJWKjC05a4aRaltKOSPO6uX57NYDNuP\nFDnVq9AVnpQKCE3uqk42PjzW/jw5Ltpn/R0uSRzgh21H+Gl3rlOZLt+nVGBot4yqk/iYKCad1ZWL\nBrRn4mu+JwjL9rCg9rVvrHAr05a7UoGhLXdVJyLC/Rf2ZUCn5gF930qLYc+xYp6Zv0n735WqB03u\nqlYW3z2KFVPPdSq7dFAHAGZc1r/e728xhlveX8nfv9vKtpzCer+fUk2VdsuoWunkYYbIhy7ux93n\n9yE+Noq1+/IYd3I7rnljeZ3ev8JiKC23jqzRHhql6k6Tu6q3mOgo0pKsfwQ++qv+9booarEYqu5z\n0l4ZpepOu2VUwHm7C9UfFRaD2O5k0nValao7Te4qpFRaDOLQcj+YV8ovB/KDG5RSYUiTuwopb/2w\n0/580ZYchj7+LeOeX+xWb1tOIZ+s3AtAXnE5ucVljRWiUmFBk7tqEMunnsvok9p43d6heaLH8k9W\n7bVPCzzjy41e9x//98Xc8fFqAAZMn8/A6V/XI1qlIo8md9Ug2qQm0LWVdWTNxzefwbbHLnDaHhPt\nvV/enynfq0bU1HWOeaUinY6WUQ3mjjG9GdW7NUMy0922JcV5/9WrKblPfG0ZXVom219XzVmjlHKm\nLXfVYBJiozmzR/UCH5seGcuP943myctPYVTvDKe6z/xmgP35wTz3dVa/WLMfgKXbjvLBit328hJN\n7kp55FdyF5F0EZktIkUisktEJnqpd5eIrBORAhHZISJ3BTZcFc7iY6LJaBbPb7I62W9QOqNbS169\n5lQqKqu7Vzwton3brJ94/pstbuX7c0saLF6lwpm/LfeXgDKgDXA18IqI9PNQT4DrgBbAWOA2Ebky\nEIGqyFI1b8zI3hmMPbmtX/3sz36z2f68aij9xS/+YC/LLS7jqIcvBqWaIp/JXUSSgQnAA8aYQmPM\nEuBz4FrXusaYJ40xq4wxFcaYTcBnwLBAB63C38TTO9M+LYGLB7YHrPPTnNwh1e/9PV1HHf7kAk59\n5Bvv+1gM0z5fr3PWqCbBn5Z7L6DCGLPZoWw14KnlbiciAgwH1tc9PBWpurRMZum959IuzTokMjY6\nii9uH85zVwys83sWlFYA3kfQbD9SyNtLd3LTuyvr/BlKhQt/knsK4HqLYB7QzMd+02zv/5anjSIy\nWUSyRSQ7JyfHjzBUUxAfU/9r/EeLTrAvt4RNBwsor7Swdm+e03ad1kA1Bf4MhSwEXP9eTgUKvO0g\nIrdh7Xsfbozx2AlqjJkJzATIysrS/20KsI6wqa+cghPc8t4qdh8rpl/7VNbvz+ebv47Avvq2w2/b\nNxsO8VH2HmZel1Xvz1UqlPjTTNoMxIhIT4eyAXjpbhGR3wFTgHONMXvrH6JqShxb7rNvPbNO73Gk\nsIzdx4oBWL/f+kfnsSLP0xNMeieb+RsO6cIgKuL4TO7GmCLgU2C6iCSLyDDgEuBd17oicjXwGHCe\nMWZ7oINVkS8+1vormRwXzaDOLer0HnNW73crs47GsSZwT2l8y2G9yKoii78dnLcCicBh4APgFmPM\nehEZLiKO/yseAVoCP4pIoe3xamBDVpEsPsbaLeN4F2pt/Xul+x+Mby/daV+f1VMrfcyzi+r8eUqF\nIr+SuzHmmDHmUmNMsjGmszFmlq18sTEmxaFeV2NMrDEmxeFxc0MFryJPj9YpXDmkE/ePPymg7zt3\nzQGnG6U++3kfZz7+rVOdUX9bGNDPVCqYdPoBFVISYqOZMeEU+7QF5/RpHbD3fvX7bQDkl1bwpw9/\nZn9eqdP2HUeK/HqfzY
cK+OznfQGLS6mGoMldhbRTOqYF7L2+WHMAgOKyijq/R+GJCsY8u4g/ffhz\noMJSqkFoclch7fZzevquVEs1zRL89PxNNe578YtLAhyNUg1Dk7sKadFRwnaXueBdZ5Q8v5/3RUE8\nKauweN32wndbKa+08N6yXXy6yv3C7Pac6q6b+iwEnldczvQ5GzhRobNaqoahyV2FvKgosd2EZNU2\nLcFp+/NXDnLbZ9ak0+v0WWmJsfzu7R+5/z/r+OtHq2usW1bp/Uuixv0qLAyYPp83f9hB7/u/qtN7\nKOWLJncVFnq0TmH1g2NYeOdIxGEKSRHrRdhuGc5DJ7tlpLi+hV/ySspZvOWIU9nT8zdxw1sr2Ooy\nFt5Xcq+otHDf7LV8umovx4vK+G7jIaDmaYrzSsqZs3o/5ZUWHvxsHS8t2GrftvVwAYcLrBeBi8sq\neH3xdl2JSnmlKzGpsJGWGEtaYiwTT+vMrOXWBTuibYn+qiGdeXTeL/a66clxAfnMqbPX2j9rwabv\nnbat35fPu8t28odRPejX3nrh1xjDkq1HGNa9FQs35fD+8t28v7x6cZHXr8uia4b3MfwDHpoPwJDM\nFvy48zgAMxdtZ/E9oxj9zCKio4Rtj13AM/M38/qSHbROTeDiAe0DcqwqsmjLXYWdkzuksXbaGMDa\nZQPOS/NlNIsnLiaKnTPG1/uzZjkkZldXvbaMeWsPMv7vS+wt6O82HubaN1bQbeo8nvt2s9s+k97J\nJre43Klsz7Fit773TQerp27KKynnlGnWpF9pMew5Vsx/NxwEoOhE3Uf+qMimyV2FpShbNo92WeVj\nQKfmzL39LI/7tG4W32DxVC0S8o9F1bNurNvnOpmq1YRXljq9Hv7kAp78ynmUTnSU99VLRv1tIXuO\nWbt2tFtGeaPJXYWlONsEY5PP7uZUntWlBa1Tqy+4OvbFL7xrJJcMrLkL45WrBwM1J1dP5q49QOaU\nuazYcaxW+1VZty+PubZx+ADHXVr3jiocErpOeKa80eSuwlJstLXb5S/n9XIqd811j17aH4DTuqaT\nFBfDlHF9nLpwJgzuaH8+pm8bxvVvx+oHx7DmwTG1iuehORtqdwAulu84xh9mrar1fgXaLaO80OSu\nIsIZ3VsCMLqv83QFJ7Wzrilzy4juALRLS2TH49V98Q9cWD2HzQsTrUMq0xJjSY4Pj7EGrt05SlUJ\nj99gpXzo1z7N4wXU5klxHssHdW5Oy+Q4EuOss1DGRUfZZ6QMpEsGtuezn92nIA6kjQfz6dPW//Vn\nVdOgLXfVJM2+dRivXz+EuOgobjyrK/+6aWiN9add1LdOk5g1xjDFsc8txmIxdL13Lq8vrr6ge7ig\nlBe+3UJeiXP//U+7j/P+8l0NHpcKLk3uqkkTER64sK/PhUHaNU8kNrp2F1kBOrZI4pFLT65reH7b\ncCAfY+CRudax/qXllZz26Lc8/fVmvv3lkFPdX728lPtmr2vwmFRwaXJXyg9926WSnuz/UMrrz+hC\np/REurRM4pqhXRowMqsLX3Ce0KzPA9XTGngbUOM60mbN3lwyp8zlx511G/GjQosmd6W8+NuvBwDw\n3R0j6JSexP3jT2LaRX2ZdlFft7qbHhnLfRdUX5z9y3m9WHz3OQFZ8Lu2/vqR83TE5Q7TJDg+Ly5z\nvnHqf9uOAjB//UGn8gN5JWROmcs3G5z/AlChTZO7Ul5cfmpHds4Yb5+nJjk+ht8O68q5JznPQjm+\nfzviY6I5uUP13POx0c7/td64Psvvzx3aLb0eUcOnq5wXEsktKedo4Qk++nEPPe/70l7e78H/kjll\nLm/9sAPA/kV0wmXWzJ935wLwr+w99YpLNS4dLaNULTmOk2+eFMtLthufqoZjAm4tdtcvhJq0SvHd\n/dO3XSrjTm7L01+7T3HgasaXG/nbfzcxtFtLj9sfmrOBdmkJ5NsuvJaWO7foqyZIq7px
TIUHPVtK\n1VJSXHWb6M3fDvFYx9cdrqd39d46L6uwcHIH56GNrq8NUFiLFaUqLIbdx4q9br/5vVX2L4qPsp3n\nsS+xdd/ERWu6CCd6tpSqpfTkOD699Uw2TD+fwT5G2Th65jcD7M9n/X4oj1/W32n7tbYLr2WVFt67\n8XT7VAjgPoeOMYbhPZwXLfGlpuTuSVX/fFVLXuexCS+a3JWqg8GdWzi14P1x2eCOTD67G61S4omO\nEpLinLtuzrR165RVWGieFMe4/u3s26pmv3z4kn72srN6tqpr+D5N+3w9/af9l7IKi71bRpN7eNHk\nrlQjmnrBSWTfPxqwXoh964bqbp0EW7J3XAbwjvN68c7vTrPPglmV5FN8TI/QLi3B6S+F2np76U5K\nyy1sPlTAY/M2As7XGlTo0+SuVJDEREcxqnf1Xa/Jtr8EEh1a9Lef25Oze2XYu2W6tUrhnrF9eHHi\nYGpyfr+2XOYwKVpdvb10p/35F2sOcCi/1Gn7tM/Xkzllbr0/RwWeJnelAmjplHNYdNeoWu3TMjmO\n9mkJnNIxjbN7ZXD9GZludX41uAMAma2SuGVkd7d1ZB2d1jWdm20TpXnj76Lirv30z32zBYDr3lzB\nU//d6JT8q4x9bhFPfrXRr/dXDUeHQioVQO2bJ9Z6nx/vs3bTREUJ7/zuNI91rhzSictP7eg2fj45\nLpqWKfFOSfjdG0/zOQlaTJR/7TrX+emPFp5g0j+zWbQ5h0Wbc9zql1VY2HiwgI0HC7h7bB+nbZlT\n5nJe3za8dp3/Y/5V3WlyVyrIovxYGEREPM5ts3ba+QB8vyWHlPgY1u7Nc0rs3TOS2ZZTxHd3jOCc\np6vXgM0v9b4YSE1W7DzmtkwgWC+2RkeJz2X/vta7XBuNdssoFcaiooSoKGFU79YMyUznd2d1ddr+\n0U1nsOSeUXTLSKFFUqy9/NwaZrj8+OYzvG7zlNgBvt5wkMVbcih0SO67jxaz+VCBx/qOftp9nMwp\nc9l5pMhnXeU/Te5KRbCWKfF0bJEEwII7R9rLfzusK8O9DKXs1772c8Pf/N4qrn1jBQWl1cn97KcW\nMObZRR7rHysq471lu8icMpdnbDdPLbXNbePocEGp2x2zyj/aLaNUE9E8Kc7pdXpynMd6rv36tfGE\nhwupvxzI5x/fb7O/fn3xdh7/cqN93PziLUcASIh1/9zTHv2WM7q15IPJNc+3r9xpy12pJqSlQ0K/\n6ezuJNuGXd40oht/Ht0TgJgo8TlGfuoFfTyWf+/hIuu45xfzH4fVqB6Z+4vHG6Jc5+NZvt3akv/f\ndvcWvfJNW+5KNSFf/3UEucVlAPRtn8r66WMpKaskPiaKqCjhz6OtC457uxu1Y4tE9h4vYfLZ3e03\nNwXK3DUH2HyogJYp8Vw7tLMSjckAAA4DSURBVAtXzFwW0PdvajS5K9WEpCfHuXXHJMa5D5usWsej\nTWo8h/JP2MsX3DmSisqGmYZg7toDsNb6/JrTOztt+2DFbq4c0gnR22T9pt0ySik3abaRNVcM6cw/\nHcbex0ZH2b8MLhvUocE+f7nL+Pp7P13Lmz/sbLDPi0Sa3JVSbsb0bcPzVw7k9nN6MKKX59knn7li\nINed0TBLCH6wYrdb2cNfbPBaf+eRIioqLV63N0Wa3JVSbkSESwZ2sI+ceeuGIR6XFxzbr639efu0\nBC4e0D4gn/+ZwwVYR9m29V2/3nCIx+dVLwY+8m8Luf6tFQH57EjhV3IXkXQRmS0iRSKyS0Qmeqk3\nSkQWiEieiOwMaKRKqaAZ1bs1vx3W1a38zB6t7OPnKyyGv181iOa2Lp3LT3WeuCwQi31c/ur/yC0u\n4/fvZPOPRduxWIx9LdgftlpH1azafZw8LzdbNSX+/rRfAsqANsDVwCsi0s9DvSLgTeCuwISnlAp1\nibYhjBW2ETYPXdyPVilxzLisPx1bVM+1U9v555s7
3FHraOD0r+3PL3xhidOUB5UWw2UvL2Xi68t4\nffH2Jn0DlM/kLiLJwATgAWNMoTFmCfA5cK1rXWPMCmPMu8D2gEeqlApJVTcfVa3cdMnADmTffx4x\n0VH2eegBp+eOHL8AHH3we983Lm04kM+cNdVdOIcLrFMSr9+fzyNzf2HmoqabivxpufcCKowxjivx\nrgY8tdz9JiKTRSRbRLJzctxvfFBKhYeqhUP+Yhsj7+j3w61dOclx0dx+Tg/G9K2eanjro+O4f/xJ\nvD/pdI/ve1K7VJ6ccIrPz3/yq03255e+9IPTtqKyCp7/ZgtLt1nvgl267Qg7HOaw2Xu8mCOFJ6it\nI4Un2HrY97w5wSTG1DxmVUSGAx8bY9o6lP0euNoYM9LLPqOB140xmf4EkZWVZbKzs/2NWSkVpowx\ndL13HgA7Z4x32jbjy428apum4MGL+nKDrY9//f482qYmUFxWyfAnF9Tq89qmJnDQtsDI6v8bw4Dp\n80mIjWLjw+MA6zTEMVHC1scu8Lj/7J/20r9Dc3q0TnEqH/DQfPJKyt2OobGJyEpjjMc5lP1puRcC\nrjMJpQKh/bWllAo5Nd2E5Ljp0oHVY+j7tU+jZUo8ndKTav15Bx1WjhowfT4ApeUWjhWV2csrXO7G\n/Sh7D3ts8+P/5V+rOf8598nP8kqsF2z/89M+jDHkFJzgjx/85HPK48bkT3LfDMSISE+HsgHA+oYJ\nSSnVFFWNpmmVEk8LL5OaBcrIpxawxcN0xBWVFu7+9xp+9fIPVPVqVFqM1+kY/vyvn7nvP+uYOnst\nn6/e73UI57p9eRx16f7x1WtSXz6TuzGmCPgUmC4iySIyDLgEeNe1rohEiUgCEGt9KQki0rBnSSkV\nVt6+YQh3nd/brXzy2d24/owuLLp7pNd9104bw1k9rKNubh1ZvZTg6JP8WzawSn5pBed5mI641LY4\n+ZHCMk44LFR+ML+UfbklFHhY5GTW8t32RUi8zV9/4QtL+NXLS+2vP1m5l673zuNgXqnH+oHg71DI\nW4FE4DDwAXCLMWa9iAwXkUKHemcDJcA8oLPt+fwAxquUCnMje7fmD6N6uJUnx8fw0CUnkxTnfcqr\nZgmxvDfpdHbOGM9vsjoB0Ck9kdevz7In/fP61i7RQ/VEaScchk5WjZ8H+PaXQwyb8R2XvbyU2T/t\n9fo+by/dyd7j1i6d1XtyMcbYRxHtPlbMtpxC+v3fV9zx8Wp7WUPxeUG1MegFVaVUbZWUVdLvwa94\n7spBXDygPUcLTzDjy42M7tuGm95dWev3e/uGISTHx/DrV/9Xr7japSVw1Wmd7YuQzJp0OhNfXw5U\nz6pZZc5tZ9G/Y1qdP6umC6qa3JVSEWXt3jwuenGJx20je2ewcFPoDL2edlFfj3f++qu+o2WUUips\npCZau3WS4qK5dWR3Xrsui6ttUwjHRHleaDxYps3xPhlafWlyV0pFlGYJ1mkLTlRYuHtsH87r24Yr\nhlj75/t3aM7iu8/hi9vPYsV959brc24a0a3esUL1ZGiBpsldKRVRmiVYW+6OwxdP6dicz28bxm3n\n9KBtWgInd0ijdbMEp1b8tUNrN33xiJ6ep0J25M+UyCUNNP+NJnelVESJjY5ifP92vPlb567oUzo2\nJzrKuUvm3zefaX8+/ZLazagS5fBeSXHRLJ96rtsXxPRLTmbjw2NrfB9vY+jrS5O7UirivHT1YM7p\n43tI5IBOzZkyrg+d0hPd7p795JYzWffQ+ax+cIy9rG1qAu9POp2T2qXSv0Ma0VFCrzYpLLxzJG1S\nE4jx0J+fEBvNjWc5XzS9Zmj1MoIFpQ1zV6uuoaqUatJuHtGdm0d0dypbcd+5tG6W4FZ32VRrP/2X\nfxoOwMaHxyJAjO3u2ltGdGfr4UIWbznitN+gzs3tz5PjopkwuCPvLbOuNtVQyV1b7kopZdM9IxmA\nlsnxftWPjY6y
J3aA1qkJ/PMG65qz3WzvBTCwkzW533V+b767cyQdW1jnyenWKpnBXZrTEHScu1JK\n2eQUnGDTwQK3hUUWbjrM0cIyJrisLuXNki1H6NOuGa1SvH9JVFRaiI6SGidT86Wmce7aLaOUUjYZ\nzeLJaOaekEf2bl2r9/Fn1amYACw7WBPtllFKqQikyV0ppSKQJnellIpAmtyVUioCaXJXSqkIpMld\nKaUikCZ3pZSKQJrclVIqAoXEHaoikgPsquPurYAjPmuFBz2W0BQpxxIpxwF6LFW6GGM8zj0cEsm9\nPkQk29vtt+FGjyU0RcqxRMpxgB6LP7RbRimlIpAmd6WUikCRkNxnBjuAANJjCU2RciyRchygx+JT\n2Pe5K6WUchcJLXellFIuNLkrpVQE0uSulFIRKGyTu4iki8hsESkSkV0iMjHYMXkjIvEi8oYtzgIR\n+VlExjlsP1dENopIsYgsEJEuLvu+KSL5InJQRP4anKNwJiI9RaRURN5zKJtoO8YiEfmPiKQ7bAvJ\n8yUiV4rIL7a4tonIcFt5WJ0TEckUkXkictwW04siEmPbNlBEVtqOZaWIDHTYT0TkCRE5ans8IfVZ\n9632cd8mItkickJE3nbZVudzUNO+jX0sIjJURL4WkWMikiMiH4tIO4ftNZ6Dms5fjYwxYfkAPgD+\nBaQAZwF5QL9gx+Ul1mRgGpCJ9Qv1QqDA9rqVLfZfAwnAU8Ayh30fBxYDLYCTgIPA2BA4pvm2uN6z\nve5nO6azbedkFvBhKJ8v4Dysd0YPtZ2XDrZH2J0TYB7wti3etsBa4I9AnO0Y/wLE28p2AXG2/W4C\nNgEdbce+Abi5EeO+DLgUeAV426G8zufA175BOJZxtlhSgSTgTeArh+1ez4Gv81djPMH8hazHDzEZ\nKAN6OZS9C8wIdmy1OIY1wARgMrDU5dhKgD621/uBMQ7bH3ZMmkGK/UrgI6xfWFXJ/TFglkOd7rZz\n1CxUzxewFLjRQ3k4npNfgAscXj8F/AMYA+zDNjLOtm23QyJcCkx22HZjYyRCD/E/4pIQ63wOfO3b\n2MfiYftgoMDl99DjOfB1/mp6hGu3TC+gwhiz2aFsNdbWY8gTkTZYj2E91phXV20zxhQB24B+ItIC\naOe4nSAfp4ikAtMB164I1+PYhi2hE4LnS0SigSwgQ0S2isheW1dGImF2TmyeA64UkSQR6YC1tfgV\n1rjWGFtWsFlDdbxOx0poHAvU7xx43beBY/bX2Vj/71ep6Rz4On9ehWtyTwHyXcrysLYSQ5qIxALv\nA/80xmzEeix5LtWqjiXF4bXrtmB5GHjDGLPXpdzXcYTa+WoDxAKXA8OBgcAg4H7C75wALML6Hz4f\n2AtkA/+h5mPBw/Y8IKUx+929qM858HXMQSMipwD/B9zlUFzTOajzsYRrci/E2n/lKBVrn2/IEpEo\nrN0RZcBttuKajqXQ4bXrtkZnu5AzGnjWw2ZfxxFq56vE9u8LxpgDxpgjwDPABYTROQH779VXwKdY\nuyBaYe2LfgLfP3vX7alAoUtLMRjqcw5C8fcNEekBfAn8yRiz2GFTTeegzscSrsl9MxAjIj0dygbg\n/KdOSLF9C7+BtcU4wRhTbtu0HmvsVfWSsfZXrzfGHAcOOG4nuMc5EutF4N0ichC4E5ggIqtwP45u\nWC8AbSYEz5ftZ7sXcExiVc/D6ZwApAOdgReNMSeMMUeBt7B+Ua0HTnFpiZ9CdbxOx0rwj6VKfc6B\n130bOGavbKN1vgEeNsa867K5pnPg6/x519gXTgJ40eJDrCMwkoFhhMDoCx/xvgosA1JcyjNssU/A\nemX/CZxHBcwAvsfaEuuD9Zc6KCMzsF7pb+vw+Bvwb9sxVHUJDLedk/dwHi0TcucL67WDH4HWtp/v\nYqzdTmFzThxi2g5MAWKA5sBsrCOWqkZb/Anrl+1tOI+WuRnrxdgOQHtb0mjM0T
Ixtp/x41j/qk2w\nldX5HPjaNwjH0gFrn/+dXvbzeg58nb8a4wnmL2Q9f5DpWPsUi7BePZ4Y7JhqiLUL1lZhKdY/s6oe\nV9u2jwY2Yu0qWAhkOuwbj3XoVD5wCPhrsI/HIbZp2EbL2F5PtJ2LIuAzID2UzxfWPveXgVysQ+n+\nDiSE4znBes1gIXAc68IPHwFtbNsGASttx7IKGOSwnwBPAsdsjydxGJnRSL9DxuUxrb7noKZ9G/tY\ngAdtzx3/7xf6ew5qOn81PXTiMKWUikDh2ueulFKqBprclVIqAmlyV0qpCKTJXSmlIpAmd6WUikCa\n3JVSKgJpcldKqQikyV0ppSLQ/wP9FA/tqwp5hAAAAABJRU5ErkJggg==\n",
            "text/plain": [
              "<Figure size 432x288 with 1 Axes>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "ExecuteTime": {
          "end_time": "2019-05-14T06:46:38.649136Z",
          "start_time": "2019-05-14T06:46:38.440378Z"
        },
        "colab_type": "code",
        "id": "FydTnKDxSYcL",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 286
        },
        "outputId": "9af71a5c-9188-4088-9c3b-9364b82a46ab"
      },
      "source": [
        "# Plot the discriminator loss curve recorded during GAN training\n",
        "# (losses is built earlier in the notebook -- presumably a DataFrame with\n",
        "# one row per training step; verify against the training loop).\n",
        "plt.plot(losses.disc_loss.values)\n"
      ],
      "execution_count": 20,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "[<matplotlib.lines.Line2D at 0x7fba64b27b00>]"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 20
        },
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAWsAAAD7CAYAAACsV7WPAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0\ndHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3dd3zU9f3A8dc7g4QQQhghDIEAspGh\ncSDiQKQIrVZtK7bW1mqx1tVa20JrK4oWnG3dP1oUqxVHpS4UUJElKgQQKCtsCJAQRiZZl/v8/riR\nm7lLcpe7S97Px+Me3Hfe+5sveedzn+9niDEGpZRS0S0u0gEopZQKTJO1UkrFAE3WSikVAzRZK6VU\nDNBkrZRSMSAhXCfu0qWLycrKCtfplVKqRVq/fv1xY0yG5/qwJeusrCxycnLCdXqllGqRROSAr/Va\nDaKUUjFAk7VSSsUATdZKKRUDNFkrpVQM0GStlFIxQJO1UkrFAE3WSikVA6IuWX+4+QgfbDoS6TCU\nUiqqhK1TTGO9tHofm/OKGT+4K+2Soi48pZSKiIAlaxEp83jVisgz4Qpo6rm9sVgNJ8urw/URSikV\ncwIma2NMquMFdAMqgLfDFVB6SiIAxRU14foIpZSKOQ2ts74OOAasCkMsAKSntAGg6LQma6WUcmho\nsv4J8C/jZ+JGEZkmIjkiklNYWNiogBwl66IKrQZRSimHoJO1iPQBLgFe8bePMWauMSbbGJOdkeE1\nwl9Q0tvakvUpLVkrpZRTQ0rWPwZWG2P2hSsYgDR7si4+rSVrpZRyaEiyvol6StWhkpwYT9vEeK2z\nVkopF0ElaxG5EOhJGFuBuEpPSaRIW4MopZRTsCXrnwALjTGl4QzGoUPbRPYdL2+Oj1JKqZgQVBdB\nY8xt4Q7E1Y5829+E09UWUtpoL0allIq6sUEA+me0A+BYSVWEI1FKqegQlcn6T98eCsCJck3WSikF\nUZqsu6QmAZCz/1SEI1FKqegQlcm6bxdbNcjsj3dgtfrsLKmUUq1KVCbrdkkJJCXYQssvqYxwNEop\nFXlRmawBXrjxbECTtVJKQRQn667tkwE4pslaKaWiN1l362BL1vnFmqyVUipqk3WnlDYkxgsFpdp8\nTymlojZZx8UJXdsns69Qu50rpVTUJmuAw0UVLN6aT9b0RZEORSmlIiqqk7VSSimbqE7Wa6aPj3QI\nSikVFaI6WXe3twgBqNWejEqpViyqk7WION+XVVkiGIlSSkVWVCdrgNnXngVAuSZrpVQrFvXJOjnR\nFmKVxRrhSJRSKnKiPlm3iY8HoFqTtVKqFYv+ZG0ffU+TtVKqNQs6WYvIVBHZLiLlIrJHRMaFMzAH\nx1CpVZba5vg4pZSKSkHNRisiVwCPAtcDa4Hu4QzKlaNkfbS4kvIqC+2SdAJdpVTrE2zJ+kHgIWPM\nV8YYqzHmsDHmcDgDc3CUrO9asJGrn/uiOT5SKaWiTsBkLSLxQDaQISK7RSRPRJ4VkbY+9p0mIjki\nklNYWBiSAB0la4Ddx8pCck6llIo1wZSsM4FE4HvAOGAUMBq433NHY8xcY0y2MSY7IyMjJAEmJcSH\n5DxKKRXLgknWFfZ/nzHGHDXGHAeeAiaHL6w63Vy6nIP2ZFRKtU4Bk7Ux5hSQB7gOztFsA3WkJiXw\n5Yy6AZ1eXL6nuT5aKaWiRrAPGF8G7hKRriLSEfg18GH4wnLXvUNd9fjmw8XN9bFKKRU1gm0HNwvo\nAuQClcBbwCPhCqo+EngXpZRqcYIqWRtjaowxvzTGpBtjuhlj7jbGRGQm2xW5hTz1SW4kPloppSIm\n6rubO7RPrvsS8PRnuyIYiVJKNb+YSdbLfnOp27JVJyNQSrUiMZOsM9onuS2XV2sTPqVU6xEzyRpg\n1neHO99re2ulVGsSU8n6xvN7
O9+XVWqyVkq1HjGVrF3nZCzVkrVSqhWJqWTtSkvWSqnWJGaT9anT\n1ZEOQSmlmk3MJut73vgm0iEopVSziblkvep3l0U6BKWUanYxl6x7dUphaPc0AHILSiMcjVJKNY+Y\nS9YA246WAPDmukMRjkQppZpHTCbrK4d3A+DzHcciHIlSSjWPmEzWf586GoC9x8sjHIlSSjWPmEzW\nbRLiyEyzjRWSNX0RNbXWCEeklFLhFZPJGuCu8QOc77cdKYlgJEopFX4xm6xdlVTWRDoEpZQKq5hN\n1tef28v5vryqNoKRKKVU+MVssk6Mrwu9XAd1Ukq1cEElaxFZLiKVIlJmf+0Md2ANcVonIlBKtXAN\nKVnfaYxJtb8GhS2iRijTahClVAsXs9UgAB1TEgF4fMkO7XqulGrRGpKsZ4vIcRH5QkQu9bWDiEwT\nkRwRySksLAxNhPXY+OeJAFgNTPzrSg6dPB32z1RKqUgINln/HugH9ATmAh+ISH/PnYwxc40x2caY\n7IyMjBCGGZwlW/Ob/TOVUqo5BJWsjTFfG2NKjTFVxphXgC+AyeENreE6prTBor0ZlVItUGPrrA0g\nAfdqZk8v28WZf/yYyhp94KiUalkCJmsRSReRb4lIsogkiMiPgIuBxeEPL7BhPdKc7w+csNVZHymq\niFQ4SikVFsGUrBOBh4FC4DhwF/BdY0xuOAML1uWDu3qte3TxjghEopRS4ZMQaAdjTCFwbjPE0ii3\nXdKfSouVuSv3Otct2VoQwYiUUir0YrqdNUC7pAT+MHlIpMNQSqmwivlkrZRSrUGLTdZWq4l0CEop\nFTItNlmX6kh8SqkWpMUk6+E909yWS3VCAqVUC9JikvWrPzvfbbmkQkvWSqmWo8Uk63T7CHwOWrJW\nSrUkLSZZiwi//dYgHrlmOACb8orYklcc4aiUUio0AnaKiSV3XHYmO/Nt41r/5SNbL8b9c6ZEMiSl\nlAqJFlOydmibGB/pEJRSKuRaXLJOTmxxl6SUUi0vWSd5lKy1c4xSqiVoccnas2T94so9EYpEKaVC\np8Ul6zbx7pf02OKdjJ2zTCckUErFtBaXrEW8J7A5XFTBroKyCESjlFKh0eKStT8+crhSSsWMFpms\nZ313uNc6TdZKqVjWIpP1jy/oQ98u7dzWGW0UopSKYS0yWftSXWuNdAhKKdVoDUrWIjJARCpF5LVw\nBRQqZ3Rs67ZcbdFkrZSKXQ0tWT8HrAtHIKH2zA2j3ZZrtGStlIphQSdrEZkKFAGfhS+c0ElPaeM2\niJOWrJVSsSyoZC0iacBDwL0B9psmIjkiklNYWBiK+JrsvL6dALjllRyWbs2PcDRKKdU4wZasZwHz\njDF59e1kjJlrjMk2xmRnZGQ0PboQ+Ms1ZznfT3t1fQQjUUqpxgs4nrWIjAImAKMD7RuNOrRNDLyT\nUkpFuWAmH7gUyAIO2rtypwLxIjLUGHN2+EILjY4pmqyVUrEvmGQ9F3jDZfk+bMn79nAEFGoJ8a2m\nKblSqgULmKyNMaeB045lESkDKo0x0fEEUSmlWoEGFzuNMTONMTeGI5hwuXxw10iHoJRSTdIq6ghu\nHts30iEopVSTtIpk7erxJTt4d+PhSIehlFINEswDxpiXmZbkfP/c57Zpvr47umekwlFKqQZrFSXr\nAZntueG8XpEOQymlGq1VJGuA7D6d3JZ1TkalVCxpNcm6R7r7kKn/WLk3QpEopVTDtZpk7Tm+9Yny\n6ghFopRSDddqknW7JPdnqUbn+VJKxZBWk6zbJsa7LddqslZKxZBWk6yTEtwvtcZiOHjitJ+9lVIq\nurSaZB0XJ27Lb+Yc4uLHP2fDwVMRikgppYLXapK1P/sKyyMdglJKBdQqk/WwHmnO9wnxUs+eSikV\nHVpFd3OHu8efSZ/O7ViRW8jWIyUAFJZWRTgqpZQKrFWVrO+dOIjrzjnDbd3Di7ZHKBqllApeq0
rW\nSikVqzRZK6VUDGiVyfp7HlUhSikV7Vplsr54YAZr/3C5c/mNtQcjGI1SKlblnTpNaWVNs3xWUMla\nRF4TkaMiUiIiuSJya7gDC7cuqXUTEkxfuAWrVbufK9USXP7kcn7x6vqQnjPv1GnGzlnGoZPuvZ4v\nevRzrn72i5B+lj/BlqxnA1nGmDTgKuBhETknfGGFX1yccMtFdXMzllZZIhiNUipU9hSWs3hrvnN5\nydZ8jpVUeu1XWllDtcUa1DnfzsnjcFEFb+cc8tq293jzdKwLKlkbY7YaYxwNko391T9sUTWTWpfS\n9IYD2u1cqZamptbKba+uZ+o/vvLadtbMpfz8XzlBncdqH/jNMWzFsh0FLNyQ59webNJviqDrrEXk\neRE5DewAjgIf+dhnmojkiEhOYWFhCMMMv5vnr4t0CEqpEHMUyA54DNrmqPZckVuI1Wo4Wlzh9xyV\nNbVUVNtmlooXW7L+2fwc7n1rk3Ofx5fsCGncvgSdrI0xvwTaA+OAhYBX1z9jzFxjTLYxJjsjIyN0\nUYaJ55jWE/+6IkKRKKXCwZGsrR6/66WVddWeL32xjzGzl7H7WJnPc4x+6BP+uXof4D0gnMO+4+Ef\nwbNBrUGMMbXGmNXAGcDt4Qmp+Xg+Uswt8H2zlFKxp8pSi8WerI2BBfZWX6fKqzl/9qfO/bbZh57I\n2X/S53kqXOZrTfCTrAHKqyz8Z31e2CY2aWzTvQRaQJ2154QESqmW47LHl7s9l5qxcAsV1bV8sr2A\nypq6OuYu7W0tw4oqAjfBEz+5WgQe/GAr9729ia/3nQxLwg6YrEWkq4hMFZFUEYkXkW8BNwCfhTya\nZnb35QO4e/yZkQ5DKRUGR4or3ZI1wHvfHCbOI+M6ljzzq6XW6lUX/ZePdvDk0p0+P++YfVC4qXO/\nIt9H65OmCqZkbbBVeeQBp4AngF8ZY94PeTTNrF1SAvdOHBTpMJRSPuQWlPLaVweadI5zH/nUbXn6\nwi14FY7tK4y9YtTx8HHe6n089/ker3M+s2y3z8+K91fsDpGAQ6QaYwqBS8IaRYTdP2WIjr6nVJT5\n1t9WYgzceEGfoI+pqW14EzqxZ2tHyXr4zCVcMTSTzLTkBp3HtW67e4e2DY4jkFbZ3dzT97N7Od/v\nKijVmc+VigKuv4bVFiurdhWSNX0R6/afpKCkkn+u2uv1uzp2zrKA5/UsADuWX/3yAD/4vy85XV3L\ne98cYe7KvUHHWlBSyZo9J4LevzFa1eQD/iQn1v3NuuKvK+neIZlRvdJ54caY7qSpVItgtRpmLNzC\nO/ZOKA+8t5XEeGFTXjGDurVn3IC6ZsLHgphMxLPO+oXltqqO/JLKRtc1F4ShjtqTlqyBpIR44l2a\n5BwtruTj/+XXc4RSqrlYrIZPtxc4l7cdLWFTXjEAP5631pkoP3PZpz4nyqtDH2Nt+L+Na7K2m3Hl\n4EiHoFSrVny6hv0+xtmotRpS2vhvZlt02tbk7pZXgus6PuvDbY0LsB7h+APgSZO1XZK2uVYqoq78\n+0oufWK513qL1UrbepK18ere1jJpsrZLTtAfhVKRdKTYVp2xZGs+X++te1hXXFHjNTSpK9cOLg3V\nt0u7Rh8LMKBrapOObwjNUHb1/eVWSoVH1vRF/Hje126dV257dT3Xz60bJe9n89dRU0+dcEV1Laca\nWQ3RP6PxyfaH5/d2e9YVbpqs7ZITNFkrFQmrdh1nRe4xv9sDjdnzxrqDjJ71idf6308K/ByqMe2y\nA3nsuhEhPydosnYqr9bJB5QKl4KSSt7fdMTv9qYMovbeN97nvf3S/l4j7Xm64bzeXt3R/dk/Z4rX\nOmOgc2obr/U/OLeX17pQ0GRtd8XQzEiHoFSLU1BSyafbCvjNW5u4e8FGjhRVcKKsisNF7uNHz/k4\ntONBW40J2LntNxMHOkvWv54wMOhzz/zOUPs7w9NTRzc2xA
bTTjF2KW30R6FUqN0w9yu3aa925pc6\nJ/rwVVoNFWMgUA1HnIhzCNUObf3//nfv4N7tPNGlMULn1CRmX3sWAzNTeXjRdjYeLGp80AFohlJK\nhcXi/x31mp/QtaffzvzSsH22McZZDTL5rG58tKWuk1v75ARKKy20SYjDYs/o9RXWVv7uMrdlz7FE\nbjivNwBvThuDxRq+6b20GkQp1ShVllqOFlfQb8YiVuS6T+NnjOEXr23wOuZocV2y/unLa8MWmzF1\ns8MkeTQeeOjqYeTcP4HUpARmXjWMc/p0ZESvDl7neOy6Ebxz+xgS44NLk20S4sL6DV2TtYvvn3OG\n2/I/V+3lvxvz+NunuRGKSKnotKuglEH3L2bM7GVYDTy7bJfb9jfWec8CDpDvkqxdE3djnde3k8/1\nVlM3pZfr2D9gS95dUm0TDozu3ZF3br/QuewqPSWRc/p4nz/NXmXSqZ33w8Vw0mTt4vHvj3Tr1vrw\nou38+s1N/O3TXfUcpVTrc9eCjW7Lnq0qdhwt8Xncmzm+k3iw7ps4kFsv6gvYqjfOd0nWC35+AT88\n31YlYTDU+ilZ+2ob3SU1ibV/vNxtnb+WIpOHd2f2tWdxz4QBjb+QRtA6a6VUk9UaW9VHSaWFxHgJ\naQfwi87sQu/OKfzlmrMAnE0A40TcEqpIXScXY+rqlJM8StaJ8b47snRt7/4g0TNXx8fZPi8uTpz1\n1M1Jk7WH5uuPpFTLselQEX1nfARAz/S2jB/ctdHnahMfR7X9wd8vLunPdI9B1hwzucTHCQc9uqEf\nsz/A7JLahlP2AZ5cS9b3TxnChf27BBVHrUfTv00PTIzoWPdaDaKUCqnDRRWNHlxpVK90tz4PPTt6\nz7hyhn3diDPSGZjZ3m3b6N4dARg/OBNHbYfrxNi3jutHcj2Dtr13x1iyOqcAkNnevR47NSmB9smJ\nDbia0ApYshaRJOB5YALQCdgDzDDGfBzm2JRSUUoCzDfoqwDatX0SZVUWTlfXcn12LxIThAevGk7/\nP3zk3OeZG0bz109y6z1RdlYnPr5nHIO7tafWahjSPY23cw4xunc6SQnxbH9oEm3bxNOz4wAqamr5\nyYV9eHTxDtKSA1ckjOyVzrLfXMrGQ0Wc06djwP2bUzDVIAnAIWzzMB4EJgNvichZxpj9YYwtqlhq\nrSQE2YRHqZaooKSSy59cwZu3XRBw339/fdBr3bHSKgZmppJbUMaAzFRuHdfPa59uHZLd6iL9lYKH\ndE8DICFeuGJopltp3DEoW4e2iTz8XVs996yrhzH2zOCqP+LiJOoSNQQ3YW45MNNl1Ycisg84B9gf\nnrAix1+JocqiyVq1bit2FlJWZeGmeWsbPdi+I/km+BmtLjE+ztnp5MyuqVwzumfjgvXw4zFZITlP\nJDU4+4hIJjAQ2Br6cCJvpI/G8WBL1kq1Zo5WFU2ZFaWdvdNIvEvB55KBtjkU982eDNRNYDttXD8t\nILlo0E9CRBKBfwOvGGO8Rl4RkWkikiMiOYWFhd4niAEv3ngO79w+htsudv+KVmWp9XOEUi3Xe98c\n5vInl1NZU8sTS3c2+XyOjiTxLt9g5998LvtmT3Z+q73ubFvnNH8dXlqroJO1iMQBrwLVwJ2+9jHG\nzDXGZBtjsjMyMnztEvXaJ9t6LXl2Ma2ssWK1Gh5dvIMjHiOGKdWSlFTW8JOX1jLrw23c88Y37Cks\n5+Uv9nPoZMP/33u2aU5PsbWmcB1DQ0Tcqh/H9O/M/jlTyGriLC4tTVDtrMX2k5wHZAKTjTE1YY0q\nCrTxmOarylLL5sPFvLB8DxsOnOLN28ZEKDKlwmPDwVM8sWQna/bYptRyHe/Dsz1zsBLj49j0wASG\n/nkJAO2SbCnndLV+U22oYEvWLwBDgO8YY1pFsdKzZF1VY3WO0FVZU8s9b2xk65HiSISmVFhc+/wa\nZ6L2tGCtd+uOYAjuI9
o5HjBW1miybqhg2ln3AW4DqoB8l68rtxlj/h3G2CLKc/CXKovV+eDjwMnT\nbMorZkteMcvuu7T5g1MqynVq1wYB/jhlCACv3XI+NVYr2+1jhlRosm6wYJruHaAV9sIe3tO9VUiV\npdY5AExcgA4BSsUSq9Xw2JKmPzx0FR8nrPvjBOfyRQNsbZz3FdrGt65qwozkrZW2i/GjR7p7N9fn\nP9/j/OrWjBMaKxUSpp5prrbnl/Diij0NPqdrR5SbxvRx2+bvV8Qx64rjQaMKniZrPzqluI9V++Xe\nE5wstz1XDdTVVqlIOlZSya2vrKOksq4dQN8ZH9F3xkc889kuFv/vKFnTF1FeZcFqNdz6Sk6DP+Nv\n149yNrGbMCSTh64eztd/uJxFd18E1LWV9jRpeDf+PnUUv7z0zIZfWCuno+750baNdzfX+97eBPge\n90CpSCmuqCEpIc758O6ZZbv5dPsxbn9tPX+9fpTb0J9PfpLL4G62wY++88xqvj2ie4MnAXDMnfjJ\ntgIAZ4k9My3ZOQGtv6pCEeHqUaHpldjaaMm6HnOuPYsP77qIB68a5rb+eFmV7Y0WsFUUGPngUiY/\nvcq57EiYX+w+wXmPfMarXx1w23+Hfe7DvcfLeXrZbr/n/dH5vWmbGM+7d4x1rnMd7N9RHWh1Kb10\naGur3tCEHHpasq7HVPsA4906JPPA+y2yd71qIfYWlnPo5GlueWWd16D5f3r3f40656Bu7dk+axKn\nqy3OdfNvPs/53lF6dv289smJbJ45kdQwzkXYWulPNAidm3muNaWCYYzhdZf2zytyC8ktKAvZ+S8b\nZJtAINll8H7X6kFH13HH+M8OaREc87kl02QdBH2gqKLRuMc+J+9UXR+1FB/PWRpj1tXD3Eapi4sT\npp7biykjurvtN7JXOi//9FzG9O8cks9V9dNkrVQUytl/ku+9+CXv3jGWUb3SvbYv2ZrvlqgBPtx8\ntEmf+Y+bst2a47mac90In+sva8L0Xaph9AFjE+wtLCfvVOPGTFCqPp/tOAbAF7uPu603xrBo81Fu\ne3W91zHL7Mc0lqOViIpOmqwb6N+3nu+2vCVPxwdRobNg7UGuena1s4WFZxO4DQeLuOP1DSH/3K0P\nfotenVIC76giRqtBGsgxapiDo6mSUk3hqPZwcDSRixPYfrSEK/++yt+hjbZj1iTW7DnOmH5dfPYr\nUNFFS9ZBOrNrKuA9HZE+fFSh4Nndu7zaMbSB8NOX14blM5MT4xk/OFMTdYzQknWQPrzrIqpqrBSU\nuvf2ch1EXanGKq5wHyK+tNJi/7eGU6db/PDxKghasg5ScmI8HVISvUrWu4+Frl2rar0cydnBMdj/\n08t2U63zfyo0WTeY56QED36wjXX7T0YoGhXrLLVWtuQVU13rnpA3HSpq1Pmy+3Ssd7s+Y4ldmqwb\nKCHeu456b6GWrpW7eav3BfX/Yv6a/Xzn2dXstY/zHKyXf3qu7/P97DzuvnwAi381zmdTvJd+ei4/\nvTCLz3XSjJijybqBUnyMebBu/ynWHzgVgWhUNKqy1DLrw21urTtcFZZWkTV9EeOfXE5uQWmjPqN7\net1IeuvvrxvkPzUpgXuvGMjgbmks/tXFXhPW9u6UwsyrhtFXJ6ONOZqsGygt2TtZ/2d9Hte9sCYC\n0ahoVFNrayN9srza5/bNebYqjr2F5by/6UijPsO1Oq5zapLf/X56YRYAbez7x+vMGTFLk3UDiQip\nSb4b0Xy4+QhWzyHPVKsT6IHg8p11s4ZXNnB6K8cMK4lxcdw/ZQj/+lndKHhDuqd57f+HyUPY/ciV\nzhK2JuvYFVSyFpE7RSRHRKpEZH6YY4p6y35zCat+d5nX+jtf38hbOYciEJGKFsdKKikocW/e6Rj/\nvNpiZeb7W73Gl/bn7N7eY4I4RsCLjxduHdePiwdmALYOLu/fOdZrfxEhIT5Ok3QLEGw7
6yPAw8C3\ngLYB9m3xuqYl+53PrrC0qpmjUdHivW8Oc88b37itG/7AEsqqLPx96igS4uKYv2Z/wPO8e8dYNhw4\nRVFFDRsOurcKaZdkS9YWj9Yjjlli/Jl2cT+eWJpLcqJ+mY5VQSVrY8xCABHJBs4Ia0Qxwl/PxTgt\nwbQ6xadrSEwQvt7n3YSzrMrWftozifuyf84UjhRV0CO9LaN6pbPAPlb1Q1cP4zsjemCxGsqqLLz+\n9QF6N3AcjzvHD+DO8QMadIyKLtqDMcT8zT2nWp7KmlqSE+MZ+dDSJp/rxRvPAaBHet0X1+uze5HS\nJp5vj+jhrMbIaJ/EH6cMbfLnqdgT0mQtItOAaQC9e/cO5amj0rgBXVi1y30IyyNFFc5fYtUy7S0s\n48UVe3grJ48eHZIDHxDAHyYPZtLwbl7r4+J0cllVJ6QVWMaYucaYbGNMdkZGRihPHZX+cs1ZXute\n/eoAN7+8LgLRqHDYePAUy3YUcML+kNAYw/gnV/BWTh4ARxo4M7iDY1S9139+PtMu7h+aYFWLptUg\nTeCrNyPAl3tPcLK82jlHnYpNxhiued7Wfj4tOYEOKYncfGHfRp9v88yJHCmqICM1idtfs41JrdVm\nKljBNt1LEJFkIB6IF5FkEWn1ib6+5lALN+Q1YySqqY4WVzDtXznOB4Jg65nqUFJp4dDJCh76cFvQ\n53zvjrEs/tU4wNYMLy05kcHd0uicmuScXEBTtQpWsAn3fuABl+UbgQeBmaEOKJYkxPn/W/fwou1c\neVZ3eqa35c11B9l/4jS/nzS4yZ/59Ge7WL3rOG/9YkyTz6XqPLU0l6XbCvhw0xH6ZaTys/nr3BJ3\nQ+1+5EoS7L0G98+Z4rXd0fBTWw+pYAVVsjbGzDTGiMdrZphji3qOttaOXmWe/rlqLwC/f2cLLyzf\n43MfY4zfNtu+PPVJLmt1lL9GO1ZSyfPLd2OMIb+40tnj1FEbUV5dyw/+78ugE/XK317GUz8Y6bU+\nIb7+X63HvjeCq0b2YOQZ3h1flPJFW8g3QfvkRNokxPGnKUN9lp7aBtEiZPo7W+g74yPKqyzaVb0Z\n3PvWJh5bvJO+Mz7igtmfMW/1PqCu7njr4frn1ByYmeq23KtTW649+wz2z5nCtWfbWm68d4d3T0JP\n/TNSefqG0bRJ0F9BFRz9n9IEbRLiyH34Sq47x3c/oeeX76HotPtgPjn7T/KGvbMDwJv27unDHljC\ngx9spbzKwoETDRsuUwVmjOHAiXJWe8wW/sHmI6w/cJJN9omPF248XO957rl8oNuya+eoJ78/kj1/\nmczIXlpaVqGnyTrMrvUYje97L37J9IVbfM5Q/cqXBxj2wBIueXw5HwQYjc1f1cn7m45wuKii8QHH\niA83HyFr+iKO2cfhqLLUUtfdOD8AAA0pSURBVGPvgv35zmN8vuMYv37zG7KmL2Lf8XL6zvjI58iI\nm/OKue6FL9l+tMRr28zv1HU+eeoHI/nj5CFMGdHduW5Mv85u+4uIjsGhwqbVt+gIN3+Dyi/afJTn\nfuj/uG8OFfGdkT1YkVvI2P6dvepALVZDYrxgtRoe+nAbP8juxcDMVO5esJEeHZJZM+PyUF5G1HFU\nXzy/fA8zrxrGiJlLqbJYWX//BK927tPf2QzA8TLfQ5b6MmVEd350QR8uGdQVS62VAZneA/kvmHZB\nE65AqYbRZN2MHOMYO1RZav3uW2s1vLXuEL97ZzM3jelDl9Qkbh6b5dxebbGSGB9HYVkV89fsZ+nW\nfJbeewnQ+I4asaTMPmfh/DX7OVJUQZV9WNJzHv7Ua19fY3YE8vDVw0mMj/M5SP+b0y7gaCv4Gavo\nosm6GV317Bduy6fK/c9a/cm2AucIbf/60jak5j9W7nVur7JYaZdUN1BQrTFU1vhP/rFkV0GpW0l2\nV0Ep/ztSzM78Mn4/aRClVRZ2uUxUvHRbQYM/47kf
nu2zKsohrZ65Cs/3qP5Qqjloso6gC2Z/5neb\nr3rnUpfmZNUWK6t3HefFFbYmgQUlVT6T9bPLdnFe387ECYzu3dGtTvUnL61lcLf2zJg8pCmXERIz\nFm7mjI4pHC2u4LWvDvLPm7KZMDQTgCv+utK536ZDRXy590SDz39h/86s2WM7bspZ3ZkyojvDe15K\nzv5TrNlzgkeuGU5SQhx5pyrYnFesdc8q6miyjlHffe4L8j0GuS+ucC+p7z9ezhNLc53Ld48/k3sn\nDgJgZW4hK+yvGZOHYKm1hv0B2anyatJTEt1aUDhiXrDWfdKGW/+Vw9m90/n1Fe6tLxqTqO+bOJA7\nxw9g1a5Csjq3o5d9eNE+ndvRp3M7t9Y8vTqlOLcrFU00WYfQst9cwuc7C5nl0SX5t98axONLdob0\nszwTNcCUp1c73289Uuy2DLD+YF336ZteWuu27cI5yzhWWsWGP13hd0yT3cfKOFFWxfn9OnO62kJ5\nVS0Z7W3z/23JK+brfSe4dVw/dh8rY8JTK/jo7nEM7ZHGuv0n+XhLPi99sY/LBmWQ0iaBq0b1YOPB\nIuc3A182HCzix/PW+t1+ZtdUdturQ87L6sTa/Sf5+bi+dOvQllsu6sub6w5yblYn+mXY2kaPG9Dy\nBxdTLZc0pPdcQ2RnZ5ucnJywnDvaZU1f5Hy/b/ZkKmpqWZl7nF+8tt5r36tG9mj0pKmN9dot53Pj\nvK+dy8vvu5RLn1juXL7hvF4sWHuIV352HvsKyxjTvwvz1+x3DoY//+ZzeXJpLlsOF/O360dRdLqa\nmR/Y/kDl3D+BN9cdcv5xGtkrnU2H3B+sNlZmWhJXDu/urMvfMWsSt726nkHd2vP7SYNZtauQSwZm\n+J0YQqlYICLrjTHZXus1WYde1vRFdExJZOEvx7q1JnBN4hOGdOXGC/qwfGeh11RPT98wmrsXbGRg\nZiolFRbySyq5cng3Pv5ffnNdQtT587eHcvPYLESE5z7fzTeHivjHTV7/n5WKef6StVaDhMH+OVMw\nxniV8M7r24m1+05y+eCu/GrCQIb37MDnO455HX/VyB5cNbIHAKWVNdTUGtLbJvL0sl1k9+nEgx9s\ndWsN4erTey9mwlMrfW6LVteM7snvJg3iqaW5bM8v4Z3bL2T/8dOktInn6c92ceVZ3Rg/ONO5/x2X\nnRnBaJWKDC1ZN6OaWis1tVZS2tT9jVy96zg3zvuaxb8ax6S/rQJ8j9Lm6u2cQ/z2P5udy/+4KZuh\nPdLYklfMpOHdeP3rg9z/7hbevWMsbRPjuWvBRnbkl/LCj87m4UXbOVxUwbSL+zHXpSkg2JLmf126\nW398zziu/Psqv3GMOKMDewvLKauyMLp3OhtdJncdeUYHBnVrT/+MVMYNyGBojzQ25xWRmZbsHOs7\nM63ps6wo1dJoNUgMWG2fIuyiAV3q3c8YQ96pCq55/gvmXDvC2cTNH0utlX3Hy7164TmqZbbMnEhx\nRQ0909sy4akV7LH3utw/ZwovrtjDuVkd6ZHelh1HS5n2ag7zfnIuFw+se1hXU2tFgBvnfc1Xe0+y\nY9YkndZMqUbSZK28bDx4ii2Hi7lpTJbb+h35JdRaDcN6dIhMYEq1YlpnrbyM7t2R0b07eq0f3C0t\nAtEopeqjo+4ppVQM0GStlFIxQJO1UkrFgGBnN+8kIv8VkXIROSAi9YzErJRSKtSCfcD4HFANZAKj\ngEUisskYszVskSmllHIKWLIWkXbAdcCfjDFlxpjVwPvAj8MdnFJKKZtgqkEGAhZjTK7Luk3AMM8d\nRWSaiOSISE5hYWGoYlRKqVYvmGSdCnjOJloMeE1KZ4yZa4zJNsZkZ2TocJRKKRUqwdRZlwGevSTS\ngNL6Dlq/fv1xETnQyLi6AMcbeWy00WuJPi3lOkCvJVo15Vr6+FoZTLLOBRJEZIAxZpd93Uig3oeL\nxphGF61FJMdX
d8tYpNcSfVrKdYBeS7QKx7UErAYxxpQDC4GHRKSdiIwFrgZeDWUgSiml/Au2U8wv\ngbbAMWABcLs221NKqeYTVDtrY8xJ4LthjsXV3Gb8rHDTa4k+LeU6QK8lWoX8WsI2RKpSSqnQ0bFB\nlFIqBmiyVkqpGKDJWimlYkBUJetYGd1PRJJEZJ49xlIR+UZErnTZfrmI7BCR0yLyuYj08Tj2JREp\nEZF8Ebk3MlfhTUQGiEiliLzmsu6H9ussF5F3RaSTy7aovF8iMlVEttvj2iMi4+zrY+q+iEiWiHwk\nIqfsMT0rIgn2baNEZL39WtaLyCiX40REHhWRE/bXoyIizRj3nfZhJ6pEZL7Htkbfg/qObe5rEZEL\nROQTETkpIoUi8raIdHfZXu89qO/++WWMiZoXtmaBb2Lr4n4Rtm7twyIdl4842wEzgSxsf/C+ja1H\nZxa2nkvFwPeBZOBx4CuXY2cDq4COwBAgH5gU6Wuyx7bUHttr9uVh9uu62H5PXgfeiOb7BVwBHAAu\nsN+bnvZXzN0X4CNgvj3ebsAW4G6gjf0afw0k2dcdANrYj7sN2AmcYb/2bcAvmjHua7G1HnsBmO+y\nvtH3INCxEbiWK+2xpAEpwEvAYpftfu9BoPvnN5ZI/mf0+KG0wzYM60CXda8CcyIdW5Dxb8Y2OuE0\nYI3HdVUAg+3LR4CJLttnuSbACMY/FXgL2x8hR7L+C/C6yz797feofbTeL2ANcIuP9TF3X4DtwGSX\n5ceB/wMmAoext+aybzvoktjWANNctt3SHInNR/wPeyS4Rt+DQMc297X42H42UOrx/9DnPQh0//y9\noqkaJOjR/aKNiGRii38rtng3ObYZWw/QPcAwEekIdHfdThRco4ikAQ8Bnl/9Pa9lD/YETRTeLxGJ\nB7KBDBHZLSJ59qqDtsTgfQH+BkwVkRQR6YmtNLcYW1ybjf233G4zdfG6XSvRcS3QtHvg99gwxxys\ni3EfgqO+exDo/vkUTck66NH9oomIJAL/Bl4xxuzAdh3FHrs5riPVZdlzWyTNAuYZY/I81ge6lmi7\nX5lAIvA9YBy2iTJGA/cTm/dlJbZf4BIgD8gB3qX+a8HH9mIgtTnrrf1oyj0IdM0RIyIjgD8Dv3VZ\nXd89aNS1RFOybtTofpEkInHYvvpXA3faV9d3HWUuy57bIsL+YGMC8FcfmwNdS7Tdrwr7v88YY44a\nY44DTwGTib37EoetFL0Q21f+Ltjqch8l8M/ec3saUOZRkouEptyDaPz/hoicCXwM3GOMWeWyqb57\n0KhriaZk7Rzdz2VdwNH9IsX+F3IettLcdcaYGvumrdjiduzXDltd71ZjzCngqOt2In+Nl2J7MHpQ\nRPKB+4DrRGQD3tfSD9sDkVyi8H7Zf755gGtScryPtfvSCegNPGuMqTLGnABexvaHZyswwqOkPIK6\neN2ulchfi0NT7oHfY8Mcs1/21iifArOMMZ4D29V3DwLdP9+a+6FDgEr8N7C1MGgHjCUKWhfUE+uL\nwFdAqsf6DHvc12F7av0o7k+85wArsJWSBmP7DxqxVgfYnmR3c3k9AfzHfh2Or+Dj7PfkNdxbg0Td\n/cJW974O6Gr/Ga/CVs0TU/fFHtNeYDq2MXzSgf9ia5HjaE1wD7Y/nnfi3hrkF9geTvYEetiTQHO2\nBkmw/4xnY/vmmWxf1+h7EOjYCFxLT2x15vf5Oc7vPQh0//zGEsn/jD4usBO2OrlybE9HfxjpmPzE\n2Qdbia0S21cax+tH9u0TgB3YvpYvB7Jcjk3C1synBCgA7o309Xhc20zsrUHsyz+034ty4D2gUzTf\nL2x11s8DRdiafj0NJMfifcFW574cOIVtIPu3gEz7ttHAevu1bABGuxwnwGPASfvrMVxaHjTT/yHj\n8ZrZ1HtQ37HNfS3AA/b3rr//ZcHeg/run7+XDuSklFIxIJrqrJVSSvmhyVoppW
KAJmullIoBmqyV\nUioGaLJWSqkYoMlaKaVigCZrpZSKAZqslVIqBvw/cVDrusCFC5UAAAAASUVORK5CYII=\n",
            "text/plain": [
              "<Figure size 432x288 with 1 Axes>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "lcIzLZj5Seh9",
        "colab_type": "text"
      },
      "source": [
        "#### Encoder GAN Model"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab_type": "code",
        "id": "JPQP0vvCja5o",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 35
        },
        "outputId": "9b1f9836-c39e-452a-b862-a7c4d01521ba"
      },
      "source": [
        "# Draw a (batch_size, n) batch of Gaussian samples and rescale so that\n",
        "# E[x^2] = 1/2, i.e. unit average energy over an I/Q pair.\n",
        "x = tf.random.normal((batch_size,n),dtype=tf.dtypes.float32) \n",
        "x = x/tf.sqrt(2*tf.reduce_mean(tf.square(x)))\n",
        "# Per-sample coefficients in [0, 1), shaped (batch, 1, 1, 1) so they\n",
        "# broadcast across all feature dims -- presumably for the WGAN gradient\n",
        "# penalty interpolation; confirm against the GAN training step.\n",
        "epsilon = tf.random.uniform([x.shape[0], 1, 1, 1], 0.0, 1.0)\n",
        "print(epsilon.shape)"
      ],
      "execution_count": 21,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "(100, 1, 1, 1)\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "3m0fQ6OXgPf1",
        "colab_type": "text"
      },
      "source": [
        "# GAN Training"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "_4FU2Q7yh0OU",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "def test_encoding(M=16, n=n):\n",
        "    '''Scatter-plot the first two code-word dimensions for all M messages.\n",
        "\n",
        "    M: number of distinct messages (constellation size).\n",
        "    n: kept for backward-compatible interface; unused by this 2-D plot.\n",
        "    '''\n",
        "    inp = np.arange(0,M)\n",
        "    coding = gan_encoder.predict(inp)\n",
        "    fig = plt.figure(figsize=(4,4))\n",
        "    plt.plot(coding[:,0], coding[:, 1], \"b.\")\n",
        "    plt.xlabel(\"$x_1$\", fontsize=18)\n",
        "    plt.ylabel(\"$x_2$\", fontsize=18, rotation=0)\n",
        "    plt.grid(True)\n",
        "    plt.gca().set_ylim(-2, 2)\n",
        "    plt.gca().set_xlim(-2, 2)\n",
        "    plt.show()\n",
        "\n",
        "\n",
        "def Test_AE(data):\n",
        "    '''Calculate Bit Error for varying SNRs.\n",
        "\n",
        "    data: batch of message indices fed through encoder -> channel -> decoder.\n",
        "    Returns (snr_range, bber_vec): the Eb/No grid and the bit error per point.\n",
        "    '''\n",
        "    snr_range = np.linspace(0, 15, 31)\n",
        "    bber_vec = [None] * len(snr_range)\n",
        "    last = len(snr_range) - 1  # derive progress total instead of hardcoding 30\n",
        "    # The encoding does not depend on the SNR, so compute it once outside\n",
        "    # the loop instead of re-running the encoder for every SNR point.\n",
        "    code_word = gan_encoder(data)\n",
        "\n",
        "    for db in range(len(snr_range)):\n",
        "        noise_std = EbNo_to_noise(snr_range[db])\n",
        "        rcvd_word = real_channel(code_word,noise_std)\n",
        "        dcoded_msg = gan_decoder(rcvd_word)\n",
        "        bber_vec[db] = B_Ber_m(data, dcoded_msg)\n",
        "        # boolean 'and' (not bitwise '&') is the idiomatic connective here\n",
        "        if (db % 6 == 0) and (db > 0):\n",
        "            print(f'Progress: {db} of {last} parts')\n",
        "\n",
        "    return (snr_range, bber_vec)"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "6GW2opX7SwMo",
        "colab_type": "text"
      },
      "source": [
        "\n",
        "# AE training"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "sNHtzAC4SPBq",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "def get_gan_encoder(M):\n",
        "    '''Build the encoder network: message index -> channel input symbols.\n",
        "\n",
        "    e2, EncOut and GenIn are layer objects defined earlier in the notebook;\n",
        "    n is the global code-word length.\n",
        "    '''\n",
        "    encoder_layers = [\n",
        "        keras.layers.Embedding(M, M, embeddings_initializer='glorot_normal'),\n",
        "        keras.layers.Dense(M, activation=\"elu\"),\n",
        "        keras.layers.Dense(n, activation=None),\n",
        "        e2,\n",
        "        EncOut,\n",
        "        GenIn,\n",
        "    ]\n",
        "    return keras.models.Sequential(encoder_layers)\n"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "C5KjEhDvSWQR",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "def get_gan_decoder(M):\n",
        "    '''Build the decoder network: received symbols -> softmax over M messages.'''\n",
        "    model = keras.models.Sequential([\n",
        "        keras.layers.Dense(M, activation=\"elu\"),\n",
        "        keras.layers.Dense(M, activation=\"softmax\")\n",
        "    ])\n",
        "    return model"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "PB4rw6Qhtdbe",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "# Freeze the pre-trained generator (the learned channel model) so that\n",
        "# fitting the end-to-end autoencoder below updates only the encoder and\n",
        "# decoder weights (its 1,218 params appear as non-trainable in the summary).\n",
        "w_generator.trainable = False"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "FiuN3SZYpeTU",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 624
        },
        "outputId": "65be79fd-f2da-4874-ef36-67626763b8b8"
      },
      "source": [
        "# Assemble the end-to-end autoencoder: encoder -> frozen WGAN generator\n",
        "# (acting as the channel model) -> decoder, then train on random messages.\n",
        "gan_decoder = get_gan_decoder(M)\n",
        "gan_encoder = get_gan_encoder(M)\n",
        "\n",
        "gan_AE = tf.keras.models.Sequential([gan_encoder, w_generator, gan_decoder])\n",
        "# Autoencoder targets equal the inputs: sparse categorical labels in [0, M).\n",
        "# NOTE(review): test_data is created here but not used in this cell.\n",
        "data, test_data = random_sample(1000000), random_sample(1000)\n",
        "start = time.time()  # kept for the (commented) timing comparison below\n",
        "# 'learning_rate' replaces the deprecated 'lr' keyword alias of tf.keras Adam.\n",
        "gan_AE.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.05),\n",
        "               loss='sparse_categorical_crossentropy',\n",
        "               metrics=['accuracy'])\n",
        "history = gan_AE.fit(data, data, batch_size=100, steps_per_epoch=20, epochs=10)\n",
        "#time_to_train_gan += time.time()-start\n",
        "#tf.print ('Time for the training is {} sec,'.format( time.time()-start))\n",
        "gan_AE.summary()"
      ],
      "execution_count": 26,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Epoch 1/10\n",
            "20/20 [==============================] - 32s 2s/step - loss: 2.6346 - accuracy: 0.1596\n",
            "Epoch 2/10\n",
            "20/20 [==============================] - 31s 2s/step - loss: 1.8424 - accuracy: 0.4593\n",
            "Epoch 3/10\n",
            "20/20 [==============================] - 31s 2s/step - loss: 1.2503 - accuracy: 0.7654\n",
            "Epoch 4/10\n",
            "20/20 [==============================] - 31s 2s/step - loss: 0.8018 - accuracy: 0.9062\n",
            "Epoch 5/10\n",
            "20/20 [==============================] - 31s 2s/step - loss: 0.4901 - accuracy: 0.9562\n",
            "Epoch 6/10\n",
            "20/20 [==============================] - 31s 2s/step - loss: 0.3086 - accuracy: 1.0000\n",
            "Epoch 7/10\n",
            "20/20 [==============================] - 31s 2s/step - loss: 0.1879 - accuracy: 1.0000\n",
            "Epoch 8/10\n",
            "20/20 [==============================] - 31s 2s/step - loss: 0.1189 - accuracy: 1.0000\n",
            "Epoch 9/10\n",
            "20/20 [==============================] - 31s 2s/step - loss: 0.0804 - accuracy: 1.0000\n",
            "Epoch 10/10\n",
            "20/20 [==============================] - 31s 2s/step - loss: 0.0579 - accuracy: 1.0000\n",
            "Model: \"sequential_4\"\n",
            "_________________________________________________________________\n",
            "Layer (type)                 Output Shape              Param #   \n",
            "=================================================================\n",
            "sequential_3 (Sequential)    (None, None)              562       \n",
            "_________________________________________________________________\n",
            "sequential (Sequential)      (None, 2)                 1218      \n",
            "_________________________________________________________________\n",
            "sequential_2 (Sequential)    (None, 16)                320       \n",
            "=================================================================\n",
            "Total params: 2,100\n",
            "Trainable params: 882\n",
            "Non-trainable params: 1,218\n",
            "_________________________________________________________________\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "ngrucnfWBOHl",
        "colab_type": "text"
      },
      "source": [
        "### Training MI"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "D5B2TUanPC5d",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 295
        },
        "outputId": "107ef810-a7b7-47a9-bda7-2b271789009b"
      },
      "source": [
        "# Freeze the trained encoder/decoder before evaluation so later cells\n",
        "# cannot accidentally update their weights.\n",
        "gan_encoder.trainable = False\n",
        "gan_decoder.trainable = False\n",
        "\n",
        "# Plot the learned encoding (helper defined earlier in the notebook;\n",
        "# produces the scatter figure shown below).\n",
        "test_encoding(M, n)\n"
      ],
      "execution_count": 33,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAASMAAAEWCAYAAAAtl/EzAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0\ndHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAVJUlEQVR4nO3df6zddX3H8eeLW2wrpTJZ18VhYU2k\n2uKKymbulNjZRYIZgUQ3UTA0org5ZEFxQ9eGQokNGKtuOLZquwLpFBKqTEXNwrhR5CYLOsq8amtU\nWhnCBgL9ke7213t/fM+R4+Hce8+593y/n8/53tcjOen58bnnvj+5Oa9+v9/z/X7eigjMzFI7IXUB\nZmbgMDKzTDiMzCwLDiMzy4LDyMyy4DAysyw4jMwsC8nDSNJcSVsk7ZG0X9LDks6fZPzVkp6QtE/S\nVklzq6zXzMqRPIyAOcDPgTcBLwHWAndJOqN9oKTzgGuB1cDpwFLg+qoKNbPyKMczsCU9AlwfEXe3\nPf8vwKMR8bHG49XA9oj47QRlmlkfzUldQDtJi4EzgbEOL68A7ml5vBNYLOnUiHi67X2uAK4AmDdv\n3uuWLFlSUsXpHT9+nBNOyGEjtxx1nl+d5wawe/fupyJiUTdjswojSScC24HbIuJHHYYsAJ5redy8\nfzLwa2EUEZuBzQDLli2LXbt29b/gTIyMjLBq1arUZZSmzvOr89wAJO3pdmw2kSzpBOAO4DBw5QTD\nDgALWx437+8vsTQzq0AWYSRJwBZgMfC2iDgywdAxYGXL45XAk+27aGY2eLIII+BW4FXABRFxaJJx\ntwOXS1ou6RSKb962VVCfmZUseRhJOh14P3A28ISkA43bJZKWNO4vAYiIbwA3A/cDe4E9wHWpajez\n/kl+ADsi9gCaZMiCtvGbgE2lFmVmlUu+ZWRmBg4jM8uEw8jMsuAwMrMsOIzMLAsOIzPLgsPIzLLg\nMDKzLDiMzCwLDiMzy4LDyMyy4DAysyw4jMwsCw4jM8uCw8jMsuAwMrMsOIzMLAvJw0jSlZIekjQu\nadsk49ZIOtayLO0BSauqq9TMypR82VngceBG4Dxg/hRjRyPijeWXZGZVSx5GEbEDQNI5wGmJyzGz\nRJLvpvXoNZKekrRb0jpJycPUzPpjkD7M3wLOomhPtAK4EzgKbOw0WNIVwBUAixYtYmRkpJoqEzhw\n4IDnN6DqPLdeKSJS1wCApBuB0yJiTZfjLwY+EhGvm2rssmXLYteuXTOsMF9179de5/nVeW4Akr4b\nEed0M3bQdtNaBZP3WzOzAZI8jCTNkTQPGAKGJM3rdCxI0vmSFjfuvxJYB9xTbbVmVpbkYQSsBQ4B\n1wKXNu6vbW9tDawGHpF0ELgX2AF8PEXBZtZ/yQ9gR8R6YP0ELy9oGXcNcE0FJZlZAjlsGZmZOYzM\nLA8OIzPLgsPIzLLgMDKzLDiMzCwLDiMzy4LDyMyy4DAysyw4jMwsCw4jM8uCw8jMsuAwMrMsOIzM\nLAsOIzPLgsPIzLLgMDKzLDiMzCwLycNI0pWSHpI0LmnbFGOvlvSEpH2StkqaW1GZZlay5GEEPA7c\nCGydbJCk8ygW7V8NnA4sBa4vvTozq0TyMIqIHRHxZeDpKYZeBmyJiLGIeAbYAKwpuz4zq0by7iA9\nWMGv90nbCSyWdGpEvCDI3N66Puo8vzrPrVeDFEYLgOdaHjfvn0yHraqI2AxshqK9dZ1bCNe9RXKd\n51fnufUq+W5aDw4AC1seN+/vT1CLmfXZIIXRGLCy5fFK4MlOu2hmNniSh5GkOZLmAUPAkKR5kjrt\nPt4OXC5puaRTKNpib6uwVDMrUfIwogiVQxRf21/auL9W0hJJByQtAYiIbwA3A/cDe4E9wHVpSjaz\nfkt+ADsi1gPrJ3h5QdvYTcCm
kksyswRy2DIyM3MYmVkeHEZmlgWHkZllwWFkZllwGJlZFhxGZpYF\nh5GZZcFhZGZZcBiZWRYcRmaWBYeRmWXBYWTWR6OjsHFj8a/1JvlV+2Z1MToKq1fD4cPwohfBfffB\n8HDqqgaHt4zM+mRkpAiiY8eKf+uyzn5VW3veMjLrk1Wrii2i5pZRHdbZr3Jrz2Fk1ifDw8WHdWSk\nCKI67KJ12tora15Z7KZJeqmkL0k6KGmPpHdNMG69pCON5Wibt6VV12s2keFh+OhH6xFE8PzW3tBQ\n+Vt7uWwZfRY4DCwGzga+JmlnRIx1GHtnRFxaaXVms1SVW3vJw0jSScDbgLMi4gDwgKR/Bd5NsUi/\nmSU0PFzNll7yMALOBI5GxO6W53YCb5pg/AWSfgn8ArglIm7tNMjtreujzvOr89x6lUMYLQD2tT33\nHEXb6nZ3UbSsfhJ4PXC3pGcj4gvtA93euj7qPL9+zG10tB4HzXMIo/a21TQev6BtdUT8oOXhg5I+\nA7wdeEEYmc0GdTrRModv03YDcyS9ouW5lRTtrKcSgEqpymwA1OlEy+RhFBEHgR3ADZJOkvQG4ELg\njvaxki6U9Bsq/AFwFXBPtRWb5aPKr97LlsNuGsAHgK3A/wBPA38REWOSzgW+HhHNzrIXN8bNBR4D\nboqI21IUbJaDOp1omUUYRcQvgYs6PP9tWlpcR8Q7q6zLbBBU9dV72ZLvppmZgcPIzDLhMDKzLGRx\nzMisXeuJfDY7OIwsO+0n8n3iEwsdSrOAd9MsO+0n8j388CmpS7IKdBVGkuZLekzSXklz2177vKRj\nki4up0SbbdpP5Dv77GdTl2QV6Go3LSIOSboO+DzFCYqfApC0Ebgc+MuI+GJpVVqWyrpAs/1EvvHx\n9uuorY56OWa0Dbga+KikzwHvpVhv6LqI+IcSarOMlX2BZuuJfIN8vZV1r+tjRhFxjCJ8FlFcD7YJ\n+PuIuKGk2ixjdbpA0/LQ0wHsiPgq8J/Am4E7gb9qfV3SXEmfk/RTSfsl7Zb0wf6Va7mo0wWaloee\nvtqX9A6K5T0A9kdEdHi/J4C3AD8Ffg/4pqQnI+KumRZr5en1+E+dLtC0PHQdRpLeAtwOfAk4ArxH\n0qci4ofNMY3lQNa1/NjDjfWs30ixSqNlaLrHf+pygabloduv9l9PsebQd4BLgLXAcWDjFD93InAu\n8MjMyrQy+fiP5WDKMJK0HLiXYkXGiyJiPCJ+AmwBLmwshjaRWyiWj729H8VaOXz8x3IwaRhJWgJ8\nE3gGOD8iWk/42AAcAm6e4Gc3AcONnzvcn3KtDM3jPxs2DPYayjbYJj1mFBF7gZdP8NrjwIs7vSbp\n08Bq4M0R8dRMi7Ty+fiPpdb3a9Mk/R3wxxRB9L9d/ky37a0l6SZJTzduN0ma9Qvyj40tZOPG4kC0\n2aDq61X7kk4HPgiMAz9ryYlvR8T5k/xot+2tr6BYnnYlRWeQfwN+Bvxj3yYxYEZH4cMfXsnRo4Pf\nqqbf6tJPbLboaxhFxB56bB3UY3vry4BPRsRjjZ/9JPA+ZnEYjYzAkSMncPz489+E+YNXr35is0UO\n6xn10t56ReO11nErOr3pbGlvvXDhQubMWcnRo2LOnGDhwp2MjNTrwtLptIDevn0J4+O/y/HjYnz8\nOFu3Psr4+N5yCpwBt7d+Xg5h1Et76wWN11rHLZCk9rPBZ0t762Ja32Pfvtc2dkdem7agEkynBfTc\nubB9e3PL6ATe856lDA8vLafAGahz6+5e5RBGXbe37jB2IXCgw2Ups8qKFft8blAbX64yeHIIo1+1\nt46IHzeem6i99Vjjtf+YYpyZT1cYMMmXne2lvTXFmdwfkvQ7kl4GfJhinSUzG3DJw6jhA8B8ivbW\nX6ClvbWkAy3j/gn4CvBfwPeBrzWeM7MBl8NuWi/trQP468bNrBQ+PymNLMLILBf9OD/JYTY9Di
Oz\nFp2WU+klUHyy5fTlcszILAszXU7Fa0NNn7eMzFrM9PykZpg1t4x8/lf3HEZmbWZyfpJPtpw+h5FZ\nn/lky+nxMSMzy4LDyMyy4DAyy9joKLNmFU8fMzLL1Gw7Z8lbRmaZmm3nLDmMzDI12/rZeTfNLFOz\n7Zwlh5El4YtJuzObzllyGFnlZtuBWeuOjxlZ5WbbgVnrjsPIKjfbDsxad5KHUbetrRtj10s6IulA\nyy2//jM2qeaB2Q0bvItmz8vhmFG3ra2b7oyISyurzkoxmw7MWneSbhm1tLZeFxEHIuIBoNna2sxm\nkdRbRr20tm66QNIvgV8At0TErZ0G1aW99djYQh5++BTOPvtZVqzo3La6yhbJ3dTTb3VuAV3nufUs\nIpLdgHOBJ9qeex8wMsH45cDLgCHgDykC6Z1T/Z4zzzwzBtGDD0bMnx8xNFT8++CDncfdf//9WdXT\nb1XNL4U6zy0iAngousyDUnfTJI1IigluD9Bba2si4gcR8XhEHIuIB4HPAG8vcw4p5fYVeG71WL2U\nupsWEasme71xzKjb1tYdfwWg6VeYt9zWU86tHquXpMeMIuKgpGZr6/dSfJt2IcUu2AtIuhD4FvAs\n8PvAVcDHKiq3crldm5RbPVYvqQ9gQ9HaeitFa+unabS2BpB0LvD1iGh2lb24MXYu8BhwU0TcVn3J\n1cntK/Dc6rH6SB5GMUFr68Zr7e2t31lVXWZWreRnYJuZgcPIzDLhMDKzLDiMzCwLDiMzy4LDyMyy\n4DAysyw4jMwsCw4jM8uCw8jMsuAwMrMsOIzMLAsOIzPLgsPIzLLgMDKzLDiMzCwLDiObkdFR2Lix\n+NdsJlI3cbxS0kOSxiVt62L81ZKekLRP0lZJcyso0yYwOgqrV8O6dcW/DiSbidRbRo8DN1Ksaz0p\nSecB1wKrgdOBpcD1pVZnk3LrIuunpGEUETsi4ssUC/FP5TJgS0SMRcQzwAZgTZn12eSarYuGhty6\nyGYu+YL8PVgB3NPyeCewWNKpEfGCMKtLe+tupGyR/IlPPN/uenx8XylbR3VuAV3nufVqkMJoAfBc\ny+Pm/ZPpsGUVEZuBzQDLli2LVTX+b3tkZIRU86vi16acX9nqPLdelbab1kVr6161t8Ju3u/YCtvM\nBktpW0ZTtbaehjGK1td3NR6vBJ7stItmZoMn9Vf7cyTNA4aAIUnzJE0UkLcDl0taLukUYC2wraJS\nzaxkqb/aXwscovjK/tLG/bUAkpZIOiBpCUBEfAO4Gbgf2AvsAa5LUbSZ9V/SA9gRsR5YP8Fre2lp\nbd14bhOwqfTCzKxyqbeMzMwAh5GZZcJhZGZZcBiZWRYcRmaWBYeRmWXBYWRmWXAYmVkWHEZmlgWH\nkXXFa11b2QZpPSNLpLnW9eHDxYqO990Hw8Opq7K68ZaRTclrXVsVHEY2Ja91bVXwbppNaXi42DUb\nGSmCyLtoVgaHkXVleNghZOXybpqZZcFhZGZZSL0GdtftrSWtkXSssRRt87aqmkrNrGypjxk121uf\nB8zvYvxoRLyx3JLMLIXUa2DvAJB0DnBaylrMLK1BO2b0GklPSdotad0kbY3MbMAM0of5W8BZFC2K\nVgB3AkeBjZ0GS7oCuAJg0aJFte5nXvd+7XWeX53n1itFRDlvLI0Ab5rg5e+0HvuRdCNwWkSs6eH9\nLwY+EhGvm2rssmXLYteuXd2+9cCpe7/2Os+vznMDkPTdiDinm7GD1N76Bb8CUMm/w8wqkvqr/a7b\nW0s6X9Lixv1XAuuAe6qr1szKlPoAdtftrYHVwCOSDgL3AjuAj1dfspmVIfVX++vpsr11RFwDXFNJ\nYWZWudRbRmZmgMPIzDLhMDKzLDiMzCwLDiMzy4LDyMyy4DAysyw4jMwsCw4jM8uCw8jMsuAwMrMs\nOIzMLAsOIzPLgsPIzLLgMDKzLDiMzCwLDiMzy4LDyMyykC
yMJM2VtEXSHkn7JT0s6fwpfuZqSU9I\n2idpq6S5VdVrZuVKuWU0B/g5RW+1l1AsxH+XpDM6DZZ0HsXC/auB04GlwPVVFGpm5UsWRhFxMCLW\nR8SjEXE8Ir4K/AyYqCnjZcCWiBiLiGeADcCaiso1s5Jl09660RPtTGBsgiEr+PU+aTuBxZJOjYin\nO7zfr9pbA+OSvt/PejPzm8BTqYsoUZ3nV+e5ASzrdmAWYSTpRGA7cFtE/GiCYQuA51oeN++fDLwg\njCJiM7C58f4PddtidxB5foOrznODYn7dji1tN03SiKSY4PZAy7gTgDuAw8CVk7zlAWBhy+Pm/f19\nL97MKlfallFErJpqjCQBW4DFwFsj4sgkw8eAlcBdjccrgSc77aKZ2eBJfZ7RrcCrgAsi4tAUY28H\nLpe0XNIpFN++bevy92yefokDwfMbXHWeG/QwP0VEmYVM/Iul04FHgXHgaMtL74+I7ZKWAD8Aljda\nXSPpQ8DfAPOBu4E/j4jxSgs3s1IkCyMzs1apd9PMzACHkZllYlaE0XSugxs0kq6U9JCkcUnbUtfT\nD5JeKulLkg42/nbvSl1Tv9Tx79U03c9bFic9VqD1Ori9wFsproN7dUQ8mrKwPnocuBE4j+IAfx18\nluL8s8XA2cDXJO2MiInO0h8kdfx7NU3r8zZrD2BLegS4PiLuTl1LP0m6ETgtItakrmUmJJ0EPAOc\nFRG7G8/dAfx3RFybtLg+qsvfayrdfN5mxW5auy6ug7P0zgSONoOoYSfFNYo2QLr9vM26MOryOjhL\nbwGwr+255yiuRbQB0cvnrRZhVMJ1cFnpdn41034tIo3HvhZxQPT6eavFAewSroPLSjfzq6HdwBxJ\nr4iIHzeeW4l3rQfCdD5vtdgy6lIv18ENHElzJM0DhoAhSfMkDex/NhFxENgB3CDpJElvAC6k+J92\n4NXt79VB75+3iKj9jWKZ2gD+j2Lzv3m7JHVtfZzj+sYcW2/rU9c1wzm9FPgycJDiK+J3pa7Jf6+u\n5jatz9us/WrfzPIym3bTzCxjDiMzy4LDyMyy4DAysyw4jMwsCw4jM8uCw8jMsuAwMrMsOIzMLAsO\nI0tO0nxJj0naK2lu22ufl3RM0sWp6rNqOIwsuSgupLwOeDnwgebzkjYClwMfjIgvJirPKuJr0ywL\nkoYoVnL8LWAp8F7gU8B1EXFDytqsGg4jy4akPwG+Avw78EfALRFxVdqqrCoOI8uKpO8BrwG+SLFk\nSLS9/mfAVRTdQp6KiDMqL9JK4WNGlg1J76BYzRFgf3sQNTwD3AL8bWWFWSW8ZWRZkPQWil20rwBH\ngD8FXh0RP5xg/EXAp71lVB/eMrLkJL2eYonZ7wCXAGuB48DGlHVZtRxGlpSk5cC9FAvwXxQR4xHx\nE4rF3C9srH1ts4DDyJKRtAT4JsVxoPMjorVP2gbgEHBzitqsenXqRmADJiL2Upzo2Om1x4EXV1uR\npeQwsoHSODnyxMZNjXY/ERHjaSuzmXIY2aB5N/DPLY8PAXuAM5JUY33jr/bNLAs+gG1mWXAYmVkW\nHEZmlgWHkZllwWFkZllwGJlZFhxGZpaF/wcta5aIUTS7gwAAAABJRU5ErkJggg==\n",
            "text/plain": [
              "<Figure size 288x288 with 1 Axes>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "ukO76l6yIoPc",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "# Draw a fixed-size sequence of random test messages in [0, M) for the\n",
        "# BER evaluation below.\n",
        "# NOTE(review): no RNG seed is set, so results vary between runs -- consider\n",
        "# seeding (e.g. np.random.seed) in the setup cell for reproducibility.\n",
        "N_test = 500000\n",
        "test_msg = np.random.randint(M, size=N_test)"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "G5iCDE4dSL35",
        "colab_type": "text"
      },
      "source": [
        "#### BER evaluation of the GAN-trained AE"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "7M-S0sbhIoPw",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 106
        },
        "outputId": "8a37be52-3e86-4013-ab28-ea2c5898049b"
      },
      "source": [
        "gan_bber_data = Test_AE(test_msg)"
      ],
      "execution_count": 29,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Progress: 6 of 30 parts\n",
            "Progress: 12 of 30 parts\n",
            "Progress: 18 of 30 parts\n",
            "Progress: 24 of 30 parts\n",
            "Progress: 30 of 30 parts\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "UYdEm0eQIoP2",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 352
        },
        "outputId": "7a7e5676-f317-4305-9077-a2af16f27018"
      },
      "source": [
        "# Compare the trained AE's error rate against an approximate 16-QAM curve.\n",
        "def SIXT_QAM_sim(ebno):\n",
        "    \"\"\"Approximate 16-QAM symbol-error rate for Eb/N0 given in dB.\n",
        "\n",
        "    erfc-based approximation; 'special' is presumably scipy.special,\n",
        "    imported earlier in the notebook -- TODO confirm.\n",
        "    \"\"\"\n",
        "    return (3.0 / 2) * special.erfc(np.sqrt((4.0 / 10) * 10.0 ** (ebno / 10)))\n",
        "\n",
        "ebnodbs = np.linspace(0, 15, 16)\n",
        "fig = plt.figure(figsize=(8, 5))\n",
        "ax = plt.gca()  # one Axes handle instead of repeated plt.gca() calls\n",
        "ax.semilogy(gan_bber_data[0], gan_bber_data[1], '^-')\n",
        "ax.semilogy(ebnodbs, SIXT_QAM_sim(ebnodbs), '*-')\n",
        "ax.set_ylim(1e-5, 1)\n",
        "ax.set_xlim(0, 15)\n",
        "ax.set_ylabel(\"Batch Symbol Error Rate\", fontsize=14, rotation=90)\n",
        "ax.set_xlabel(\"SNR [dB]\", fontsize=18)\n",
        "ax.legend(['AE with WGAN', '16QAM'], prop={'size': 14}, loc='upper right')\n",
        "ax.grid(True, which=\"both\")\n",
        "\n",
        "#print('time to train the AE Model with MI',time_to_train_mi)\n",
        "#print('time to train the AE Model with GAN',time_to_train_gan)"
      ],
      "execution_count": 30,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAgMAAAFPCAYAAADQqc3dAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0\ndHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOzdd3gU5fbA8e9JLySBACmIFCkR6VK8\ngkgRxQZ6FXtDFMVefsrFCihXvaJe67WBiChXscu1i0RQLCCiUkSQItI7CZB+fn/MEtLZhN1sdud8\nnmee7M7MO3NOZiHvzrxFVBVjjDHGuFdYoAMwxhhjTGBZZcAYY4xxOasMGGOMMS5nlQFjjDHG5awy\nYIwxxricVQaMMcYYl7PKgDHGGONyIVEZEJF/icgcEZkqIpGBjscYY4wJJkFfGRCRzsBhqtoH+A0Y\nGuCQjDHGmKAS9JUBoBfwmef1J0DvAMZijDHGBJ06UxkQketFZL6I5IrIy2W2JYvIuyKyR0TWiMiF\nJTY3AHZ7Xu8CkmspZGOMMSYkRAQ6gBLWA+OBQUBsmW3PAHlAKtAF+FBEflbVxcBOINGzXxKwvXbC\nNcYYY0JDnbkzoKrvqOp7wLaS60UkHjgbuEdVs1X1a+AD4BLPLnOBgZ7Xg4BvailkY4wxJiTUpTsD\nlWkLFKjq7yXW/Qz0BVDVhSKySUTmAH8Cj1R0EBG5CrgKICw2sVtEUgqxERCGeBWEouwtOPA+LgJE\nvCyrviwrhHlXFFXYU3BgVsr4CMHL05YvGykV/qYqWlcE7MkvXdbbWqcC2SXK1ov0LmbV8uWq83vK\nKlE2wctzVlbW6/MCu/MOlE2Mql7Mu0qUTYqqOOYKr08FZasT887cA2XrR3tftkhLl20QXTrmqg5T\npLCjTFlvzlu2XHK0EOblh7FIYXtOibIxVZ+z5KYihW0lyjY8SNmy561JWQEKy5RtFFu9827dp8XH\nalhJ2YoOV1imbHXPu+WgZcsfrEi1gnLl96s4XmXrPkVLlA339h89zr+ffQVKXISQGOV9uXJlo6tX\ndneusvcQyu7etonCvbsqLCh1bQpjERkPNFXVYZ73fYA3VTWtxD4jgItUtV9NzhGd3kbTL3uchJgI\nGidEl95Yya9jS1YuWbkH/ionREfQqGzZSmwtU7ZedDiN6kVXerqSl2T7nlyycwuL38dHh5McH1Vu\nv4re79ybx568EmWjwqkfF0VF17zsml378tlbomxsZDiJsaXrjiUPU7J8Vk4+OflFxe+jI8JIiIms\n4CzlY87OLSC3oHTZ+Oiy5y1/nD25BeQVHlgfFS7ERkUU71u8pYKYc/ILKHFKwsMgOiK8yjj3yyso\npMRpCROICA8rdQL1vFAttZrCorr1b88YE9o2TLmZ3A3LK6wMBMOdgWwOtAnYLxHIqu6BRGQwMDgq\nrTUAOXkF3NghkvrRVX9V2JlTxO2zC0qty8kv4KaONSubm1/IzZ3Uy7KFpdbl5Rfyf52pWdmCQm7v\nCvWjwyspVXnZgsJC7ugW5eV5i0qtKyoq4u4eYV6WzStX9t6e4VWWdcrll1qnqoz7W0SNrk8Y8EBv\nb3PdV6oyEC7wcJ9or8uWqDMRGQYT+sZWWLZkBWhHrvKPCsr+q09shd8WSlY5duQod31dvuw/j4sl\nqcQ3HK2g7K5c5Z5vypcd1yuWpDLnLVt52pWrjP22fNkxx3pirqRiqSi7cpX7v8spVWGLDIO7j4kp\nl2+peHOUf/5Qvtwdx8QUf5urrJK3K1d5aF75sv/oEUPC/rIVF2VXrjJhfumyEWFwe/cDZSuzO095\npIKyt3Vzypb+3ZSWlac89mP5sjcfHV3uvGXzzspTnliQS4mbgUSEwY1do6lXsmwF1ykrT3n6pzJl\nBa6vomzJ8z6zsHzZ67qUKVtB
uf9UUO7aMuUqur7ZecqzP5cve03nMmUrOfenq/P5ZYvzJSBcoFOj\ncE5q4d3wNp+uzufXrb4p27EaZT/zlK1KMFQGfgciRKSNqi73rOsMLK7ugVR1BjAjOr3NCABEmL8v\nhfGDOlRZ7u53fwVZS6mPh5WtM2WDLV5/lV2Qm8L4U2pW9pe8FMafWrOySwpSGH96zcr+VpjC+JMO\nXlYqKLtcUxl/ctWfi4rKrdRUr35PFZVdLamMP636ZUWEtWGpXv2eKir7V3gq4wfXrOzGiDSuH+JF\n2bC1lKzZigibI9O48YwalA0TtkalcfOZNSu7LTqNW6ooW1m57dFp3FrDc+6ISeP/DlJ28+4c/v3w\nrOKihQpLdyjPX9WblISYg5Z93Idlf9uhvOBl2SdKlK1MnWlAKCIRIhIDhAPhIhIjIhGqugd4B7hP\nROJFpDdwBjD1UM+ZX6gsWLPjoPst+HMn+WV+k1a27pQNtnitbO2UDbZ4raz3ZQMV75Mzl1NU5nZD\noSpPzlwRVGUrUmfaDIjIWGBMmdXjVHWsiCQDLwEn4vQ2GK2q02pwjsHA4PT09BHTplW7eNDKzs6m\nXr16gQ6jVrgpV7B8Q5mbcoXgyPfeb/byZ1b5v5nNEoT7esfV+bJVtRmoM5WB2pSRkaHLli0LdBi1\nJjMzk379+gU6jFrhplzB8g1lbsoV3JVvoHIVkR9VtXtF24KhzYAxxoSM3bt3s3nzZvLz86vcLykp\niaVLl9ZSVIHnpnz9kWtkZCQpKSkkJpZtb+8dV90ZsMcEoc9NuYLlG2zCw8NJTEzksMMOIyoqqsrx\nRgoLCwkPr7rnTyhxU76+zlVVycvLY926dezevZvCwop7DvTv37/SOwOuqgzsZ48JQpebcgXLN9is\nWLGCJk2aEBdX9XNegKysLBISEmohqrrBTfn6K9e9e/eyfv16WrduXeH2qh4T1JneBMYYE+ry8/OJ\njS079YoxvhEbG3vQx0+VcWVlIG7vOsjaFOgwjDEu5O1Q5MZU16F8tlz1mGB/m4Fu6WEj3h8zlOUZ\n1wQ6pFoR7M9Zq8NNuYLlG2ySkpIqvYVblpueoYO78vVnritWrGDXrl0VbrM2A2V0bxKu86/y/Ici\n4XDqBGjQHOq3gPqHQ4R3cw4Ei2B/zlodbsoVLN9gs3TpUtq1a+fVvm56hg7e5ZuZmUn//v3ZsmUL\njRo1qnS/Fi1acP3113Pbbbf5Okyf8Oe1reozZm0GKiJhEFXP+fnhrfDq2fB0NxifCo+2g5dOhneu\nhlkPwE+vwao5sPNPKKp6fGdjjAlVCxYsIDw8nN69e1e4XUQqXJ577jmfnL9Xr15s2LCBhg0bAvDy\nyy/75E7RsmXLEBG+/vrrUusHDhxIWFgYW7ZsKbW+adOm3HPPPcXvs7KyGDNmDB06dCAuLo7k5GS6\ndevGAw88wNatW8ud74MPPiA8PJyLLrqo3LbVq1cjIjRs2LDcN/x+/fpx/fXXH0qqlXLnOAP7n6t0\nOg9OfQSyNsDONbBjTemfq7+GX96g1PjkYRGQ1BTqN/fcTWgODVoceB/f+MDxS8raCG9dDkNfhoTU\nWkjSGGN8a+LEiVx77bW88sorlX4DffHFFzn99NNLrUtKSvLJ+aOiokhLSzv4jtWUkZFBeno6mZmZ\nHHfccQDk5eUxd+5cmjZtyldffcXQoUMBWL58OevWrWPAgAEA7Nixgz59+rBz507GjRtH9+7dqV+/\nPitWrOCVV17hpZdeYtSoUaXON2XKFEaNGsWTTz7Jjh07aNCgQbmY9u7dy0MPPcSDDz7o83wr4qrK\nwP42A4enN2Zd+gCiVi9m8ezZJfZId5YGf4MGQEuQonyic7cSu28TMTmbicnZREzOJmK3biDmr4VE\n5ZeuuRWGRZETk0JOTCo5Mansi3V+pmyaQ+Ot37J+2o213lYhOzubzMzMWj1noLgpV7B8g01SUh
JZ\nWd5NuFpYWFjlvluycrn9vd945O/taFQvylchVmrfvn1MmzaNTz75hF27dvHss8/yz3/+s9x+0dHR\nxMfHl1pXUFBQYS6TJk3iP//5Dz/++COFhYV88MEHnHHGGYwdO5Zbb70VgCuvvJKYmBiefvpp5syZ\nw2mnncaqVatYsmQJl19+OXCg4dzo0aO58847UVV27drF8OHDeeutt0hISOCaa67hpptuqjS/4447\nji+++KJ4n2+++Ybk5GTOO+88Pv30UwYNGgTAxx9/TExMDB07diQrK4vbb7+d1atXs2DBAtLT04uP\n17NnT3r27Imqlsp93bp1zJkzhxdeeIFvv/2WSZMmcfXVVxdvz87OBmDkyJE88cQTDBs2jCZNmgDO\nZyIvL6/Kz0VOTk7N/o2oquuWtm3bqs/kZqtuWqL628eq3z2v+smdqv+9UPXZ3qoPHK46JrHiZWwD\n1W+eUl32ieq2P1QLC3wXUxmzZs3y27HrGjflqmr5BpslS5Z4ve/u3bur3H7XO79oi9H/07ve/fVQ\nw/LKK6+8op06dVJV5zo0btxY8/LySu0D6Jtvvun1MZcuXaqAbtiwQXfv3q133XWXNmrUSAcNGlS8\nT9OmTXXq1KnF5wV0y5Ytmpubq48//rjGxcXphg0bdMOGDZqVlaWqqs2bN9fk5GR96qmndPny5frk\nk08qoHPnzq00lhdffFFjYmI0JydHVVXHjh2rF110kX722Wfarl274v3OP/987d+/v6qqFhYWav36\n9fXqq6/2Ouf77rtPTz31VFVVnTx5snbp0qXU9lWrVimg8+bN0549e+rw4cOLt/Xt21evu+66Ko9f\n1WcMmK+V/F101Z0Bv4iKh5R2zlKRLcvgkzth9WwozHMaLMbWd9oefHbXgf3CoyD5CGjYGhq1hUZt\noGEbaNQaYsvfQjLGhIZxMxazZP3ucuuranGeV1DEwr92ogqvfb+Gxet2ERXhfROwo5okMmZw+2rF\nOWnSJC655BIA+vbtS1xcHO+//37x7fP9LrnkEoYNG1Zq3bfffkvHjh3LHfPII48kLS2NWbNmcfrp\np5OZmcltt93G/fffT0FBAatXr+avv/6qsNFoVFQUSUlJiEiFjw5OOumk4ufrN9xwA08++SQzZ87k\n2GOPrTC//v37k5OTw3fffUffvn2ZNWsWF198Mb169WLFihVs3LiRtLQ0MjMzue666wDYsmULO3fu\nJCMjo9SxevXqxS+//AJAnz59+PjjjwHny/fkyZMZN24cAEOHDuW6665j/vz5dO9evl3fww8/zAkn\nnMCtt95K+/bVu17VZZUBf2ucAfWbQVEBRMQ4FYKjzoTTH4O922Hrctj6O2xbDltXOK9//8TZf7+4\nRp4KQmtPBaGN875+cwg/yCXM2kiXn+6Ebu9aWwVjQsS6nfsONGVS533LRvFVljkUK1as4Ouvv2b/\nMO4iwkUXXcSkSZPKVQYmTJjAySefXGpds2bNKj123759yczMZMCAAcybN4+3336bZ599lnnz5rF4\n8WJatWpF06ZNqx1zp06dSr1v0qQJmzdvrnT/Vq1a0axZM2bNmsUxxxzDd999x8SJE4mPj6d79+5k\nZmbSuXNnNm7cSP/+/as89xtvvEFubi7jxo1j7dq1xetnzpzJjh07OOWUUwCoV68eZ555JpMmTaqw\nMtC3b18GDRrEHXfcwQcffFCd9KvNKgO1Yc9m6HY5dL8c5k+GbM+AR3HJ0OwYZympMN9pxLhtuVNZ\n2F9R+O0j2FuiZWpYJCS3LFFBaHPgdVyys89XD5O0awl89S+nAmKMqVMq+4ZeWfezzbtz6PPwrJJ1\nAXbvy+epC7uSkhDjlxgnTpxIYWFhqT/q6umWvnbtWg4//PDi9WlpaV6PpQBOC/nHHnuM77//ntat\nW5Oamkq/fv2YNWsWS5YsqXFX0sjIyFLvRYSioqIqy/Tv35
/MzEz69u1L48aNi/PYX2HZsWMH9erV\no2fPngA0btyY+vXr89tvv5U6zv7fR1JSUqnKwMSJE9m5c2epOxmqSkJCAo8++miFw1Q/9NBDdOnS\nhTlz5lQj++qzykBtOP+1A6+9+YMcHuncBWjUGjJOKb1t3w6nYrC/orD1d9i2AlZ87tx1qIAAzJ/k\nLBHRcHfltWNjTN325MzlFJUZH6ZQlSdnrmD8mR18fr6CggKmTJnCgw8+WK6XwCWXXMLkyZO59957\na3z8fv36cc011zB9+vTiP/z9+vXjtdde47fffquyNX1UVFSlk/LURP/+/bn66qv56KOP6Nu3b6kY\nb7zxRrZv385xxx1XXNEICwvjvPPOY+rUqdx9992lKkVlbd++nffee48pU6aQkZFRqpHlCSecwFtv\nvcWll15arlzHjh259NJLGTVqFNHR/hsDx1WVgRKzFgZ1i2RHE4hoAml9IQ2kqJDo3M3E7V1H3N51\n1MtaSfKOn4jM30XJjo65EkP2k/3ZE9+C7Hotya7Xgn2xh6FhoTHyV7C3Nq8uyze4+KI3wbzV28gv\nLF0ZyC9U5q3a6vWxq+PDDz9k69atnH/++cX9+/f7+9//zksvvcTNN99c3KJ/48aNrFixotR+8fHx\nlY4HcNhhh5Gamsobb7zBSy+9RFZWFt27d2fEiBEUFBTQvXv34rz27t0LOJ+D6OhoUlJSyMnJ4f33\n36dz587ExsYSFxeHqpKbm1vq9+FNS/wePXqQm5vL888/zwMPPFC8b6dOnVi5ciUbNmzg9ttvL3WM\nO+64o/jRwp133snRRx9NfHw8v/32G19++SUpKSlkZWXx4osvkpCQwJAhQwBKtQcZPHgwzz//PH//\n+9+LexPs2bOn+DyjRo3i6KOPBqBt27bWm8BXi097E9RlH9ysOra+FoxtqDomSfXFgarvXK36n96q\n4xoe6Nlwf4rqc8ervnet6rfPqq6ao7p3R6Cjr5Fgb21eXZZvcPFlb4LaMnjwYD3xxBMr3PbHH38o\noJ9++qmqOr0JKlruuuuuKs9x3nnnqYjo5s2bi9c1b95cW7VqVWq/kr0J9hs5cqQ2bNhQAR0zZkxx\n2QkTJpQq601LfFXVI444QgFdvnx5qfXHHHNMcSv/snbt2qV33XWXtmvXTmNiYjQmJkY7duyod9xx\nh27atElVVTt27KgjRoxQ1fLXdubMmQrosmXLSvUmKGnUqFEK+K03gSuHI3bNFMavXwT1UplHR3rw\nq9NWYf8ji4I85xHDpkWw8VfPz0Wl2yQkHQ6pHSCtg+dnR2jQEsLq7sCVwT5cbXVZvsHFhiOunJvy\nrYvDEbvqMYHreP7w78nMhH6Xl94WEeX8kU/rAJ3Pd9apOhWGjYtg06+en4th+WegnudykfGQelSJ\nSkJH5310mQ+2jbhojDFBwyoD5gARSEhzljYDD6zPz4Etv5W+g7D4Hfhx8oF9GrSE1PbO3YPUDrDk\nffjzO+vFYIwxQcAqA+bgImOgSRdn2U8Vdv3lVA72VxA2LYLf/le67P5eDOGRMGpV+TsIxhhjAs5V\nbQZK9CYYsX/wDDeozTngY/ZtoM3vz9Fg5yLCtACF4t4MShh74puxOzGjeNkb18SZOdJHgn2+++qy\nfINLUlKS133w/TnnfV3kpnz9meuKFSvKzXa4X//+/SttM+CqysB+rmlA6FHrja5m3AILXnaGWC7M\ng84XQoe/w9p58Nc8+Gs+5Ho+rDH1oWl3aNrT87M7xNR8hrNgb2BWXZZvcLEGhJVzU77WgNC4Q0Uj\nLrYe6CwARUXOoElrf/BUDuZB5oOw/z5C4wxo2sNZDu8JjTLqdA8GY4wJdlYZML53sBEXw8KcP/iN\nM+BoZ+ITcnbDuh+duwZ//eC0PfhpqrMtOhEO63agcnBYtwPDLZdk8zAYY0yNWGXA1A0xidCqv7OA\n00Bx2x+eOweeOwhzHg
H1jC3esPWBRwuH94TG7WweBmOMqSGrDJi6SeTA/AxdLnDW5WbD+p8OPFpY\n/hn8XLohqM3DYIwx1WeVARM8outByz7OAs7dgx2rYfkX8P2zsH0lxfO6Shg0ORq+fhyO6Adpnazd\ngTHGVML+dzTBS8SZwvmYEdCyL4hQKJE4jRDbQc4u+GIMvNAXJrSC6ZfBjy87FQhjTLXMnj2bIUOG\ncNhhhyEivPzyy+X2+f333znrrLOoX78+cXFxHH300SxdurTUPt9//z1DhgwhOTmZ6OhojjzySMaN\nG0dOTk6F533ssccIDw/nrrvuKrctMzMTESExMbF4EqP9li5dioggImzdurVcWVOaVQZMaPD0YFjQ\nbQJ0H+5UEq79Fv5vGZz1ojMV9NofYMZN8ERnZ5lxEyx+F/ZuD3T0xtR52dnZdOjQgSeeeILY2Nhy\n21etWkXv3r1p2bIlX375JYsWLWL8+PGlxoX44IMP6NOnDw0bNuSLL77g999/Z8yYMbzwwguceeaZ\n5OWVn4Z90qRJjB49mpdffrnS6Yrr16/Pm2++Wa5cs2bNDjFrF6lsBqNQXlwza6FHsM/0Vh1V5lpU\npLp5mep3z6tOu0D1gaaemRuTnFkbP7tXdcWXqnn7ai3eQ+Wma6sa/Pn6dNbC3RtUXzpZdffGQ4yq\n+uLj43Xy5Mml1l1wwQV64YUXVlpmz5492qhRIz3jjDPKbfvxxx9VRPThhx8utX7u3LmakpKieXl5\n2qpVK50xY0ap7ftnMbznnnv0+OOPL16fl5enKSkpeu+995ab5bAu8OeMlDWdtdBVbQZKjEAY1HOi\nV1ewzwFfHd7l2hbS2yKpV5CQtZwGO36mwY6fSZz7NGHfPE5hWBS7ktqxo0FndjToQna9lj4dJdGX\n3HRtIfjzTUpKqnIu+pIKCwur3Df6i/FErvmW/C/uJ3fgg74K0Ws5OTnF8RUVFTFjxgxuueUWBg4c\nyMKFC2nWrBk33ngjZ599NgAzZsxg69atXH/99eXyatOmDX379uXVV19l5MiRxeufffZZzjrrLHJy\ncjjnnHN47rnn6Nu3b/H2/Y8GzjzzTCZMmMDPP//MEUccwYwZM4iLi6Nnz56A87mJjo726++jOg52\nbQ9FTk5Ojf6NuKoyoKozgBkZGRkjgnkUs+oK9lHbqqP6uZ5w4GVuNqyZS/jKTJJXZpK88hXgFYhN\nhpbHOw0Rj+jnPIIoKYAzNLrp2kLw57t06dLyI899PNqZBKyMgsICIsIr+C/6z2+cxrMeUT9PJern\nqU4bmma9vQskrSOc8lB1Qi8nJiamOJeNGzeSnZ3No48+yv33388jjzzCl19+yZVXXknjxo057bTT\nWLt2LQDdunWrcPS9I488kilTphRvy87O5t1332XWrFkkJCRw5ZVX0q5dO/bs2UNaWhoAcXFxADRr\n1owhQ4Ywffp0/vnPfzJt2jSuuOIK4uPjAahXr16dGt3QnyMQxsTE0LVr12qXc1VlwJgqRdeDtic5\nC0DWJlj1FazMhD9mwZL3nPUNWhyoGLTsC189bDM0mtrTpAfsWAX7tjnjbkgYxDV0Zg4NkKIiZ/yP\nM844g1tvvRWALl26MH/+fJ5++mlOO+00r44TFRVV/Pr111+nadOmdO/ujJ7bqlUrevTowZQpU/jH\nP/5RruwVV1zBFVdcwciRI/n888957rnnWLFixaGm5hpWGTCmMgmp0OlcZ1GFbSsOVAwWveP0TCjJ\nxjcwNVHJN/R9VX173D//R0SMM/9HuyEBrYg2atSIiIgIjjrqqFLr27Vrx+uvvw5A27ZtAViyZAm9\ne5e/g7Fs2bLifQAmTpzIsmXLiIg48GeqqKiILVu2VFgZGDhwIGFhYVx66aUMGDCApk2bWmWgGurm\ng1Bj6hoRaNQGeo6AC6Y50zFf8AaktC/dniAyDjpdAGvmQlHFLZ+NOWT75/+48gvnZ/am
gIYTFRVF\njx49KDsB3O+//07z5s0BGDRoEI0aNWLChAnlyi9YsIDMzEyGDRsGwOLFi/n+++/57LPPWLhwYfHy\n/fffs3r1ambPnl3uGGFhYQwbNozMzEyuuOIK3ycZ4uzOgDE1ER4BGSfD75/ClqUQHu18Q4trCD//\n1/nWFp8C7U53vrW1OA7CIwMdtQkVB5v/ww+ys7OLv2kXFRXx559/snDhQpKTk2nWrBmjRo3i3HPP\npU+fPgwYMIBZs2bx+uuv8957zuO1uLg4Jk2axNChQxk+fDg33HADDRs2ZO7cudx2220MHDiQq6++\nGnDuCnTt2pWBAweWi+OEE05g4sSJHH/88eW23X333dxwww0kJ1cwd4mpkt0ZMOZQ7P+GNmKmM75B\nemcY9QcMfQma94Kf34CpZ8IjbeC9a2HZJ1CQG+iojam2+fPn07VrV7p27cq+ffsYM2YMXbt25d57\n7wWcFv0vvPACjzzyCB07duSpp57ilVdeKdVeYMiQIcyePZstW7YwYMAAmjdvzgUXXMDQoUOZPn06\n4eHh5OXl8eqrrzJ06NAK4zjnnHN466232LVrV7ltkZGRNGrUiDAbbbTaREu0SnWLjIwMLXs7K5QF\newvs6qhzuebvgxUzYekMWPYx5O6CqASnkWK7IdDmRIiKr/Hh61y+fhbs+VY113xZ/mxxXlcUFhZy\n0UUXMWfOHD788EO6dOkS6JBqhT+vbVWfMRH5UVW7V7TNHhMY40+RsZ5HBadDQR6smg1L34ffPoRF\nb0NELLQ+wakYZJwMMUmBjtiYWhMeHs5rr73GE088wdy5c11TGaiLrDJgTG2JiII2A53ltH/Dn9/C\n0g+cuwa//Q/CIp3uikcNgYzTIL5hoCM2xu/Cw8O59dZb/TYIj/FO0FcGRCQJ+Bw4Cvibqi4KcEjG\nHFx4xIEZGE/+F6yb71QMlnwAH9wAchM07w1HnQFHng6J6YGO2BgTwkKhlcVe4DTgrUAHYkyNhIXB\n4T3hpPFw089w9Ww47lanu9hHt8FjR8Kkk2DuU7BjzYFyWRvp8tOdzuBIxhhzCKp9Z0BEUoEtqlrk\nh3iqTVXzgS0iEuhQjDl0Ik6PhPTOcMI9sGWZc7dg6fvw2d3Okt7ZaWOweQlJu5bYyIdBRlWx/6+M\nPxxKhwCv7gyISKSIPCwiWcA6oIVn/b9E5FpvTyYi14vIfBHJFZGXy2xLFpF3RWSPiKwRkQu9T8OY\nENU4A/reDiO/hhsXwon3w4Zf4Mv7YdHbCOqMejg2CcanBDpacxCRkZHs27cv0GGYELVv3z4iI2s2\nnom3jwnGAIOBi4GSnaR/AIZV43zrgfHASxVsewbIA1KBi4BnRaQ9gIikiUhmBUtaNc5tTHBLbgm9\nb4T/+81pYBhW4h+9hEGbk2B5XFkAACAASURBVJ2RD13YXThYpKSksG7dOvbu3XtI3+KMKUlV2bt3\nL+vWrSMlpWZfCrx9THABMFxVvxKRko8HFgFtKylTjqq+AyAi3YGm+9eLSDxwNtBBVbOBr0XkA+AS\nYLSqbgT6eXseY0JaQhrUSwUtpDAskvCiAmjUFv740nmc0LANHH0pdLkQ4hsFOlpTQmJiIgDr168n\nPz+/yn1zcnKIiYmpjbDqBDfl649cIyMjSU1NLf6MVZe3lYEmwJoK1kdU4xhVaQsUqOrvJdb9DPSt\nZP9SROQjoAuQISLPq+rLFexzFXAVQOPGjYN6TvTqCvY54KvDLbm2X7OEvPRBrKjfh9Y75xCVt4Ol\nx4wjZfM3pG/4jKTP76Hoi3FsbXQMG9JPYkeDTqXnUAhSbrm+4ORar169QIdRa9yUr79y/euvv2pc\n1ts/5IuB44HVZdafC/xY47MfUA/YXWbdLsCrIZpU9VQv9nkBeAGcEQiDeRSz6gr2UduqwzW5enJc\nnpnJYedeB0BjAE4G7ofNSwlb8AopP/+XlF++gfrN
4ehLoMtFkNgkQEEfOtdcX9yVK7gr37qYq7eV\ngXHAqyJyOBAOnCMiRwIX4nTrO1TZQNl7G4mAjUJhTE2ktIOTH4QTxjgDGi2YAl+Oh1kPQJtB0O0y\naH2iM96BMcb1vJ6bQEQGAXcC3XAaHi4A7lPVz6p9UpHxQFNVHeZ5Hw/sANqr6nLPuleA9ao6urrH\nr+K8g4HB6enpI6ZNm+arw9Z5dvstdFUn39i9G0jb+DnpG2YSlb+T3KhkNqQPZGPaQHJiU/0cqW+4\n6fq6KVdwV76ByrV///6Vzk1QqxMVicj+NgZjcBoQjsBpK1AgIq8DClyJ8/z/I6CXqi72dRw2UVHo\nclOuUMN8C/Ph909gwSuw/HNn3RH9nLsFGac5wybXUW66vm7KFdyVb6ByrWqiIm/HGVgpIuUGSheR\n+iKyshqx3A3sA0bjdFPc51kHcC0QC2wG/gtc44+KgDGuFx4J7QbDRW/CLYug32jYuhzeHAaPtXMG\nNtq6PNBRGmNqkVd3BjzdCdNUdXOZ9anAn6oa7af4fMoeE4Q+N+UKPsxXC0nevpD0DZ/TcNsPhGkh\nO5OOYkP6SWxp3Iui8LrxT9xN19dNuYK78g26xwQicpbn5VvAFTgt/PcLB04A+qtqho9irRX2mCB0\nuSlX8FO+WZvg52nOY4TtK51plTud54xdkNYRsjbCW5fD0JchoXbbGrjp+ropV3BXvnXxMcHBmhLv\nn/xHgUlltuXjdDX8v0OKzhhTtySkwnG3QO+bYfUcp1Lw4xT44QVocjRExjrTL9ucCMaEDG8fE6wC\neqjqVv+H5D/2mCD0uSlXqL18I/J302vuMMK0sNy2wrBI5hxfO5OGuun6uilXcFe+QfeYIFTZY4LQ\n5aZcoZbzzdoIn94FS2dAoWeKkrBI6HElHH87xJdrY+xzbrq+bsoV3JVvMD4mKHmQBsApQDOgVN8j\nVb3vkCI0xtR9CWkQnQhF+RARAwW5kHgYfP+cM6hR9+Fw7PWQmB7oSI0x1eTtY4K/AR/izFjYGGca\n43TP+9Wq2smfQfqKPSYIfW7KFWo/3/aLHiQvqgHrmwyiyfpPicrbwaqWF9Psz7dI3TQblTA2pA9k\n7eFn+WUgIzddXzflCu7KN2gfE4jIHOAn4CacOQQ6A3twxgOYpKqv+S5c/7PHBKHLTblCHct3+yr4\n5glY+BoUFTo9EPrcCo3a+OwUdSpfP3NTruCufOviYwJvpzHrBDytTs2hEIhW1U3AP4CxPonSGBPc\nklvC4Mfhpp/hmKth8bvwdA+Yfhls+CXQ0RljquBtZSCvxOtNQHPP62yc6Y2NMcaR2MSZJOmWRc6d\ngT++hOf7wLTzYO28QEdnjKmAt5WBBUAPz+tMYLyIXAY8CViV3xhTXnwjOOFeuPlX6H83rP0BJg2E\nKYNh1WxwYU8mY+oqb9sMdAcSVHWWiDQGXgF6A78Dw1U1KCoE1oAw9LkpVwiufMML9pG+4TMOX/su\n0Xk72JWYwZrm57I9uRuIeHWMYMr3ULkpV3BXvkHbgDDUWAPC0OWmXCFI883PgYWvwtdPwK4/nSGO\n+9wG7YZAWNU3K4My3xpyU67grnyDuQFhZQeOFZHRh3IMY4zLRMY4AxXduADOfBby98Gbl8F/joGF\n/3WmWDbG1KqDVgZEpJGInCYiJ4lIuGddpIjcjDM3wW1+jtEYE4rCI6HLhXDdDzB0MoRHwXsj4amj\nYf5LzqBGxphaUWVlQER6AcuBGcDHwDciciROo8HrgftxRiQ0xpiaCQuHDmfByK/hgjcgPgX+dws8\n0Rm+fQby9jj7ZW2ky093OrMqGmN86mBTGM8EtgDjgcuBW4CVwH3AVA2yBgfWgDD0uSlXCNF8Vam/\n8xear3mTBjt/JS8ykb+aDiFm30bSN85kffoglmdcE+go/S4kr20V3JRv0DUgFJGtQF9VXSwicUAW\ncL6qvumfUGuH
NSAMXW7KFVyQ79of4KVBoEXlt0VEw92baz+mWhLy17YMN+UbjA0Ik3HuDKCqe4G9\nOMMSG2OM/x3eE25dCq0Hguz/70qgZV+46deAhmZMKPGmN0EDEUkWkYaAAome98WLn2M0xrhZQhok\nOU2TiiQCUFj1FXx0G+z8M7CxGRMivJnCeEmJ1wLMK/NegXBfBmWMMaXs2QzdLudHOtJDF8Kab2H5\n587S51bodaPTZdEYUyMHqwz0r5UojDGmKuc7E6PuycyEfpc763b9BZ/dDbP+CT+96syHkHGq16MZ\nGmMOqLIyoKpf1VYgxhhTLUlN4ZyXoftw+GgUvH4htDoBTvmXT6dNNsYNXDUcsXUtDH1uyhUs3/2k\nqIAm6z+m5apphBXl8VfTIaxpfg6FEXEBiNI37NqGrqDrWhiqrGth6HJTrmD5lpO9Gb4Y58x9kJAO\nJ94HHc8JykcHdm1DVzB2LTTGmOBRLwXOfAaunOn0QnhnBEw+FTZaN0RjqmKVAWNM6GnaHa78EgY/\nCVuXwfPHw4f/B3u3BzoyY+okbyYqihSRjSLSvjYCMsYYnwgLg26XwQ0/Qo8RzuRHT3WD+ZOhqDDQ\n0RlTpxy0MqCq+UA+zngCxhgTXGIbwKkPw9VzIKUd/O9meHGAM9SxMQbw/jHBU8AdIuLNIEXGGFP3\npHWAYR/C2ZOchoaTToR3r7FZEI3BuxEIAfoAfYF1IrII2FNyo6oO8XVgxhjjcyLQcSi0PRnmPAJz\nn4alM6DfaDjmagiPDHSExgSEt3cGtgJvAx8BfwLbyizGGBM8ouvBwLFw3ffQ7G/w2V3wbG/4Y1ag\nIzMmIFw1zoANOhT63JQrWL4+oUrDbfNovWISsTkb2dLoWFa0Hk5uTApRuds5askjLDnqdvKiG/j2\nvAdh1zZ0Bf2gQyJyBHAUTmPCpaq60jch1i4bdCh0uSlXsHx9Kj8Hvn0KZj/qvD/uFti9Dn6aCt0u\nh9Mf8895K2HXNnTVxUGHvGozICKJwCTgbKDowGp5G7hCVbN8EqkxxgRKZAwcfzt0Oh+e6ASZDxzY\nNn+Ss0REw92bAxejMX7ibZuBJ4BOOLMYxnqWEzzrHvdPaMYYEwD1D4dbl0LL43FmaQckHNqfBTfZ\nSIYmNHlbGRgCXKmqX6lqvmfJBK4CzvRbdMYYEwgJaZDc2ul9IOGghbBiJuzZEujIjPELbysDsVTc\na2A7EOO7cIwxpo7Ys9lpK3D1V5BxChTkOIMV/fAiuKjhtXEHb8cZ+Aa4X0QuUdW9ACISD4wD5vor\nOGOMCZjzXzvw+oLXYc9WeO8a+Og2+ONLOOMZiEsOXHzG+JC3dwZuBf6GM+jQVyLyFbAWOAa42V/B\nGWNMnRHfCC6cDic/BCu+cMYlWDUn0FEZ4xNeVQZU9VegDTAKmO9ZRgFtVHWx/8Izxpg6RAT+dg1c\n8TlExcGUwfDleCgsCHRkxhySgz4mEJFI4FXgTlV90f8hGWNMHdekC1z1FXz8D5g9AVbNhrMnQv1m\ngY7MmBrxdtbCk6jDsxaKSE8R+VZEZovIfz0VGGOM8Z/oenDmM87ER5uWwHPHweL3Ah2VMTXibZuB\nd4Cz/BnIIVoLDFDV44HVwBmBDccY4xodh8LIOdCwNbx5Gcy4CfL2BjoqY6rF294EfwJ3i0gfnPYC\nZWctrN1xOstQ1Q0l3uZxYJREY4zxv+SWMPxTp/3AN4/Dmm9h6EvOtMnGBAFv7wwMA3bgjDg4HLih\nxHJ9dU4oIteLyHwRyRWRl8tsSxaRd0Vkj4isEZELq3ns5jiPNGZUp5wxxhyy8Eg4cRxc8i7k7LQx\nCUxQ8erOgKq29OE51wPjgUE4gxmV9AzON/tUoAvwoYj8rKqLRSQNeL2C452vqhs98ydMBYZ52jkY\nY0ztazUARn5TYkyCWXDG0zYmganTDnpnQEQiRWSjiLT3xQlV9R1VfY8yIxp6Bj
E6G7hHVbNV9Wvg\nA+AST7mNqtqvgmWjiETgVBTGqap7piM0xtRN9Ro7YxIMehCWf+aMSbD660BHZUylvO1NkI//exO0\nBQpU9fcS634GvKmEXIAzANI9IpIpIuf5I0BjjPFaWBgcey1c+QVExjpjEsx6wMYkMHWSqBfPs0Rk\nFNARuFxVffJJFpHxQFNVHeZ53wd4U1XTSuwzArhIVfv54HxX4UysROPGjbtNnz79UA8ZNLKzs6lX\nr16gw6gVbsoVLN9gEV6wjzbLXyBt05fsSmzHkqNuJTcmpcoywZprTbkp30Dl2r9//x9VtXtF27zt\nTdAH6IszHPEiyvcmGHJoIQKQDSSWWZcIZPng2KjqC8ALABkZGdqvXz9fHDYoZGZm4pZ83ZQrWL5B\nZeAp8Mt0kv53K8cuvA2GPAVHVd4LOqhzrQE35VsXc/X2zsDkqrar6uXVPnH5OwPxOD0W2qvqcs+6\nV4D1qjq6usev5JyDgcHp6ekjpk2b5otDBgWrcYcuyzf4xOzbwFFLHiUxaznr0wexovUVFIVHl9sv\nFHKtDjflWxfvDHhVGfAlT2O/CGAM0BQYgdNWoEBEXsdpm3AlTm+Cj4Bevp7/ICMjQ5ctc087w7pY\nC/UXN+UKlm/QKsiDWePhmyeg8ZHOmASppZtHhUyuXnJTvoHKVUQqrQxU2YBQRNqKiFSxPVJEBlQz\nnruBfcBo4GLP67s9267F6W64GfgvcI1NhGSMCTkRUXDifXDxO7B3uzMmwbyJNiaBCZgq7wyISCGQ\nrqqbPe//BPqo6hrP+1Sc2/jhtRHsobLHBKHPTbmC5RsKIvN2cuRvT9Bw+wK2NDqGZRk3EFaUT8av\n/2JZx9HkRTcIdIi1IhSvbWWC7jGBiBQBaSUqA1lAZ1Vd6XmfCmxQVW9HMqwT7DFB6HJTrmD5hoyi\nIvjuP/DFWKiXAumd0WUfI92Hw+kBHe291oTsta1A0D0m8JLd1zLGmEMRFga9rgcR2L0Oln2EoDB/\nEoxNgvFVd0M05lD54s6APSao4+z2W+iyfENLVO52Wq94kcZbvkMooogwtqQcxx+thof844JQv7Yl\n1cXHBAcbZ0CBBiJSUOJ9fRHZP8h2UA22raozgBkZGRkj3HI7Cuz2WyizfENQ3hzY+h1FGkYYRaTm\nriZ1wMnOKIYhzBXX1qMu5nqwxwQCLAG2eJZ6wLwS762lvzHG+NKezdDtcn7s/hg0OxZ2/QmvnOH0\nOjDGTw52Z6B/rURhjDHGcf5rAOzJzITTP4HF78E7I2DyKXDx25DUNLDxmZBU64MOBZK1GQh9bsoV\nLN9QVjLX+jt+pcOiBygMj+WXTmPYU695gKPzPbde29pUp0YgrAusa2HoclOuYPmGsnK5blwErw2F\n/L1w/n+hRe+AxeYPrr62tcTfXQuNMcb4W1oHuOIziE+BqX+HpTMCHZEJIVYZMMaYYFG/mVMhSO8E\n0y+FeZMCHZEJEa56TGBtBkKfm3IFyzeUVZVrWGEuRy2ZQKNt81jd/FxWt7jQGbAoiNm19T9rM1CG\ntRkIXW7KFSzfUHbQXAsL4H83w09T4ehL4bR/Q/jBOojVXXZt/a+qNgOVfnJE5ElvT6CqN9YkMGOM\nMTUUHgFDnoKENJg9AbK3OFMhR8UFOjIThKqqRnb08hjuu7VgjDF1gQgMuBvqpcJHtzuDE134BsQF\n1eCwpg6otDKgqjbgkDHGBIOeI5wKwdtXwkuDnMGJ6jcLdFQmiFS7zYCI1ANUVff4JyT/sQaEoc9N\nuYLlG8pqkmvSzsV0/PWfFIZHewYnauGf4PzArq3/+aQBoYhcB/wDOMyz6i/gX6r6H59EWYusAWHo\nclOuYPmGshrnumkxvDoU8vbABdOgxXE+j80f7Nr63yEPOiQidwIPAZOAkzzLZOAhERntq0CNMcYc\notT2zlgECWkw9SxY8n6gIzJBwNtBh0YCV6
nqOFWd6VnGAtd4FmOMMXVF/cNh+CfQpAtMvwx+eDHQ\nEZk6ztvKQArO1MVl/QCk+i4cY4wxPhGXDJe+DxmnwEe3wcz7wYXjyhjveFsZ+B24sIL1FwLuefhu\njDHBJDIWzp0KR18Gcx6BD653Bisypgxvh6saC0wXkeOBbzzregN9gXP8EJcxxhhfCI+AwU9AQjp8\n9ZAzONE5kyEqPtCRmTqkOr0JugG3AO08q5YCj6rqT36Kzeesa2Hoc1OuYPmGMn/k2mTdx7RZ/gJZ\nCa35teM95Ecl+vT4h8Kurf/Z3ARlWNfC0OWmXMHyDWV+y3XpDHjrCmdQoovfhgbNfX+OGrBr63+H\n3LXQc5AYERkuIo94luEiEuu7MI0xxvhdu8FOw8I9m2HSSbDx10BHZOoAb8cZOBr4A3gU6OlZHgFW\nerYZY4wJFs2PheGfQlg4TD4VVs0OdEQmwLy9M/ACTsPBpqp6vKoeDxwOzPZsM8YYE0xS2jmDEyU2\ngVfPhkXvQNZGmHwKZG0KdHSmlnlbGWgPjC05H4Hn9X2ebcYYY4JNUlO4/GM4rBu8NdwZoOjP7+Cr\nfwU6MlPLvK0M/AY0qWB9Os4YBMYYY4JRXDKsXwAorP0OtAjmT4KxSTA+JdDRmVpSaWVARJL3L8Dd\nwJMicr6ItPAs5wOPA3fVVrDGGGP84KZfoMNQkHDnfXgkdDwHbrLGhW5R1aBDW4GS/Q4FmFZinXh+\nvg+E+z40Y4wxtSIhDaITAQUJg8J82LcTEmy0ebeoqjLQv9aiMMYYE1h7NkO3y6HTefD6BfDHTFg7\nDw7vEejITC1w1aBDNgJh6HNTrmD5hrJA5hqZt5OuP40mMj+bn7o+xN74pn4/p11b//PJCIQiEgV0\nwJnBsFRbA1X96FCDrE02AmHoclOuYPmGsoDnun2lMyhRRKynC2K6X08X8HxrUV0cgdCriYpE5ERg\nKk5FoCzF2gwYY0xoST4CLnoTXj4dXjsHLv8QYpICHZXxE2+7Fj4D/A9oCcQBsSWWOP+EZowxJqCa\ndIXzpsKWpfD6RVCQG+iIjJ94WxlIBx5Q1TWqmqOquSUXfwZojDEmgFoNgDOfhdVz4N2roago0BEZ\nP/DqMQHOXYFewEo/xmKMMaYu6nSuM1Tx5/dAvVQ4+SEQOXg5EzS8rQyMBF4TkW7AIiC/5EZVfcXX\ngRljjKlDet3gVAi+ewYS0uG4mwMdkfEhbysDg4ATgFOBvZQejEgBqwwYY0woE4GTxkP2RvhijDNQ\nUefzAx2V8RFv2ww8AjwNJKhqPVVNKLEk+jE+Y4wxdUVYmNN+oGVfeP86WPFFoCMyPuJtZaA+8FzJ\nWQuNMca4UEQ0nPeqMwXyG5fCugWBjsj4gLeVgbeBgf4MpKZEJFVE5orIVyLypYj4d2QMY4xxu5hE\nuOgtiG/ojEGw7Y9AR2QOkbdtBlYC/xSR44FfKN+A8DFfB1YNW4HjVLVIRIYBVwDjAxiPMcaEvoQ0\nuPhdeOkkePUsuOJzqGdTHgcrbysDw4EsnO6FvcpsUyBglQFVLSzxNgFYHKhYjDHGVRq1hgunw5TB\n8NpQGPYhRCcEOipTA149JlDVllUsR3h7MhG5XkTmi0iuiLxcZluyiLwrIntEZI2IXFiN43YRke+B\n6wF7gGWMMbWlaXc4ZwpsXATTL4WCvEBHZGrAq8qAiJwpIt62L6jKepxb+C9VsO0ZIA9IBS4CnhWR\n9p7zp4lIZgVLGoCqLlTVY4B7gDt8EKcxxhhvtT0JhjwJf3zp9DKwUQqDjrePCV4DskRkCvCSqtZo\nyj9VfQdARLoDxXNiikg8cDbQQVWzga9F5APgEmC0qm4E+lV0TBGJUtX9VdFdOOMgGGOMqU1dL3YG\nJfryfqc9wUn3BzoiUw1eTWEsIgnAhcDlQA/gW2ASML0m3Q1FZDzQVFWHed53Bb5R1bgS+9wG9FXV\nwQc5Vk
+ccRAKgRxguKpuqGC/q4CrABo3btxt+vTp1Q07aNk84aHL8g1dQZmrKm2Wv8Bh6z9iRavh\n/HX4GV4XDcp8ayhQufbv37/SKYxR1WotQHvgUWAjsBt4EfhbNY8xHni5xPs+wMYy+4wAMqsbnzdL\n27Zt1U1mzZoV6BBqjZtyVbV8Q1nQ5lpYoPr6xapjElV/edPrYkGbbw0EKldgvlbyd7Ha7QBUdTHw\nb+AFIAo4D5gjIt+LSKfqHs8jGyg7kmEiTg8GY4wxwSIsHM56EZr3hndHwsqvAh2R8YJXjwkARCQS\n+DtON8MTgO+BicAbQAPgAeAYVW3nxbHKPiaIB3YA7VV1uWfdK8B6VR1dzZyqOu9gYHB6evqIadOm\n+eqwdZ7dfgtdlm/oCvZcI/Kz6bLwTmJyNrOwywNkJ1Td8SzY862OuviYwNs2A08BF+CMKTAVmKiq\nS8rsk4bzx7vSuw0iEoHTaHEMTgPCEUCBqhaIyOue418JdAE+Anp57kT4VEZGhi5bVqM2kEEpMzOT\nfv36BTqMWuGmXMHyDWUhkeuudTDpJCjKhys+gwYtKt01JPL1UqByFZFDrgzMxGkb8I4eaLlfdp8I\noLeqVnpPSETG4lQEShqnqmNFJBmny+GJwDacXgQ+/fpudwZCn5tyBcs3lIVKrnF7/qTrT3eQH5nI\nT10fIj8qqcL9QiVfbwTtnYFQY3cGQpebcgXLN5SFVK5/fgevnAGp7eGyGRAVX26XkMr3IOrinYEq\nGxCKyOH7B/4psa6/Z0KgH0TEZ8/zjTHGhKhmf4OzJ8H6n+DNYVCYf9AipnYdrDfBYzgD/wAgIs2A\nGUAKsAG4T0Ru8F94xhhjQkK70+G0R2H5ZzDjZnDhXem6rMrHBCKyBrhYVed43t+B05ugnafR323A\nhap6dK1Ee4iszUDoc1OuYPmGslDNtcWq/9JizeusaXYOq464uHh9qOZbkaBrMyAi+4AMVf3T8/5T\n4BdVvd3zvi3wvao28H3Y/mNtBkKXm3IFyzeUhWyuqjDjJlgwBU59BHqOAEI43woEXZsBYCfQsMT7\nHsB3Jd4r3s9vYIwxxu1E4LTHoO0p8NHtsPg9yNpIl5/uhKxNgY7OtQ52Z+A9nCGHhwPnAC8Daaq6\nw7P9NGCCqh7l/1APnT0mCH1uyhUs31AW6rmGFebS+ed7SMhaybbko2m07QfWpw9iecY1gQ7N74Lx\nMUEnYCZQH+cuwgOqek+J7VOBLFW91rch+5c9JghdbsoVLN9Q5opc70+Bwtzy6yOi4e7NtR9PLQm6\nxwSq+gvQDhiKMxrgPWV2eR2Y4JMojTHGuMvNv0DGqQfeR8RCx3Pgpl8DF5NLHfR5v6puBd6vZNuH\nPo/IGGOMOySkQb00QACFgn0QnQgJqYGOzHVcNQKhtRkIfW7KFSzfUOaWXNsvepC8qAawbyeH7fiW\nnYkZLDz64UCH5VdB12YgVFmbgdDlplzB8g1lbsoVYPbMzzh+2VjI3gTXfOPcNQhRQddmwBhjjKkN\nReFRMPQlyMuGd0dCUVGgQ3IVqwwYY4ypG1KOhEEPwMpZ8O3TgY7GVapdGRCR+iKSXHLxR2DGGGNc\nqPtwOPJ0mHmfM7GRqRVeVQZEpLmIfOwZnngbsMWzbPX8NMYYYw6dCAx5CuIbw1tXQG52oCNyBa8a\nEIrIlzgDDz0CrMcZhriYqn7ll+h8zHoThD435QqWbyhzU65QPt/6O36l88/3sDGtP8uOvCmAkfle\n0PYmEJFs4G+qusjXwQWC9SYIXW7KFSzfUOamXKGSfGfeD3MegbMnQcehAYnLH4K5N8EqINp3IRlj\njDEH0W80NO0B/7sFdqwOdDQhzdvKwE3AgyLS2p/BGGOMMcXCI+Hsic7rt0dAYUFg4wlhlVYGRCRL\nRHaLyG7gPaAfsExE9u5fX2K7McYY43sNWsDp/4a/foCvHgp0NCGrqrkJ
rq+1KIwxxpjKdBwKK2bC\n7EfgiH7Q4rhARxRyKq0MqOqU2gzEGGOMqdSpD8Pa7+Cdq2Dk1xBnQ9z4kre9Cc4B8lT1/TLrzwAi\nVfUtP8XnU9a1MPS5KVewfEOZm3IF7/JN2L2crj+NZlvD7ixuP9oZkyAI1cWuhajqQRdgMTCogvUD\ngUXeHKMuLW3btlU3mTVrVqBDqDVuylXV8g1lbspVtRr5fv246phE1XmT/BqPPwXq2gLztZK/i972\nJjgCqKhj/grPNmOMMcb/jr0BjugPn9wJm38LdDQhw9vKwA6gTQXr2wJZvgvHGGOMqUJYGPz9OYiK\ng7eGQ35OoCMKCd5WBt4H/i0ibfevEJEM4DGcbofGGGNM7UhIgzOfhc2L4fN7Ax1NSPC2MvAPYBew\nRETWishanHYEu4Hb/RWcMcYYU6G2g+CYkfDD87Dsk0BHE/SqGmegmKruBnqLyIlAF8/qn4CZnkYJ\nxhhjTO0aOA5WfwPvXwvXzHXuGJga8XYK40tFJFpVP1fVCZ7lCyBSRC71c4zGGGNMeZExMHQS5O2F\nd6+GoqJARxS0vH1MMBlIqmB9gmebMcYYU/saZ8DJD8LKTJj7ZKCjCVreDjpUBKSq6pYy67viPCoI\niqGgbNCh0OemXMHyAudhYAAAFTdJREFUDWVuyhUOMV9V2i/+Fw23/cBPXf9FVmJFnd/qjro46FCV\nlQER+RVQoD3OOAMlp4wKB5oDH6nqub4L1/8yMjJ02bKKhk0ITW6aF91NuYLlG8rclCv4IN+92+G5\n4yA8CkbOgegEn8Xma4G6tiJSaWXgYA0I9w8z3AH4EMgusS0PWA28fagBGmOMMYckLhnOehGmnA4f\n3e6MRWC8VmVlQFXHAYjIauANVbXRHYwxxtRNLXpDn9tg9sPQ6gTodE6gIwoaXjUgVNUpVhEwxhhT\n5/X9Bxx+DPzvFti+KtDRBA1vuxZGicg4EfldRHJEpLDk4u8gjTHGGK+ER8DZE0HC4O0roTA/0BEF\nBW+7Ft4PXAY8ChThjDr4DLANuNY/oRljjDE1UL8ZDH4c1s2HzAcDHU1Q8LYycC4wUlWfBwqB91X1\nRmAMcKK/gjPGGGNqpMNZ0PVimPMYrJod6GjqPG8rA6nAEs/rbKC+5/UnwEm+DsoYY4w5ZKc8DA1b\nwTtXO10PTaW8rQz8CTTxvF4BDPK8PhbY5+ugjDHGmEMWFQ9nT4I9W+D968Gm0qmUt5WBd4ETPK+f\nAMaJyCrgZWCiH+KqNhG5QES2HHxPY4wxrtGkCwwcC8s+hPmTAh1NneXtrIV3lHj9loj8BfQCflfV\n//krOG+JSDhwDrA20LEYY4ypY/52Lf/f3r2HWVXXexx/f7iIiKCQgIh5K0XBDMNLR0OxoPKcPHZC\nn2NqRy2zzNST1dFOIKhEkaU9XtLoGOYtUzM175pKqGlgeUO0wkuZ4iUJAQERvueP3xocp5lhz8ze\ne+1Z6/N6nv3IrL3X+n2/Duz93et3Y+FdcNs3Yau9YOjIvCNqOJXeGXiHiHggIs5qhEIg82ngatJM\nBzMzs7f16JFWJOzTH37xOVjt3u2WKl1noE+zPw/P1hw4U9LYjjQm6cuS5klaJeniFs8NkvRLScsl\nPSfp0Aqv2ZM02+HnHYnFzMxKZOMh8MkL4eUn4PbJeUfTcNrtJpA0ArgW2FHSo8BhwB3AANK38K9I\nOigirquwvReAaaQBiH1bPHc+ab+DocBo4CZJj0TEfEmbA1e2cr1DsmtdFRFrJVUYhpmZlc724+GD\nx8ED58MWu8LDl8FBF0P/oXlHlrv13Rn4HvAi8O/A48DNpOmEmwADgR8Bp1TaWERcmxUOf29+XFI/\nYCIwOSKWRcS9wA3AZ7LzFkXEuFYei4CRwH9JuhXYXpI3tDYzs9aNnwKb75KWK37utzB7Rt4RNYT1\nbWH8CjAhIh6W1B9YAuweEQ9lz+8I
PBARm7Z5kdavOw3YMiKOzH7eFbgvIjZq9pqvAftGxAEduO68\nNvdqlo4BjgEYPHjwmKuuuqojIXdrZdoXvUy5gvMtsjLlCvXLd+xvDqLn2n9eonhNj97M2eeaVs6o\nvrx+t/vtt1+ntzB+F+nWPhGxVNJyYHGz5xcD1dg0emPg9RbHlnT02m0lmT03E5gJMGLEiPA+4cVU\nplzB+RZZmXKFOuY75nG4bRI8cR2sXQ09N4CRB9Lzo99iXJ26Cxrxd1vJAMKWtw5qsWrDMtI4hOYG\nAEtr0JaZmZVV/83TrIJYAwjWvAm9Niz9uIH1dROsJQ0YXJUd2h+YDbyR/dwHGB8RPTvU6D93E/Qj\n3WUYFRF/yo5dArwQERWPSaig3QOAA4YNG/b5K664olqXbXhlut1YplzB+RZZmXKF+uY76vFv8+YG\nA3m9/wh2euoHLO+7BXP3vKAubUNjdhOsrxiYVUkDEXFUJa+T1IvUNTEF2BL4PPBWRLwl6UrSXYej\nSbMJbgb2ioj5lVy7I0aMGBFPPfVUtS/bsBrxllStlClXcL5FVqZcIcd8r/0CzL8WjnsQBm1Xlybz\nylVS58YMVPoh3wGTSIVAk8OB04CppK2QfwK8TJptcGwtCgEzM7N1xk+FBb9K4wg+XZ47xi21e2eg\naNxNUHxlyhWcb5GVKVfIN9+tnruG7Z65lEd2OY3Fg0bXvL1u101QVO4mKK4y5QrOt8jKlCvknO/q\nlfDDPdNAwi/eCz1717S5Ruwm6NTeBGZmZoXRe0P46LfglSdhbjl3NizVnQF3ExRfmXIF51tkZcoV\nGiDfCHZ5dAr9l/6Z3+1xIas3aDnbvXrcTdAg3E1QXGXKFZxvkZUpV2iQfF9eABfsDWOOgE+cXbNm\n3E1gZmbWqIbsBLsfDQ9dDIseyzuauirVnQF3ExRfmXIF51tkZcoVGiffXquXseeDX2R5v615ePQ0\nqMFuuO4maBDuJiiuMuUKzrfIypQrNFi+c/8PbvoqHHwxjPqPql/e3QRmZmaNbsxRMHRnuH0yrF6R\ndzR14WLAzMysuR494ePfgSV/hfvOyTuaunAxYGZm1tK2Y2HkgXDv2bDk+byjqblSjRnwAMLiK1Ou\n4HyLrEy5QmPm22fly+zxu+N4dbM9WTDya1W7rgcQNggPICyuMuUKzrfIypQrNHC+d0+H2TPgqFtg\n672qckkPIDQzM+tO9j4RBgyHW06GtWvyjqZmXAyYmZm1ZYN+MOF0WPQo/OHSvKOpGRcDZmZm7dl5\nImz1L/DrM2DFP/KOpiZKNWbAAwiLr0y5gvMtsjLlCo2f78ZLn2bMQyfx/JYHsPC9n+vStTyAsEF4\nAGFxlSlXcL5FVqZcoZvke8MJ8PDlcOz9MHhEpy/jAYRmZmbd1YcnQ+9+cOs3oGBfpF0MmJmZVWLj\nwTDuZFj4a/jjbXlHU1UuBszMzCq1xzGw2Q5w2zfgrVV5R1M1LgbMzMwq1bM3fOzb8NrT8OCFeUdT\nNS4GzMzMOmL78bDDx2H2mbD0pbyjqYpSzSbw1MLiK1Ou4HyLrEy5QvfLt+8bL7D73ON5aei+PLXj\nCR0611MLG4SnFhZXmXIF51tkZcoVumm+t0+G+8+Bz98Fw8dUfJqnFpqZmRXFPl+HfkOyfQvW5h1N\nl7gYMDMz64wNB8D4KfD8XHjs6ryj6RIXA2ZmZp31/kNhiw/AnVNg1bK8o+k0FwNmZmad1aMH7D8D\nlr4I956VdzSd5mLAzMysK969B+zyn3D/efDaM3lH0ykuBszMzLpq/FTo0Qtun5R3JJ3iYsDMzKyr\nBmwBY0+CJ2+EhXfnHU2HlWqdAS86VHxlyhWcb5GVKVcoRr491rzJ7nOPZ22PDZi32w+IHj1bfZ0X\nHWoQXnSouMqUKzjfIitTrlCgfBfcCD8/DPY/E/Y8ptWXeNEhMzOzItvx32DbfeHub8Ebr+UdTcVc\n
DJiZmVWLlKYarloKd03LO5qKuRgwMzOrpiE7we5Hw0OzYNHjeUdTERcDZmZm1TbuFNhwU7j1FOgG\nY/NcDJiZmVXbRoPgw9+EZ+fAE9fnHc16uRgwMzOrhQ8cCUNGpa2OV6/IO5p2uRgwMzOrhZ690mDC\nJX+B+8/NO5p2uRgwMzOrlW3HwsgDYc5ZsOT5vKNpU7cvBiRtI+kVSfdkj8F5x2RmZrbOhDOAgDum\n5B1Jm7p9MZCZHRHjsscreQdjZma2zsCtYa8T4PFrYMGNjP7D/8LSl/KO6h2KUgzsLWmOpOmSlHcw\nZmZm7/Ch/4YBw+GG49lkyRMwe0beEb1DXYsBSV+WNE/SKkkXt3hukKRfSlou6TlJh1Z42ReB9wL7\nAEOAT1U3ajMzsy767rbw+t9gxWuIgHkXwdRNYNqQvCMD6n9n4AVgGvCTVp47H3gTGAocBlwgaRSA\npM2bjQlo/tg8IlZFxPJIOy5dC7y/TrmYmZlV5sRHYeeDQNnHbq++8L6D4cTH8o0r06uejUXEtQCS\ndgO2bDouqR8wEdg5IpYB90q6AfgMcEpELALGtXZNSf0jYmn241hgQe0yMDMz64T+m0OfAW+vRvjW\nyvRz/6H5xpVplDEDOwBvRcQfmx17BBhVwbkfkvSQpDnAcOCKWgRoZmbWJctfht0+y/K+W6ZCYFnj\nDCKs652BdmwMvN7i2BKg//pOjIhbgFvW9zpJxwBNm0uvktQ9do+ojs2AV/MOok7KlCs43yIrU65Q\nrnyzXK+AT9f1++vWbT3RKMXAMmBAi2MDgKWtvLZTImImMBNA0ryI2K1a1250Zcq3TLmC8y2yMuUK\n5cq3EXNtlG6CPwK9JG3f7Nj7gfk5xWNmZlYa9Z5a2EvShkBPoKekDSX1iojlpJkAp0vqJ2lv4EDg\n0nrGZ2ZmVkb1vjMwCVgBnAIcnv15Uvbcl4C+wMvAz4BjI6JWdwZm1ui6japM+ZYpV3C+RVamXKFc\n+TZcroqmaQ5mZmZWSo0yZsDMzMxy4mLAzMys5EpVDHRh/4NuR1IfSRdleS6V9LCk/fOOq9YkbS9p\npaTL8o6l1iQdImlB9vd5oaSxecdUK9lW5TdLWixpkaTzJDXK1OguWc+eLR+R9KSkNyTdLanNeeLd\nQVu5SvqgpDskvZZtSX+1pGE5hloV7f1um73mVEkhaXydw3uHUhUDtLP/QQH1Av4K7AtsQhqoeZWk\nbXKMqR7OB+bmHUStSZoAzACOIi3OtQ/wdK5B1dYPSYOLhwGjSX+vv5RrRNXT6p4tkjYjzbKaDAwC\n5gE/r3t01dXW/jQDSYPqtiEtjLMUmFXXyGqjvf14kPQe4GDShnu5KkRlXYn17X+Qa3A1kE3XnNrs\n0I2SngHGAM/mEVOtSToE+AdwP2knyyI7DTg9Ih7Ifv5bnsHUwbbAeRGxElgk6VYqW6684bW1Zwtp\nB9b5EXF19vxU4FVJO0bEk3UPtArayjVbSXYdSecBs+sbXfW187ttcj5wMqnYzVWZ7gx0Zf+Dbk/S\nUNL/g0Iu5CRpAHA6cFLesdSapJ7AbsBgSX+W9Hx227xv3rHV0A+AQyRtJGk4sD9wa84x1doo0nsU\nsK7AX0g53rP2oaDvVU0kHQysioib844FylUMdHr/g+5OUm/gcuCn3fUbRQXOAC6KiOfzDqQOhgK9\ngYNIO3WOBnbl7TU7iug3pA/B14HnSbfMr8s1otrbmPQe1Vzh37Mk7QKcCnw971hqRVJ/YDpwYt6x\nNClTMVDz/Q8akaQepJUc3wS+nHM4NSFpNDAeODvvWOpkRfbfcyPixYh4FTgL+NccY6qZ7O/wraT+\n836kTV4GksZMFFnp3rMkvZe08dyJETEn73hqaCpwaUQ8m3Mc65SpGCjd/geSBFxE+iY5MSJW5xxS\nrYwjDTz6i6RFwNeAiZJ+n2dQtRIRi0nfjpuvGFbk1cMGAVuRxg
ysioi/kwaXFbL4aWY+6T0KWDfu\n6T0U9D0rmylxJ3BGRBR9KfqPACdkM2MWAe8mDfA+Oa+ASlMMlHT/gwuAnYADImLF+l7cjc0kvUmO\nzh4XAjcBH8szqBqbBRwvaYikgcBXgBtzjqkmsjsfzwDHZvubbAocATyab2TV0daeLcAvgZ0lTcye\nPxV4tDt39bWVazYO5C5SwXdhvlFWTzu/248AO/P2e9YLwBdIAwrzERGleZC+YVwHLAf+Ahyad0w1\nzHVr0rfFlaTbjU2Pw/KOrQ65TwUuyzuOGufYmzQC+R/AIuAcYMO846phvqOBe4DFpD3vrwKG5h1X\nlXKbmv1bbf6Ymj03HniS1DV0D7BN3vHWIldgSvbn5u9Vy/KOt5a/2xavexYYn2es3pvAzMys5ErT\nTWBmZmatczFgZmZWci4GzMzMSs7FgJmZWcm5GDAzMys5FwNmZmYl52LAzHIj6chsL/emx+EVnneP\npGe72PYpLdoe15XrmXVnLgbMCkjSdpJmSnpS0huSFktaIOmnkvZr8dpnsw/De9u41sXZ85s1O9by\nQ3ytpCWS7pN0ZCdCnk7aTvy+TpzbFNM9LWIKSa9KelDSF7PdHpv7VdbmzM62aVYUvfIOwMyqK9s7\nfTawGriEtJZ9X2B74KOkjW7ubuXUvSUdGBHXd6C5c4C5pC8W7waOBmZJ2iIipnfgOndExD0deH1b\nVmUxAIi0L8chvL0097pd4iJiPjA/Wx72mCq0bdZtuRgwK54pwEbA6Ih4pOWTkjZv5ZznsnOmS7ox\nItZU2NaciLim2bVnkTYF+x9JMzpwnWp5KyIua35A0nnA08CRNNCWsWaNxN0EZsWzPfD31goBgIhY\n1MrhZcA0YCTpQ7NTIuIFYAGwCTC4s9dpImmgpB9nt/uXZ10BYzoY00rgNdI23mbWChcDZsWzEHiX\npE918LwLSbsDniapb2caltSbtN3wWtImSp2WXes20m3/m4Gvk+463Als2c55m2WPwZJGSpoBjAJ+\n1JV4zIrM3QRmxTMNmAD8QtKfgHtJ/fr3RMSCtk6KiDclTQIuJ91O/04FbfXPBhY2jRk4BRgCXJ19\nI++Ko4DdgdMjYkrTQUlPAGeTujZa6ge80uLYGuC0iJjaxXjMCst3BswKJiJ+C4wBfkq6XX8Uabvj\nJyT9RtJ27Zz+M+D3wMmSBlXQ3E9IH74vAfOAicCPgc92PoN1Pkn6IP9+i+MXAK+3cc5KUiHU9Dgc\nuB6YIunUKsRkVkguBswKKCIei4gjI2IosA1wBDAHGAtcL2mDNs4L0rf7TYFvVtDU6aQP3U9kf14F\nDKM6/fPbAS9GxDs++CNiFWlAYGvWRMSdzR6XR8RE4FZgqqSRVYjLrHBcDJgVXEQ8FxGXAPuS5vHv\nDOzRzuvvIPXLHydpq/Vc/rHsQ/em7Fb+UbxdGDSS20hTDcflHIdZQ3IxYFYS2bf+B7Mfh6/n5ScD\nGwBndLCNK0lrHHxF0jYdDLGlp4FhkgY0PyipD+muQUf0zv7bv4sxmRWSiwGzgpE0IVtIp+XxvqRF\nhwCeaO8aEfF74EpSn/v7OhjCaaRCYlIHz2vpeqAn8NUWx48FBvzzy1snScCB2Y8PdTEms0LybAKz\n4jmbNLXwBuAx4A3SSP9DgR2ASyLisQquM4k0IPADHWk8Iu6WdB9whKTpEdFW//76zCKtDHiqpG2B\n3wK7AgeTpk+29v7Vq8X+BkOATwF7A7cDv+5kLGaF5mLArHhOIn0T/hDpw3xTYAnwKDADuLiSi0TE\n05IuBE7oRAxnkAbtTSaNI+iwbKrjBOBM0syCiaQpkhOA75EGRrbUB7i02c8rgT+TBkN+P+sqMbMW\n5H8bZpaXbFOjWaQP+/uApdlsgXq03Ze0LsEhwLnAflXaH8Gs2/GYATNrBNeR1is4uI5tnpi1eW4d\n2zRrSL4zYGa5kTSMtFRwk8
fb2DuhFm1vTdrHoclDEbG4Hm2bNRoXA2ZmZiXnbgIzM7OSczFgZmZW\nci4GzMzMSs7FgJmZWcm5GDAzMys5FwNmZmYl52LAzMys5P4fGy/E/1iEHA0AAAAASUVORK5CYII=\n",
            "text/plain": [
              "<Figure size 576x360 with 1 Axes>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "bBes21qLlcS8",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 35
        },
        "outputId": "89ca03e7-8088-4a51-8335-f1ca17b09a45"
      },
      "source": [
        "print(range(11))"
      ],
      "execution_count": 31,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "range(0, 11)\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "PuvTdY-IfWvx",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        ""
      ],
      "execution_count": 0,
      "outputs": []
    }
  ]
}