{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": true,
    "pycharm": {
     "name": "#%% md\n"
    }
   },
   "source": [
    "# The credit fraud dataset - Synthesizing the minority class\n",
    "In this notebook it's presented a practical exercise of how to use the avilable library GANs to synthesize tabular data.\n",
    "For the purpose of this exercise it has been used the Credit Fraud dataset from Kaggle, that you can find here:https: //www.kaggle.com/mlg-ulb/creditcardfraud"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {
    "pycharm": {
     "is_executing": true,
     "name": "#%%\n"
    }
   },
   "outputs": [],
   "source": [
    "import importlib\n",
    "\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import sklearn.cluster as cluster\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "from models.gan import model\n",
    "importlib.reload(model)\n",
    "\n",
    "from models.gan.model import GAN\n",
    "from preprocessing.credit_fraud import *\n",
    "\n",
    "model = GAN"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Dataset columns: ['Time', 'V1', 'V2', 'V3', 'V4', 'V5', 'V6', 'V7', 'V8', 'V9', 'V10', 'V11', 'V12', 'V13', 'V14', 'V15', 'V16', 'V17', 'V18', 'V19', 'V20', 'V21', 'V22', 'V23', 'V24', 'V25', 'V26', 'V27', 'V28', 'Amount']\n"
     ]
    }
   ],
   "source": [
    "#Read the data\n",
    "data = pd.read_csv('data/data_processed.csv', index_col=[0])\n",
    "data_cols = list(data.columns[ data.columns != 'Class' ])\n",
    "label_cols = ['Class']\n",
    "\n",
    "print('Dataset columns: {}'.format(data_cols))\n",
    "sorted_cols = ['V14', 'V4', 'V10', 'V17', 'Time', 'V12', 'V26', 'Amount', 'V21', 'V8', 'V11', 'V7', 'V28', 'V19', 'V3', 'V22', 'V6', 'V20', 'V27', 'V16', 'V13', 'V25', 'V24', 'V18', 'V2', 'V1', 'V5', 'V15', 'V9', 'V23', 'Class']\n",
    "data = data[ sorted_cols ].copy()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Dataset info: Number of records - 492 Number of varibles - 31\n",
      "   count\n",
      "0    384\n",
      "1    108\n"
     ]
    }
   ],
   "source": [
    "#Before training the GAN do not forget to apply the required data transformations\n",
    "#To ease here we've applied a PowerTransformation\n",
    "data = transformations(data)\n",
    "\n",
    "#For the purpose of this example we will only synthesize the minority class\n",
    "train_data = data.loc[ data['Class']==1 ].copy()\n",
    "\n",
    "print(\"Dataset info: Number of records - {} Number of varibles - {}\".format(train_data.shape[0], train_data.shape[1]))\n",
    "\n",
    "algorithm = cluster.KMeans\n",
    "args, kwds = (), {'n_clusters':2, 'random_state':0}\n",
    "labels = algorithm(*args, **kwds).fit_predict(train_data[ data_cols ])\n",
    "\n",
    "print( pd.DataFrame( [ [np.sum(labels==i)] for i in np.unique(labels) ], columns=['count'], index=np.unique(labels) ) )\n",
    "\n",
    "fraud_w_classes = train_data.copy()\n",
    "fraud_w_classes['Class'] = labels"
   ]
  },
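  {
   "cell_type": "markdown",
   "metadata": {
    "pycharm": {
     "name": "#%% md\n"
    }
   },
   "source": [
    "The `transformations` helper used above is imported from `preprocessing.credit_fraud`. As a hypothetical reference only (the repository's implementation may differ), a minimal version built on scikit-learn's `PowerTransformer` could look like the sketch below."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "outputs": [],
   "source": [
    "#Hypothetical sketch of a transformations() helper - NOT the repository's implementation\n",
    "from sklearn.preprocessing import PowerTransformer\n",
    "\n",
    "def transformations_sketch(df, label_col='Class'):\n",
    "    #Power-transform every column except the label to make the marginals closer to Gaussian\n",
    "    feature_cols = [c for c in df.columns if c != label_col]\n",
    "    out = df.copy()\n",
    "    out[feature_cols] = PowerTransformer(method='yeo-johnson', standardize=True).fit_transform(out[feature_cols])\n",
    "    return out"
   ]
  },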
  {
   "cell_type": "markdown",
   "metadata": {
    "pycharm": {
     "name": "#%% md\n"
    }
   },
   "source": [
    "# GAN training\n",
    "\n",
    "Below you can try to train your own generators using the available GANs architectures. You can train it either with labels (created using KMeans) or with no labels at all. \n",
    "\n",
    "Remeber that for this exercise in particular we've decided to synthesize only the minority class from the Credit Fraud dataset."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "outputs": [],
   "source": [
    "#Define the GAN and training parameters\n",
    "noise_dim = 32\n",
    "dim = 128\n",
    "batch_size = 128\n",
    "\n",
    "log_step = 100\n",
    "epochs = 5000+1\n",
    "learning_rate = 5e-4\n",
    "models_dir = './cache'\n",
    "\n",
    "train_sample = fraud_w_classes.copy().reset_index(drop=True)\n",
    "train_sample = pd.get_dummies(train_sample, columns=['Class'], prefix='Class', drop_first=True)\n",
    "label_cols = [ i for i in train_sample.columns if 'Class' in i ]\n",
    "data_cols = [ i for i in train_sample.columns if i not in label_cols ]\n",
    "train_sample[ data_cols ] = train_sample[ data_cols ] / 10 # scale to random noise size, one less thing to learn\n",
    "train_no_label = train_sample[ data_cols ]\n",
    "\n",
    "gan_args = [batch_size, learning_rate, noise_dim, train_sample.shape[1], dim]\n",
    "train_args = ['', epochs, log_step]"
   ]
  },
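  {
   "cell_type": "markdown",
   "metadata": {
    "pycharm": {
     "name": "#%% md\n"
    }
   },
   "source": [
    "Before running the training cell, it may help to see where the `[D loss ...] [G loss ...]` log lines come from. The cell below is a generic, hypothetical sketch of a vanilla GAN training step in Keras (it is **not** the repository's `models/gan/model.py`): the discriminator is trained on a real batch and a generated batch, then the generator is trained through the frozen discriminator to fool it. It reuses the `noise_dim`, `dim` and `learning_rate` values defined above."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "outputs": [],
   "source": [
    "#Hypothetical vanilla GAN sketch - NOT the repository's models/gan/model.py\n",
    "import numpy as np\n",
    "from tensorflow.keras import layers, models, optimizers\n",
    "\n",
    "data_dim = train_sample.shape[1]\n",
    "\n",
    "#Generator: maps noise vectors to synthetic records of the training-data width\n",
    "generator_sketch = models.Sequential([\n",
    "    layers.Dense(dim, activation='relu', input_shape=(noise_dim,)),\n",
    "    layers.Dense(dim * 2, activation='relu'),\n",
    "    layers.Dense(data_dim)\n",
    "])\n",
    "\n",
    "#Discriminator: scores records as real (1) or generated (0)\n",
    "discriminator_sketch = models.Sequential([\n",
    "    layers.Dense(dim * 2, activation='relu', input_shape=(data_dim,)),\n",
    "    layers.Dense(dim, activation='relu'),\n",
    "    layers.Dense(1, activation='sigmoid')\n",
    "])\n",
    "discriminator_sketch.compile(optimizer=optimizers.Adam(learning_rate), loss='binary_crossentropy', metrics=['accuracy'])\n",
    "\n",
    "#Combined model: with the discriminator frozen, only the generator's weights get updated\n",
    "discriminator_sketch.trainable = False\n",
    "noise_in = layers.Input(shape=(noise_dim,))\n",
    "combined_sketch = models.Model(noise_in, discriminator_sketch(generator_sketch(noise_in)))\n",
    "combined_sketch.compile(optimizer=optimizers.Adam(learning_rate), loss='binary_crossentropy')\n",
    "\n",
    "def gan_train_step(real_batch):\n",
    "    noise = np.random.normal(0, 1, (len(real_batch), noise_dim))\n",
    "    fake_batch = generator_sketch.predict(noise, verbose=0)\n",
    "    #Discriminator step: one real batch (label 1) and one generated batch (label 0)\n",
    "    d_real = discriminator_sketch.train_on_batch(real_batch, np.ones((len(real_batch), 1)))\n",
    "    d_fake = discriminator_sketch.train_on_batch(fake_batch, np.zeros((len(real_batch), 1)))\n",
    "    d_loss, d_acc = 0.5 * np.add(d_real, d_fake)\n",
    "    #Generator step: fakes labelled as real, gradients flow through the frozen discriminator\n",
    "    g_loss = combined_sketch.train_on_batch(noise, np.ones((len(real_batch), 1)))\n",
    "    return d_loss, d_acc, g_loss"
   ]
  },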
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {
    "pycharm": {
     "is_executing": true,
     "name": "#%%\n"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 [D loss: 0.705997, acc.: 45.31%] [G loss: 0.663525]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "1 [D loss: 0.678726, acc.: 50.00%] [G loss: 0.669143]\n",
      "2 [D loss: 0.665418, acc.: 50.00%] [G loss: 0.662125]\n",
      "3 [D loss: 0.676714, acc.: 50.00%] [G loss: 0.635445]\n",
      "4 [D loss: 0.734472, acc.: 50.00%] [G loss: 0.589705]\n",
      "5 [D loss: 0.780536, acc.: 50.00%] [G loss: 0.580895]\n",
      "6 [D loss: 0.795835, acc.: 48.83%] [G loss: 0.629809]\n",
      "7 [D loss: 0.762679, acc.: 45.70%] [G loss: 0.754075]\n",
      "8 [D loss: 0.722075, acc.: 29.69%] [G loss: 0.903090]\n",
      "9 [D loss: 0.667899, acc.: 61.33%] [G loss: 1.034845]\n",
      "10 [D loss: 0.637842, acc.: 60.55%] [G loss: 1.157126]\n",
      "11 [D loss: 0.627896, acc.: 60.94%] [G loss: 1.148153]\n",
      "12 [D loss: 0.633042, acc.: 60.94%] [G loss: 1.105542]\n",
      "13 [D loss: 0.650867, acc.: 65.23%] [G loss: 0.971893]\n",
      "14 [D loss: 0.657101, acc.: 76.56%] [G loss: 0.868371]\n",
      "15 [D loss: 0.648712, acc.: 89.45%] [G loss: 0.823496]\n",
      "16 [D loss: 0.635049, acc.: 90.62%] [G loss: 0.799988]\n",
      "17 [D loss: 0.630502, acc.: 73.83%] [G loss: 0.755666]\n",
      "18 [D loss: 0.640778, acc.: 54.69%] [G loss: 0.713664]\n",
      "19 [D loss: 0.656893, acc.: 48.05%] [G loss: 0.669913]\n",
      "20 [D loss: 0.670864, acc.: 49.22%] [G loss: 0.655636]\n",
      "21 [D loss: 0.675279, acc.: 49.22%] [G loss: 0.653515]\n",
      "22 [D loss: 0.670422, acc.: 48.83%] [G loss: 0.669898]\n",
      "23 [D loss: 0.653587, acc.: 48.83%] [G loss: 0.718723]\n",
      "24 [D loss: 0.643774, acc.: 50.78%] [G loss: 0.773866]\n",
      "25 [D loss: 0.630972, acc.: 59.38%] [G loss: 0.819457]\n",
      "26 [D loss: 0.611089, acc.: 78.91%] [G loss: 0.861762]\n",
      "27 [D loss: 0.619663, acc.: 72.27%] [G loss: 0.893804]\n",
      "28 [D loss: 0.640829, acc.: 55.08%] [G loss: 0.836550]\n",
      "29 [D loss: 0.681983, acc.: 33.98%] [G loss: 0.763735]\n",
      "30 [D loss: 0.702252, acc.: 28.91%] [G loss: 0.733090]\n",
      "31 [D loss: 0.680943, acc.: 32.03%] [G loss: 0.793396]\n",
      "32 [D loss: 0.638666, acc.: 57.81%] [G loss: 0.876553]\n",
      "33 [D loss: 0.595042, acc.: 73.83%] [G loss: 0.952805]\n",
      "34 [D loss: 0.588190, acc.: 75.78%] [G loss: 0.943770]\n",
      "35 [D loss: 0.634701, acc.: 68.36%] [G loss: 0.878327]\n",
      "36 [D loss: 0.665770, acc.: 58.20%] [G loss: 0.813296]\n",
      "37 [D loss: 0.706418, acc.: 51.95%] [G loss: 0.781123]\n",
      "38 [D loss: 0.724977, acc.: 46.48%] [G loss: 0.838987]\n",
      "39 [D loss: 0.633367, acc.: 66.41%] [G loss: 0.934962]\n",
      "40 [D loss: 0.616486, acc.: 76.17%] [G loss: 0.914525]\n",
      "41 [D loss: 0.612142, acc.: 72.66%] [G loss: 0.877021]\n",
      "42 [D loss: 0.604888, acc.: 73.05%] [G loss: 0.846837]\n",
      "43 [D loss: 0.595616, acc.: 71.09%] [G loss: 0.840786]\n",
      "44 [D loss: 0.596308, acc.: 65.23%] [G loss: 0.846665]\n",
      "45 [D loss: 0.607072, acc.: 60.94%] [G loss: 0.840759]\n",
      "46 [D loss: 0.610253, acc.: 62.89%] [G loss: 0.850046]\n",
      "47 [D loss: 0.652381, acc.: 52.73%] [G loss: 0.831791]\n",
      "48 [D loss: 0.718699, acc.: 43.75%] [G loss: 0.768919]\n",
      "49 [D loss: 0.784891, acc.: 34.38%] [G loss: 0.691103]\n",
      "50 [D loss: 0.823904, acc.: 30.47%] [G loss: 0.623173]\n",
      "51 [D loss: 0.836898, acc.: 30.47%] [G loss: 0.688086]\n",
      "52 [D loss: 0.756118, acc.: 32.42%] [G loss: 0.809737]\n",
      "53 [D loss: 0.720253, acc.: 39.84%] [G loss: 1.010127]\n",
      "54 [D loss: 0.685523, acc.: 59.38%] [G loss: 1.024283]\n",
      "55 [D loss: 0.693720, acc.: 60.94%] [G loss: 0.971958]\n",
      "56 [D loss: 0.688350, acc.: 62.89%] [G loss: 0.917142]\n",
      "57 [D loss: 0.676993, acc.: 64.84%] [G loss: 0.899749]\n",
      "58 [D loss: 0.678179, acc.: 63.67%] [G loss: 0.860112]\n",
      "59 [D loss: 0.694381, acc.: 57.03%] [G loss: 0.818999]\n",
      "60 [D loss: 0.691374, acc.: 58.20%] [G loss: 0.749724]\n",
      "61 [D loss: 0.684572, acc.: 59.38%] [G loss: 0.727800]\n",
      "62 [D loss: 0.759864, acc.: 53.52%] [G loss: 0.701854]\n",
      "63 [D loss: 0.748150, acc.: 41.80%] [G loss: 0.684588]\n",
      "64 [D loss: 0.691595, acc.: 45.70%] [G loss: 0.718405]\n",
      "65 [D loss: 0.652284, acc.: 61.72%] [G loss: 0.817149]\n",
      "66 [D loss: 0.631424, acc.: 66.02%] [G loss: 0.829368]\n",
      "67 [D loss: 0.626476, acc.: 67.97%] [G loss: 0.803602]\n",
      "68 [D loss: 0.693286, acc.: 57.03%] [G loss: 0.723229]\n",
      "69 [D loss: 0.731528, acc.: 52.34%] [G loss: 0.728399]\n",
      "70 [D loss: 0.706829, acc.: 53.52%] [G loss: 0.813890]\n",
      "71 [D loss: 0.696377, acc.: 55.86%] [G loss: 0.856457]\n",
      "72 [D loss: 0.726955, acc.: 49.22%] [G loss: 0.856223]\n",
      "73 [D loss: 0.726334, acc.: 46.48%] [G loss: 0.873440]\n",
      "74 [D loss: 0.721425, acc.: 51.95%] [G loss: 0.853529]\n",
      "75 [D loss: 0.716419, acc.: 49.61%] [G loss: 0.868509]\n",
      "76 [D loss: 0.693751, acc.: 57.03%] [G loss: 0.883439]\n",
      "77 [D loss: 0.689861, acc.: 53.12%] [G loss: 0.854034]\n",
      "78 [D loss: 0.674652, acc.: 59.38%] [G loss: 0.841300]\n",
      "79 [D loss: 0.668063, acc.: 59.38%] [G loss: 0.829428]\n",
      "80 [D loss: 0.656394, acc.: 61.33%] [G loss: 0.813333]\n",
      "81 [D loss: 0.644218, acc.: 62.11%] [G loss: 0.832922]\n",
      "82 [D loss: 0.635672, acc.: 61.33%] [G loss: 0.860685]\n",
      "83 [D loss: 0.624960, acc.: 62.50%] [G loss: 0.865571]\n",
      "84 [D loss: 0.621409, acc.: 63.67%] [G loss: 0.867522]\n",
      "85 [D loss: 0.635991, acc.: 57.03%] [G loss: 0.874113]\n",
      "86 [D loss: 0.641145, acc.: 58.59%] [G loss: 0.860121]\n",
      "87 [D loss: 0.662668, acc.: 55.86%] [G loss: 0.825621]\n",
      "88 [D loss: 0.673410, acc.: 53.12%] [G loss: 0.810534]\n",
      "89 [D loss: 0.690926, acc.: 50.00%] [G loss: 0.794043]\n",
      "90 [D loss: 0.703855, acc.: 46.48%] [G loss: 0.760478]\n",
      "91 [D loss: 0.723397, acc.: 43.75%] [G loss: 0.759648]\n",
      "92 [D loss: 0.690207, acc.: 50.39%] [G loss: 0.782468]\n",
      "93 [D loss: 0.681420, acc.: 53.12%] [G loss: 0.799852]\n",
      "94 [D loss: 0.667709, acc.: 58.98%] [G loss: 0.802741]\n",
      "95 [D loss: 0.663979, acc.: 59.77%] [G loss: 0.809889]\n",
      "96 [D loss: 0.646946, acc.: 66.41%] [G loss: 0.802969]\n",
      "97 [D loss: 0.654988, acc.: 63.67%] [G loss: 0.821092]\n",
      "98 [D loss: 0.638357, acc.: 66.80%] [G loss: 0.840139]\n",
      "99 [D loss: 0.641091, acc.: 70.70%] [G loss: 0.857268]\n",
      "100 [D loss: 0.649723, acc.: 68.36%] [G loss: 0.876566]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "101 [D loss: 0.650478, acc.: 71.09%] [G loss: 0.877052]\n",
      "102 [D loss: 0.670941, acc.: 64.06%] [G loss: 0.893847]\n",
      "103 [D loss: 0.655939, acc.: 69.53%] [G loss: 0.873235]\n",
      "104 [D loss: 0.680627, acc.: 62.11%] [G loss: 0.842607]\n",
      "105 [D loss: 0.702811, acc.: 55.47%] [G loss: 0.856117]\n",
      "106 [D loss: 0.687399, acc.: 58.59%] [G loss: 0.848018]\n",
      "107 [D loss: 0.696323, acc.: 50.00%] [G loss: 0.855598]\n",
      "108 [D loss: 0.694026, acc.: 48.83%] [G loss: 0.859507]\n",
      "109 [D loss: 0.688666, acc.: 50.39%] [G loss: 0.871170]\n",
      "110 [D loss: 0.680081, acc.: 55.08%] [G loss: 0.878243]\n",
      "111 [D loss: 0.670671, acc.: 57.42%] [G loss: 0.903731]\n",
      "112 [D loss: 0.638997, acc.: 65.62%] [G loss: 0.958053]\n",
      "113 [D loss: 0.610548, acc.: 67.58%] [G loss: 0.983237]\n",
      "114 [D loss: 0.593641, acc.: 73.83%] [G loss: 0.982540]\n",
      "115 [D loss: 0.599292, acc.: 73.83%] [G loss: 0.934894]\n",
      "116 [D loss: 0.619143, acc.: 70.31%] [G loss: 0.871132]\n",
      "117 [D loss: 0.630256, acc.: 62.11%] [G loss: 0.804856]\n",
      "118 [D loss: 0.670037, acc.: 54.69%] [G loss: 0.790979]\n",
      "119 [D loss: 0.685217, acc.: 51.17%] [G loss: 0.727117]\n",
      "120 [D loss: 0.686870, acc.: 46.09%] [G loss: 0.706138]\n",
      "121 [D loss: 0.730991, acc.: 37.50%] [G loss: 0.707133]\n",
      "122 [D loss: 0.707953, acc.: 39.45%] [G loss: 0.739241]\n",
      "123 [D loss: 0.698698, acc.: 46.09%] [G loss: 0.768378]\n",
      "124 [D loss: 0.686962, acc.: 48.44%] [G loss: 0.821781]\n",
      "125 [D loss: 0.663789, acc.: 62.50%] [G loss: 0.915383]\n",
      "126 [D loss: 0.648235, acc.: 62.11%] [G loss: 0.980229]\n",
      "127 [D loss: 0.644733, acc.: 66.02%] [G loss: 0.995405]\n",
      "128 [D loss: 0.649354, acc.: 64.06%] [G loss: 0.964900]\n",
      "129 [D loss: 0.652510, acc.: 69.92%] [G loss: 0.944536]\n",
      "130 [D loss: 0.657968, acc.: 70.70%] [G loss: 0.917873]\n",
      "131 [D loss: 0.666232, acc.: 71.09%] [G loss: 0.877040]\n",
      "132 [D loss: 0.660665, acc.: 71.88%] [G loss: 0.848143]\n",
      "133 [D loss: 0.669009, acc.: 63.28%] [G loss: 0.860708]\n",
      "134 [D loss: 0.669875, acc.: 65.23%] [G loss: 0.856517]\n",
      "135 [D loss: 0.669069, acc.: 69.14%] [G loss: 0.817083]\n",
      "136 [D loss: 0.690098, acc.: 57.42%] [G loss: 0.782348]\n",
      "137 [D loss: 0.685209, acc.: 57.42%] [G loss: 0.789208]\n",
      "138 [D loss: 0.667878, acc.: 61.72%] [G loss: 0.834830]\n",
      "139 [D loss: 0.669666, acc.: 57.03%] [G loss: 0.877269]\n",
      "140 [D loss: 0.654367, acc.: 57.81%] [G loss: 0.901602]\n",
      "141 [D loss: 0.658674, acc.: 59.77%] [G loss: 0.875875]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "142 [D loss: 0.674553, acc.: 55.47%] [G loss: 0.827785]\n",
      "143 [D loss: 0.705194, acc.: 45.31%] [G loss: 0.776109]\n",
      "144 [D loss: 0.730318, acc.: 37.89%] [G loss: 0.763589]\n",
      "145 [D loss: 0.737484, acc.: 36.72%] [G loss: 0.797294]\n",
      "146 [D loss: 0.710896, acc.: 46.88%] [G loss: 0.839641]\n",
      "147 [D loss: 0.669995, acc.: 64.45%] [G loss: 0.932175]\n",
      "148 [D loss: 0.624906, acc.: 67.97%] [G loss: 1.064448]\n",
      "149 [D loss: 0.590154, acc.: 69.92%] [G loss: 1.129902]\n",
      "150 [D loss: 0.583595, acc.: 73.44%] [G loss: 1.023695]\n",
      "151 [D loss: 0.588880, acc.: 75.00%] [G loss: 0.969657]\n",
      "152 [D loss: 0.602711, acc.: 73.44%] [G loss: 0.938855]\n",
      "153 [D loss: 0.603706, acc.: 73.83%] [G loss: 0.903461]\n",
      "154 [D loss: 0.636310, acc.: 62.50%] [G loss: 0.808761]\n",
      "155 [D loss: 0.665782, acc.: 49.22%] [G loss: 0.767276]\n",
      "156 [D loss: 0.693030, acc.: 38.28%] [G loss: 0.722557]\n",
      "157 [D loss: 0.695195, acc.: 35.94%] [G loss: 0.713708]\n",
      "158 [D loss: 0.697298, acc.: 38.28%] [G loss: 0.721151]\n",
      "159 [D loss: 0.703566, acc.: 40.23%] [G loss: 0.764550]\n",
      "160 [D loss: 0.674518, acc.: 51.17%] [G loss: 0.842290]\n",
      "161 [D loss: 0.620708, acc.: 55.86%] [G loss: 0.957958]\n",
      "162 [D loss: 0.539392, acc.: 76.95%] [G loss: 1.115093]\n",
      "163 [D loss: 0.493719, acc.: 83.20%] [G loss: 1.159814]\n",
      "164 [D loss: 0.472741, acc.: 86.33%] [G loss: 1.138010]\n",
      "165 [D loss: 0.480393, acc.: 83.98%] [G loss: 1.072968]\n",
      "166 [D loss: 0.545802, acc.: 77.73%] [G loss: 0.940032]\n",
      "167 [D loss: 0.636922, acc.: 66.02%] [G loss: 0.875162]\n",
      "168 [D loss: 0.708426, acc.: 58.59%] [G loss: 0.844790]\n",
      "169 [D loss: 0.700005, acc.: 57.03%] [G loss: 0.861305]\n",
      "170 [D loss: 0.711447, acc.: 49.61%] [G loss: 0.859598]\n",
      "171 [D loss: 0.807584, acc.: 40.23%] [G loss: 0.927603]\n",
      "172 [D loss: 0.794743, acc.: 38.28%] [G loss: 1.178186]\n",
      "173 [D loss: 0.730028, acc.: 53.12%] [G loss: 1.360699]\n",
      "174 [D loss: 0.736807, acc.: 48.05%] [G loss: 1.279357]\n",
      "175 [D loss: 0.713264, acc.: 51.17%] [G loss: 1.223217]\n",
      "176 [D loss: 0.686524, acc.: 54.69%] [G loss: 1.147047]\n",
      "177 [D loss: 0.682560, acc.: 53.91%] [G loss: 1.122489]\n",
      "178 [D loss: 0.639881, acc.: 59.38%] [G loss: 1.118152]\n",
      "179 [D loss: 0.631581, acc.: 62.50%] [G loss: 1.032568]\n",
      "180 [D loss: 0.640407, acc.: 62.89%] [G loss: 0.985702]\n",
      "181 [D loss: 0.640306, acc.: 65.62%] [G loss: 0.918212]\n",
      "182 [D loss: 0.659661, acc.: 63.28%] [G loss: 0.902816]\n",
      "183 [D loss: 0.632585, acc.: 68.36%] [G loss: 0.899401]\n",
      "184 [D loss: 0.625166, acc.: 66.80%] [G loss: 0.920232]\n",
      "185 [D loss: 0.616695, acc.: 68.75%] [G loss: 0.957885]\n",
      "186 [D loss: 0.618095, acc.: 69.92%] [G loss: 0.927362]\n",
      "187 [D loss: 0.629681, acc.: 62.89%] [G loss: 0.914231]\n",
      "188 [D loss: 0.627924, acc.: 65.23%] [G loss: 0.900934]\n",
      "189 [D loss: 0.626851, acc.: 67.58%] [G loss: 0.934206]\n",
      "190 [D loss: 0.600160, acc.: 68.36%] [G loss: 0.985827]\n",
      "191 [D loss: 0.587479, acc.: 68.36%] [G loss: 0.994842]\n",
      "192 [D loss: 0.601516, acc.: 66.80%] [G loss: 0.989319]\n",
      "193 [D loss: 0.584061, acc.: 69.92%] [G loss: 0.927907]\n",
      "194 [D loss: 0.632389, acc.: 69.53%] [G loss: 0.877204]\n",
      "195 [D loss: 0.652724, acc.: 65.62%] [G loss: 0.849141]\n",
      "196 [D loss: 0.690305, acc.: 60.16%] [G loss: 0.917344]\n",
      "197 [D loss: 0.668836, acc.: 62.11%] [G loss: 0.947597]\n",
      "198 [D loss: 0.679289, acc.: 58.20%] [G loss: 0.915517]\n",
      "199 [D loss: 0.710677, acc.: 48.05%] [G loss: 0.835190]\n",
      "200 [D loss: 0.717374, acc.: 42.58%] [G loss: 0.802860]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "201 [D loss: 0.706705, acc.: 46.88%] [G loss: 0.784524]\n",
      "202 [D loss: 0.692213, acc.: 49.22%] [G loss: 0.787994]\n",
      "203 [D loss: 0.662103, acc.: 59.77%] [G loss: 0.852543]\n",
      "204 [D loss: 0.621150, acc.: 68.36%] [G loss: 0.933039]\n",
      "205 [D loss: 0.621188, acc.: 67.97%] [G loss: 0.902069]\n",
      "206 [D loss: 0.622703, acc.: 66.41%] [G loss: 0.896634]\n",
      "207 [D loss: 0.644745, acc.: 61.33%] [G loss: 0.857450]\n",
      "208 [D loss: 0.645940, acc.: 59.77%] [G loss: 0.841081]\n",
      "209 [D loss: 0.654876, acc.: 55.86%] [G loss: 0.847325]\n",
      "210 [D loss: 0.652452, acc.: 59.38%] [G loss: 0.832549]\n",
      "211 [D loss: 0.655730, acc.: 54.30%] [G loss: 0.840635]\n",
      "212 [D loss: 0.663213, acc.: 50.78%] [G loss: 0.846441]\n",
      "213 [D loss: 0.647366, acc.: 61.33%] [G loss: 0.875400]\n",
      "214 [D loss: 0.627167, acc.: 64.45%] [G loss: 0.931750]\n",
      "215 [D loss: 0.609330, acc.: 65.23%] [G loss: 0.957603]\n",
      "216 [D loss: 0.598883, acc.: 65.23%] [G loss: 0.995056]\n",
      "217 [D loss: 0.586573, acc.: 66.02%] [G loss: 0.991850]\n",
      "218 [D loss: 0.592384, acc.: 64.84%] [G loss: 0.945102]\n",
      "219 [D loss: 0.602209, acc.: 63.28%] [G loss: 0.926890]\n",
      "220 [D loss: 0.610486, acc.: 63.28%] [G loss: 0.905333]\n",
      "221 [D loss: 0.658309, acc.: 56.25%] [G loss: 0.897051]\n",
      "222 [D loss: 0.656848, acc.: 57.03%] [G loss: 0.911230]\n",
      "223 [D loss: 0.673666, acc.: 55.86%] [G loss: 0.968511]\n",
      "224 [D loss: 0.660692, acc.: 53.12%] [G loss: 1.068233]\n",
      "225 [D loss: 0.703369, acc.: 48.05%] [G loss: 1.202802]\n",
      "226 [D loss: 0.656186, acc.: 56.25%] [G loss: 1.179464]\n",
      "227 [D loss: 0.678010, acc.: 54.69%] [G loss: 1.046578]\n",
      "228 [D loss: 0.680929, acc.: 56.25%] [G loss: 0.978911]\n",
      "229 [D loss: 0.661666, acc.: 59.77%] [G loss: 0.919019]\n",
      "230 [D loss: 0.658415, acc.: 60.94%] [G loss: 0.912087]\n",
      "231 [D loss: 0.638484, acc.: 65.62%] [G loss: 0.903029]\n",
      "232 [D loss: 0.637609, acc.: 64.84%] [G loss: 0.912843]\n",
      "233 [D loss: 0.631995, acc.: 66.80%] [G loss: 0.868675]\n",
      "234 [D loss: 0.644265, acc.: 68.75%] [G loss: 0.842701]\n",
      "235 [D loss: 0.638742, acc.: 66.41%] [G loss: 0.838256]\n",
      "236 [D loss: 0.650487, acc.: 66.02%] [G loss: 0.885451]\n",
      "237 [D loss: 0.629442, acc.: 69.14%] [G loss: 0.894328]\n",
      "238 [D loss: 0.652901, acc.: 55.86%] [G loss: 0.884284]\n",
      "239 [D loss: 0.678525, acc.: 61.33%] [G loss: 0.883216]\n",
      "240 [D loss: 0.675291, acc.: 55.86%] [G loss: 0.976384]\n",
      "241 [D loss: 0.663974, acc.: 59.77%] [G loss: 1.049444]\n",
      "242 [D loss: 0.634650, acc.: 63.28%] [G loss: 1.090034]\n",
      "243 [D loss: 0.652852, acc.: 61.72%] [G loss: 1.036784]\n",
      "244 [D loss: 0.661498, acc.: 61.72%] [G loss: 0.947316]\n",
      "245 [D loss: 0.673319, acc.: 61.33%] [G loss: 0.866781]\n",
      "246 [D loss: 0.690439, acc.: 60.55%] [G loss: 0.808295]\n",
      "247 [D loss: 0.688058, acc.: 54.69%] [G loss: 0.778114]\n",
      "248 [D loss: 0.699226, acc.: 40.62%] [G loss: 0.741400]\n",
      "249 [D loss: 0.708043, acc.: 38.67%] [G loss: 0.760190]\n",
      "250 [D loss: 0.684587, acc.: 48.83%] [G loss: 0.794664]\n",
      "251 [D loss: 0.650398, acc.: 61.33%] [G loss: 0.846968]\n",
      "252 [D loss: 0.652888, acc.: 60.55%] [G loss: 0.860220]\n",
      "253 [D loss: 0.665226, acc.: 58.20%] [G loss: 0.884385]\n",
      "254 [D loss: 0.642959, acc.: 58.20%] [G loss: 0.902876]\n",
      "255 [D loss: 0.655920, acc.: 62.50%] [G loss: 0.862844]\n",
      "256 [D loss: 0.660866, acc.: 59.38%] [G loss: 0.837683]\n",
      "257 [D loss: 0.659044, acc.: 62.11%] [G loss: 0.805222]\n",
      "258 [D loss: 0.676117, acc.: 57.03%] [G loss: 0.798538]\n",
      "259 [D loss: 0.664534, acc.: 57.42%] [G loss: 0.805651]\n",
      "260 [D loss: 0.662955, acc.: 57.03%] [G loss: 0.806651]\n",
      "261 [D loss: 0.654808, acc.: 60.94%] [G loss: 0.823302]\n",
      "262 [D loss: 0.650253, acc.: 59.77%] [G loss: 0.818892]\n",
      "263 [D loss: 0.650512, acc.: 60.16%] [G loss: 0.795239]\n",
      "264 [D loss: 0.657741, acc.: 59.38%] [G loss: 0.812497]\n",
      "265 [D loss: 0.651711, acc.: 60.16%] [G loss: 0.843656]\n",
      "266 [D loss: 0.657604, acc.: 61.33%] [G loss: 0.832234]\n",
      "267 [D loss: 0.655225, acc.: 59.38%] [G loss: 0.842837]\n",
      "268 [D loss: 0.659001, acc.: 56.25%] [G loss: 0.852814]\n",
      "269 [D loss: 0.662984, acc.: 60.16%] [G loss: 0.835915]\n",
      "270 [D loss: 0.660067, acc.: 60.55%] [G loss: 0.832627]\n",
      "271 [D loss: 0.648139, acc.: 63.67%] [G loss: 0.839206]\n",
      "272 [D loss: 0.653217, acc.: 62.89%] [G loss: 0.823721]\n",
      "273 [D loss: 0.661563, acc.: 62.89%] [G loss: 0.768906]\n",
      "274 [D loss: 0.677432, acc.: 56.64%] [G loss: 0.779397]\n",
      "275 [D loss: 0.667413, acc.: 56.25%] [G loss: 0.767192]\n",
      "276 [D loss: 0.666810, acc.: 56.25%] [G loss: 0.767786]\n",
      "277 [D loss: 0.667108, acc.: 57.03%] [G loss: 0.771507]\n",
      "278 [D loss: 0.672102, acc.: 53.91%] [G loss: 0.783759]\n",
      "279 [D loss: 0.661907, acc.: 57.03%] [G loss: 0.805544]\n",
      "280 [D loss: 0.644970, acc.: 62.89%] [G loss: 0.862093]\n",
      "281 [D loss: 0.647930, acc.: 62.11%] [G loss: 0.859840]\n",
      "282 [D loss: 0.646826, acc.: 61.72%] [G loss: 0.822706]\n",
      "283 [D loss: 0.661521, acc.: 53.52%] [G loss: 0.821657]\n",
      "284 [D loss: 0.685089, acc.: 41.80%] [G loss: 0.783051]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "285 [D loss: 0.693662, acc.: 37.11%] [G loss: 0.804863]\n",
      "286 [D loss: 0.667689, acc.: 54.69%] [G loss: 0.885857]\n",
      "287 [D loss: 0.642965, acc.: 60.55%] [G loss: 0.981873]\n",
      "288 [D loss: 0.617147, acc.: 60.94%] [G loss: 1.042374]\n",
      "289 [D loss: 0.619943, acc.: 61.33%] [G loss: 1.039297]\n",
      "290 [D loss: 0.625420, acc.: 62.89%] [G loss: 0.962278]\n",
      "291 [D loss: 0.642015, acc.: 66.80%] [G loss: 0.897271]\n",
      "292 [D loss: 0.649908, acc.: 65.62%] [G loss: 0.820338]\n",
      "293 [D loss: 0.655330, acc.: 66.41%] [G loss: 0.797043]\n",
      "294 [D loss: 0.647047, acc.: 67.58%] [G loss: 0.764338]\n",
      "295 [D loss: 0.649811, acc.: 66.80%] [G loss: 0.765815]\n",
      "296 [D loss: 0.646034, acc.: 64.84%] [G loss: 0.768596]\n",
      "297 [D loss: 0.643233, acc.: 63.67%] [G loss: 0.754955]\n",
      "298 [D loss: 0.647069, acc.: 63.67%] [G loss: 0.771468]\n",
      "299 [D loss: 0.636342, acc.: 64.45%] [G loss: 0.778726]\n",
      "300 [D loss: 0.637147, acc.: 64.84%] [G loss: 0.780960]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "301 [D loss: 0.629892, acc.: 65.23%] [G loss: 0.799522]\n",
      "302 [D loss: 0.624341, acc.: 66.80%] [G loss: 0.817445]\n",
      "303 [D loss: 0.630491, acc.: 67.19%] [G loss: 0.829532]\n",
      "304 [D loss: 0.626305, acc.: 65.62%] [G loss: 0.827337]\n",
      "305 [D loss: 0.626905, acc.: 67.19%] [G loss: 0.818862]\n",
      "306 [D loss: 0.627860, acc.: 64.84%] [G loss: 0.809194]\n",
      "307 [D loss: 0.641573, acc.: 60.55%] [G loss: 0.791872]\n",
      "308 [D loss: 0.635748, acc.: 65.62%] [G loss: 0.806007]\n",
      "309 [D loss: 0.628018, acc.: 66.02%] [G loss: 0.786560]\n",
      "310 [D loss: 0.639976, acc.: 64.45%] [G loss: 0.804622]\n",
      "311 [D loss: 0.621745, acc.: 66.80%] [G loss: 0.812138]\n",
      "312 [D loss: 0.626944, acc.: 65.23%] [G loss: 0.852814]\n",
      "313 [D loss: 0.604406, acc.: 69.14%] [G loss: 0.874440]\n",
      "314 [D loss: 0.595426, acc.: 73.05%] [G loss: 0.882215]\n",
      "315 [D loss: 0.591229, acc.: 73.05%] [G loss: 0.882710]\n",
      "316 [D loss: 0.593739, acc.: 75.78%] [G loss: 0.892162]\n",
      "317 [D loss: 0.627047, acc.: 65.62%] [G loss: 0.909564]\n",
      "318 [D loss: 0.645192, acc.: 57.03%] [G loss: 0.906833]\n",
      "319 [D loss: 0.683744, acc.: 46.48%] [G loss: 0.917171]\n",
      "320 [D loss: 0.685760, acc.: 46.09%] [G loss: 0.953952]\n",
      "321 [D loss: 0.666502, acc.: 52.73%] [G loss: 1.065498]\n",
      "322 [D loss: 0.604838, acc.: 66.80%] [G loss: 1.194081]\n",
      "323 [D loss: 0.572030, acc.: 68.75%] [G loss: 1.208452]\n",
      "324 [D loss: 0.599567, acc.: 68.75%] [G loss: 1.104813]\n",
      "325 [D loss: 0.595048, acc.: 70.70%] [G loss: 0.981321]\n",
      "326 [D loss: 0.617457, acc.: 71.09%] [G loss: 0.899945]\n",
      "327 [D loss: 0.654464, acc.: 64.06%] [G loss: 0.842006]\n",
      "328 [D loss: 0.651341, acc.: 66.80%] [G loss: 0.836187]\n",
      "329 [D loss: 0.656600, acc.: 63.28%] [G loss: 0.840053]\n",
      "330 [D loss: 0.655462, acc.: 65.62%] [G loss: 0.809487]\n",
      "331 [D loss: 0.667736, acc.: 62.89%] [G loss: 0.797971]\n",
      "332 [D loss: 0.661399, acc.: 62.11%] [G loss: 0.795679]\n",
      "333 [D loss: 0.684730, acc.: 57.81%] [G loss: 0.758002]\n",
      "334 [D loss: 0.688137, acc.: 57.42%] [G loss: 0.756685]\n",
      "335 [D loss: 0.713611, acc.: 49.22%] [G loss: 0.769495]\n",
      "336 [D loss: 0.716775, acc.: 43.36%] [G loss: 0.821360]\n",
      "337 [D loss: 0.636272, acc.: 64.84%] [G loss: 1.185513]\n",
      "338 [D loss: 0.605830, acc.: 60.55%] [G loss: 1.115098]\n",
      "339 [D loss: 0.618387, acc.: 60.16%] [G loss: 1.009366]\n",
      "340 [D loss: 0.629858, acc.: 61.72%] [G loss: 0.959408]\n",
      "341 [D loss: 0.624634, acc.: 63.28%] [G loss: 0.944904]\n",
      "342 [D loss: 0.615630, acc.: 63.67%] [G loss: 0.955384]\n",
      "343 [D loss: 0.613414, acc.: 64.84%] [G loss: 0.938495]\n",
      "344 [D loss: 0.600706, acc.: 67.19%] [G loss: 0.932646]\n",
      "345 [D loss: 0.603497, acc.: 68.75%] [G loss: 0.917567]\n",
      "346 [D loss: 0.603602, acc.: 70.70%] [G loss: 0.875120]\n",
      "347 [D loss: 0.594976, acc.: 75.39%] [G loss: 0.840637]\n",
      "348 [D loss: 0.588373, acc.: 76.95%] [G loss: 0.850827]\n",
      "349 [D loss: 0.594345, acc.: 75.00%] [G loss: 0.838351]\n",
      "350 [D loss: 0.599376, acc.: 75.00%] [G loss: 0.864592]\n",
      "351 [D loss: 0.587706, acc.: 77.73%] [G loss: 0.861167]\n",
      "352 [D loss: 0.578922, acc.: 76.56%] [G loss: 0.869358]\n",
      "353 [D loss: 0.589227, acc.: 72.66%] [G loss: 0.833676]\n",
      "354 [D loss: 0.619635, acc.: 68.75%] [G loss: 0.812797]\n",
      "355 [D loss: 0.645746, acc.: 64.84%] [G loss: 0.804227]\n",
      "356 [D loss: 0.626097, acc.: 64.45%] [G loss: 0.842331]\n",
      "357 [D loss: 0.669788, acc.: 59.38%] [G loss: 0.876176]\n",
      "358 [D loss: 0.670696, acc.: 58.98%] [G loss: 0.949283]\n",
      "359 [D loss: 0.672134, acc.: 59.77%] [G loss: 0.966322]\n",
      "360 [D loss: 0.685962, acc.: 57.81%] [G loss: 0.941601]\n",
      "361 [D loss: 0.678890, acc.: 56.25%] [G loss: 0.901401]\n",
      "362 [D loss: 0.683246, acc.: 53.12%] [G loss: 0.911576]\n",
      "363 [D loss: 0.678907, acc.: 51.56%] [G loss: 1.044162]\n",
      "364 [D loss: 0.632444, acc.: 64.45%] [G loss: 1.014070]\n",
      "365 [D loss: 0.616605, acc.: 67.58%] [G loss: 0.998194]\n",
      "366 [D loss: 0.642565, acc.: 66.80%] [G loss: 0.981649]\n",
      "367 [D loss: 0.642306, acc.: 64.84%] [G loss: 0.932689]\n",
      "368 [D loss: 0.647982, acc.: 65.62%] [G loss: 0.930492]\n",
      "369 [D loss: 0.651608, acc.: 62.89%] [G loss: 0.920735]\n",
      "370 [D loss: 0.648522, acc.: 64.06%] [G loss: 0.938203]\n",
      "371 [D loss: 0.638029, acc.: 67.97%] [G loss: 0.951037]\n",
      "372 [D loss: 0.622486, acc.: 67.97%] [G loss: 0.951785]\n",
      "373 [D loss: 0.633914, acc.: 66.41%] [G loss: 0.904268]\n",
      "374 [D loss: 0.621380, acc.: 67.19%] [G loss: 0.889620]\n",
      "375 [D loss: 0.624443, acc.: 67.58%] [G loss: 0.871327]\n",
      "376 [D loss: 0.607973, acc.: 71.09%] [G loss: 0.862457]\n",
      "377 [D loss: 0.606828, acc.: 69.92%] [G loss: 0.874710]\n",
      "378 [D loss: 0.619237, acc.: 67.97%] [G loss: 0.864685]\n",
      "379 [D loss: 0.642718, acc.: 64.84%] [G loss: 0.885903]\n",
      "380 [D loss: 0.616560, acc.: 65.23%] [G loss: 0.810579]\n",
      "381 [D loss: 0.656677, acc.: 56.64%] [G loss: 0.807332]\n",
      "382 [D loss: 0.661650, acc.: 59.38%] [G loss: 0.764865]\n",
      "383 [D loss: 0.700446, acc.: 55.47%] [G loss: 0.803113]\n",
      "384 [D loss: 0.686755, acc.: 55.08%] [G loss: 0.853666]\n",
      "385 [D loss: 0.688479, acc.: 53.12%] [G loss: 0.881744]\n",
      "386 [D loss: 0.632953, acc.: 62.50%] [G loss: 1.012548]\n",
      "387 [D loss: 0.626236, acc.: 63.67%] [G loss: 0.980294]\n",
      "388 [D loss: 0.625286, acc.: 62.50%] [G loss: 0.908303]\n",
      "389 [D loss: 0.638870, acc.: 60.55%] [G loss: 0.881004]\n",
      "390 [D loss: 0.637007, acc.: 58.20%] [G loss: 0.853866]\n",
      "391 [D loss: 0.641685, acc.: 55.08%] [G loss: 0.874524]\n",
      "392 [D loss: 0.629779, acc.: 63.28%] [G loss: 0.868115]\n",
      "393 [D loss: 0.630313, acc.: 62.89%] [G loss: 0.880149]\n",
      "394 [D loss: 0.624016, acc.: 63.28%] [G loss: 0.865046]\n",
      "395 [D loss: 0.618378, acc.: 62.50%] [G loss: 0.858081]\n",
      "396 [D loss: 0.622571, acc.: 66.41%] [G loss: 0.838796]\n",
      "397 [D loss: 0.634318, acc.: 65.23%] [G loss: 0.835239]\n",
      "398 [D loss: 0.619529, acc.: 64.45%] [G loss: 0.840538]\n",
      "399 [D loss: 0.617870, acc.: 66.41%] [G loss: 0.846622]\n",
      "400 [D loss: 0.624018, acc.: 67.97%] [G loss: 0.839319]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "401 [D loss: 0.631165, acc.: 63.67%] [G loss: 0.821755]\n",
      "402 [D loss: 0.620902, acc.: 68.75%] [G loss: 0.860856]\n",
      "403 [D loss: 0.603916, acc.: 72.27%] [G loss: 0.874091]\n",
      "404 [D loss: 0.610902, acc.: 70.70%] [G loss: 0.905422]\n",
      "405 [D loss: 0.595869, acc.: 73.83%] [G loss: 0.896700]\n",
      "406 [D loss: 0.596327, acc.: 73.44%] [G loss: 0.943667]\n",
      "407 [D loss: 0.589006, acc.: 74.22%] [G loss: 1.022711]\n",
      "408 [D loss: 0.580815, acc.: 73.83%] [G loss: 1.015937]\n",
      "409 [D loss: 0.607809, acc.: 70.31%] [G loss: 0.969513]\n",
      "410 [D loss: 0.621555, acc.: 67.58%] [G loss: 0.932684]\n",
      "411 [D loss: 0.617560, acc.: 69.14%] [G loss: 0.939404]\n",
      "412 [D loss: 0.633344, acc.: 67.58%] [G loss: 0.985076]\n",
      "413 [D loss: 0.613676, acc.: 68.75%] [G loss: 0.986117]\n",
      "414 [D loss: 0.604313, acc.: 68.36%] [G loss: 0.958849]\n",
      "415 [D loss: 0.611063, acc.: 66.02%] [G loss: 0.953401]\n",
      "416 [D loss: 0.628486, acc.: 62.89%] [G loss: 0.888742]\n",
      "417 [D loss: 0.618622, acc.: 64.84%] [G loss: 0.873949]\n",
      "418 [D loss: 0.665192, acc.: 54.30%] [G loss: 0.924007]\n",
      "419 [D loss: 0.627194, acc.: 62.89%] [G loss: 0.966298]\n",
      "420 [D loss: 0.603086, acc.: 66.80%] [G loss: 1.108615]\n",
      "421 [D loss: 0.591010, acc.: 71.48%] [G loss: 1.122281]\n",
      "422 [D loss: 0.596334, acc.: 69.53%] [G loss: 1.008715]\n",
      "423 [D loss: 0.608556, acc.: 63.67%] [G loss: 0.966467]\n",
      "424 [D loss: 0.629824, acc.: 61.72%] [G loss: 0.869961]\n",
      "425 [D loss: 0.655767, acc.: 57.42%] [G loss: 0.843533]\n",
      "426 [D loss: 0.688206, acc.: 46.88%] [G loss: 0.896209]\n",
      "427 [D loss: 0.631007, acc.: 63.28%] [G loss: 0.988942]\n",
      "428 [D loss: 0.605171, acc.: 67.19%] [G loss: 1.082604]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "429 [D loss: 0.596991, acc.: 67.58%] [G loss: 1.020143]\n",
      "430 [D loss: 0.609405, acc.: 64.84%] [G loss: 1.005740]\n",
      "431 [D loss: 0.622950, acc.: 63.67%] [G loss: 0.940245]\n",
      "432 [D loss: 0.624112, acc.: 61.72%] [G loss: 0.875756]\n",
      "433 [D loss: 0.639999, acc.: 61.33%] [G loss: 0.822337]\n",
      "434 [D loss: 0.642473, acc.: 58.98%] [G loss: 0.831458]\n",
      "435 [D loss: 0.645025, acc.: 57.42%] [G loss: 0.856150]\n",
      "436 [D loss: 0.624581, acc.: 62.11%] [G loss: 0.881781]\n",
      "437 [D loss: 0.604433, acc.: 65.23%] [G loss: 0.903968]\n",
      "438 [D loss: 0.616213, acc.: 59.77%] [G loss: 0.908546]\n",
      "439 [D loss: 0.606887, acc.: 65.23%] [G loss: 0.924371]\n",
      "440 [D loss: 0.597285, acc.: 66.80%] [G loss: 0.881923]\n",
      "441 [D loss: 0.625471, acc.: 63.28%] [G loss: 0.891334]\n",
      "442 [D loss: 0.608584, acc.: 68.36%] [G loss: 0.870383]\n",
      "443 [D loss: 0.608285, acc.: 66.41%] [G loss: 0.841694]\n",
      "444 [D loss: 0.606771, acc.: 67.19%] [G loss: 0.838457]\n",
      "445 [D loss: 0.600563, acc.: 66.80%] [G loss: 0.831316]\n",
      "446 [D loss: 0.607603, acc.: 66.02%] [G loss: 0.849211]\n",
      "447 [D loss: 0.595461, acc.: 70.70%] [G loss: 0.843753]\n",
      "448 [D loss: 0.589885, acc.: 69.14%] [G loss: 0.864680]\n",
      "449 [D loss: 0.598411, acc.: 67.58%] [G loss: 0.909186]\n",
      "450 [D loss: 0.603643, acc.: 63.67%] [G loss: 0.912297]\n",
      "451 [D loss: 0.589613, acc.: 69.14%] [G loss: 0.921310]\n",
      "452 [D loss: 0.614774, acc.: 65.62%] [G loss: 0.885597]\n",
      "453 [D loss: 0.625500, acc.: 64.84%] [G loss: 0.852705]\n",
      "454 [D loss: 0.685344, acc.: 57.81%] [G loss: 0.871806]\n",
      "455 [D loss: 0.640991, acc.: 61.33%] [G loss: 0.927608]\n",
      "456 [D loss: 0.631568, acc.: 61.33%] [G loss: 0.981334]\n",
      "457 [D loss: 0.598311, acc.: 67.19%] [G loss: 1.067230]\n",
      "458 [D loss: 0.569505, acc.: 68.36%] [G loss: 1.098397]\n",
      "459 [D loss: 0.576935, acc.: 67.19%] [G loss: 1.087168]\n",
      "460 [D loss: 0.583226, acc.: 67.19%] [G loss: 0.958732]\n",
      "461 [D loss: 0.615808, acc.: 61.33%] [G loss: 0.922547]\n",
      "462 [D loss: 0.613380, acc.: 62.89%] [G loss: 0.883022]\n",
      "463 [D loss: 0.652378, acc.: 56.25%] [G loss: 0.875475]\n",
      "464 [D loss: 0.659213, acc.: 56.25%] [G loss: 0.862133]\n",
      "465 [D loss: 0.700013, acc.: 50.39%] [G loss: 0.908086]\n",
      "466 [D loss: 0.663119, acc.: 53.91%] [G loss: 0.994754]\n",
      "467 [D loss: 0.620880, acc.: 59.77%] [G loss: 1.057378]\n",
      "468 [D loss: 0.617802, acc.: 57.81%] [G loss: 1.077256]\n",
      "469 [D loss: 0.644586, acc.: 57.81%] [G loss: 0.956090]\n",
      "470 [D loss: 0.642834, acc.: 57.03%] [G loss: 0.865464]\n",
      "471 [D loss: 0.637064, acc.: 63.28%] [G loss: 0.860914]\n",
      "472 [D loss: 0.627759, acc.: 66.80%] [G loss: 0.843220]\n",
      "473 [D loss: 0.625755, acc.: 66.02%] [G loss: 0.836850]\n",
      "474 [D loss: 0.625295, acc.: 65.23%] [G loss: 0.844551]\n",
      "475 [D loss: 0.612750, acc.: 65.23%] [G loss: 0.877032]\n",
      "476 [D loss: 0.623502, acc.: 65.23%] [G loss: 0.888030]\n",
      "477 [D loss: 0.605902, acc.: 67.58%] [G loss: 0.875420]\n",
      "478 [D loss: 0.606761, acc.: 63.67%] [G loss: 0.888692]\n",
      "479 [D loss: 0.611268, acc.: 63.67%] [G loss: 0.859874]\n",
      "480 [D loss: 0.614405, acc.: 66.02%] [G loss: 0.873010]\n",
      "481 [D loss: 0.613332, acc.: 64.45%] [G loss: 0.862903]\n",
      "482 [D loss: 0.621305, acc.: 64.06%] [G loss: 0.885015]\n",
      "483 [D loss: 0.609856, acc.: 64.45%] [G loss: 0.903082]\n",
      "484 [D loss: 0.643823, acc.: 58.59%] [G loss: 0.889878]\n",
      "485 [D loss: 0.607301, acc.: 62.89%] [G loss: 0.909457]\n",
      "486 [D loss: 0.623411, acc.: 62.50%] [G loss: 0.846630]\n",
      "487 [D loss: 0.606308, acc.: 62.89%] [G loss: 0.875505]\n",
      "488 [D loss: 0.611349, acc.: 65.62%] [G loss: 0.856541]\n",
      "489 [D loss: 0.622237, acc.: 62.50%] [G loss: 0.835778]\n",
      "490 [D loss: 0.619596, acc.: 61.72%] [G loss: 0.858004]\n",
      "491 [D loss: 0.618424, acc.: 66.41%] [G loss: 0.858765]\n",
      "492 [D loss: 0.619296, acc.: 66.80%] [G loss: 0.851196]\n",
      "493 [D loss: 0.630850, acc.: 65.23%] [G loss: 0.824244]\n",
      "494 [D loss: 0.652925, acc.: 64.45%] [G loss: 0.847246]\n",
      "495 [D loss: 0.620207, acc.: 66.41%] [G loss: 0.884677]\n",
      "496 [D loss: 0.626371, acc.: 64.45%] [G loss: 0.962093]\n",
      "497 [D loss: 0.610715, acc.: 66.80%] [G loss: 0.949581]\n",
      "498 [D loss: 0.616471, acc.: 65.23%] [G loss: 0.903141]\n",
      "499 [D loss: 0.624625, acc.: 62.89%] [G loss: 0.876369]\n",
      "500 [D loss: 0.621488, acc.: 64.84%] [G loss: 0.860132]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "501 [D loss: 0.627542, acc.: 59.38%] [G loss: 0.884412]\n",
      "502 [D loss: 0.644046, acc.: 58.98%] [G loss: 0.860355]\n",
      "503 [D loss: 0.644848, acc.: 59.38%] [G loss: 0.909965]\n",
      "504 [D loss: 0.638246, acc.: 58.98%] [G loss: 0.957969]\n",
      "505 [D loss: 0.624154, acc.: 59.77%] [G loss: 0.954924]\n",
      "506 [D loss: 0.622967, acc.: 62.50%] [G loss: 0.886514]\n",
      "507 [D loss: 0.643337, acc.: 62.11%] [G loss: 0.827933]\n",
      "508 [D loss: 0.666854, acc.: 60.16%] [G loss: 0.891071]\n",
      "509 [D loss: 0.614038, acc.: 65.23%] [G loss: 0.915311]\n",
      "510 [D loss: 0.596727, acc.: 67.19%] [G loss: 0.950919]\n",
      "511 [D loss: 0.596449, acc.: 67.97%] [G loss: 0.957378]\n",
      "512 [D loss: 0.596027, acc.: 67.19%] [G loss: 0.899324]\n",
      "513 [D loss: 0.609435, acc.: 65.23%] [G loss: 0.853682]\n",
      "514 [D loss: 0.612687, acc.: 65.62%] [G loss: 0.872278]\n",
      "515 [D loss: 0.620551, acc.: 66.41%] [G loss: 0.839194]\n",
      "516 [D loss: 0.633029, acc.: 66.41%] [G loss: 0.860411]\n",
      "517 [D loss: 0.614047, acc.: 62.50%] [G loss: 0.845619]\n",
      "518 [D loss: 0.632587, acc.: 67.97%] [G loss: 0.856121]\n",
      "519 [D loss: 0.619028, acc.: 63.28%] [G loss: 0.849743]\n",
      "520 [D loss: 0.622056, acc.: 63.67%] [G loss: 0.859629]\n",
      "521 [D loss: 0.621148, acc.: 68.36%] [G loss: 0.864572]\n",
      "522 [D loss: 0.607994, acc.: 67.58%] [G loss: 0.872034]\n",
      "523 [D loss: 0.612725, acc.: 69.53%] [G loss: 0.880129]\n",
      "524 [D loss: 0.594607, acc.: 69.53%] [G loss: 0.881365]\n",
      "525 [D loss: 0.599362, acc.: 69.92%] [G loss: 0.883823]\n",
      "526 [D loss: 0.594256, acc.: 67.19%] [G loss: 0.889083]\n",
      "527 [D loss: 0.603149, acc.: 67.19%] [G loss: 0.897665]\n",
      "528 [D loss: 0.605882, acc.: 65.23%] [G loss: 0.922536]\n",
      "529 [D loss: 0.603144, acc.: 66.41%] [G loss: 0.959597]\n",
      "530 [D loss: 0.617628, acc.: 62.89%] [G loss: 0.971523]\n",
      "531 [D loss: 0.616123, acc.: 62.89%] [G loss: 0.924711]\n",
      "532 [D loss: 0.639554, acc.: 61.72%] [G loss: 0.876790]\n",
      "533 [D loss: 0.636611, acc.: 65.23%] [G loss: 0.859638]\n",
      "534 [D loss: 0.615133, acc.: 63.28%] [G loss: 0.848225]\n",
      "535 [D loss: 0.625858, acc.: 66.80%] [G loss: 0.869911]\n",
      "536 [D loss: 0.594600, acc.: 69.92%] [G loss: 0.930764]\n",
      "537 [D loss: 0.598780, acc.: 69.92%] [G loss: 0.930360]\n",
      "538 [D loss: 0.619647, acc.: 67.97%] [G loss: 0.914082]\n",
      "539 [D loss: 0.621154, acc.: 66.41%] [G loss: 0.912873]\n",
      "540 [D loss: 0.645128, acc.: 61.33%] [G loss: 0.911593]\n",
      "541 [D loss: 0.648355, acc.: 60.94%] [G loss: 0.902808]\n",
      "542 [D loss: 0.635271, acc.: 61.72%] [G loss: 0.926465]\n",
      "543 [D loss: 0.635411, acc.: 61.72%] [G loss: 0.996265]\n",
      "544 [D loss: 0.619817, acc.: 67.58%] [G loss: 1.020496]\n",
      "545 [D loss: 0.601353, acc.: 64.84%] [G loss: 1.109436]\n",
      "546 [D loss: 0.584798, acc.: 68.36%] [G loss: 1.157293]\n",
      "547 [D loss: 0.600597, acc.: 65.23%] [G loss: 1.067443]\n",
      "548 [D loss: 0.607658, acc.: 65.23%] [G loss: 0.983354]\n",
      "549 [D loss: 0.623033, acc.: 65.23%] [G loss: 0.918590]\n",
      "550 [D loss: 0.616485, acc.: 65.23%] [G loss: 0.909178]\n",
      "551 [D loss: 0.612476, acc.: 66.80%] [G loss: 0.881383]\n",
      "552 [D loss: 0.612537, acc.: 62.50%] [G loss: 0.911023]\n",
      "553 [D loss: 0.616011, acc.: 64.45%] [G loss: 0.907162]\n",
      "554 [D loss: 0.582770, acc.: 69.14%] [G loss: 0.906114]\n",
      "555 [D loss: 0.587042, acc.: 67.97%] [G loss: 0.895627]\n",
      "556 [D loss: 0.594601, acc.: 66.41%] [G loss: 0.895404]\n",
      "557 [D loss: 0.605856, acc.: 67.58%] [G loss: 0.870455]\n",
      "558 [D loss: 0.629321, acc.: 58.98%] [G loss: 0.865302]\n",
      "559 [D loss: 0.648038, acc.: 58.59%] [G loss: 0.844785]\n",
      "560 [D loss: 0.659716, acc.: 60.16%] [G loss: 0.878055]\n",
      "561 [D loss: 0.660032, acc.: 59.77%] [G loss: 0.917552]\n",
      "562 [D loss: 0.633451, acc.: 59.38%] [G loss: 0.971575]\n",
      "563 [D loss: 0.631864, acc.: 62.50%] [G loss: 1.056069]\n",
      "564 [D loss: 0.582341, acc.: 67.58%] [G loss: 1.084119]\n",
      "565 [D loss: 0.579854, acc.: 69.92%] [G loss: 1.082031]\n",
      "566 [D loss: 0.600906, acc.: 69.53%] [G loss: 1.065271]\n",
      "567 [D loss: 0.564850, acc.: 74.22%] [G loss: 1.063456]\n",
      "568 [D loss: 0.577955, acc.: 71.09%] [G loss: 1.006193]\n",
      "569 [D loss: 0.604628, acc.: 70.70%] [G loss: 0.977787]\n",
      "570 [D loss: 0.617662, acc.: 65.62%] [G loss: 0.977849]\n",
      "571 [D loss: 0.633720, acc.: 65.23%] [G loss: 0.954059]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "572 [D loss: 0.634152, acc.: 65.23%] [G loss: 0.929437]\n",
      "573 [D loss: 0.653172, acc.: 64.45%] [G loss: 0.853222]\n",
      "574 [D loss: 0.660609, acc.: 60.16%] [G loss: 0.881559]\n",
      "575 [D loss: 0.647633, acc.: 62.89%] [G loss: 0.881723]\n",
      "576 [D loss: 0.638087, acc.: 64.45%] [G loss: 0.926188]\n",
      "577 [D loss: 0.631797, acc.: 64.45%] [G loss: 0.960888]\n",
      "578 [D loss: 0.609306, acc.: 64.84%] [G loss: 0.939230]\n",
      "579 [D loss: 0.598581, acc.: 66.41%] [G loss: 0.950608]\n",
      "580 [D loss: 0.605273, acc.: 63.28%] [G loss: 0.960768]\n",
      "581 [D loss: 0.607417, acc.: 64.84%] [G loss: 0.961013]\n",
      "582 [D loss: 0.581382, acc.: 69.53%] [G loss: 0.937490]\n",
      "583 [D loss: 0.598915, acc.: 66.80%] [G loss: 0.896690]\n",
      "584 [D loss: 0.611881, acc.: 66.41%] [G loss: 0.933407]\n",
      "585 [D loss: 0.577232, acc.: 70.70%] [G loss: 0.948057]\n",
      "586 [D loss: 0.585203, acc.: 70.31%] [G loss: 0.923324]\n",
      "587 [D loss: 0.598167, acc.: 68.36%] [G loss: 0.924405]\n",
      "588 [D loss: 0.623950, acc.: 65.23%] [G loss: 0.970802]\n",
      "589 [D loss: 0.587078, acc.: 73.05%] [G loss: 0.928819]\n",
      "590 [D loss: 0.575280, acc.: 76.17%] [G loss: 0.894201]\n",
      "591 [D loss: 0.597455, acc.: 69.92%] [G loss: 0.907207]\n",
      "592 [D loss: 0.594001, acc.: 70.70%] [G loss: 0.940874]\n",
      "593 [D loss: 0.574577, acc.: 73.44%] [G loss: 0.961333]\n",
      "594 [D loss: 0.579020, acc.: 71.88%] [G loss: 0.928692]\n",
      "595 [D loss: 0.576244, acc.: 71.09%] [G loss: 0.876918]\n",
      "596 [D loss: 0.583348, acc.: 69.92%] [G loss: 0.895888]\n",
      "597 [D loss: 0.571300, acc.: 73.44%] [G loss: 0.918171]\n",
      "598 [D loss: 0.591402, acc.: 70.70%] [G loss: 0.940787]\n",
      "599 [D loss: 0.580452, acc.: 73.05%] [G loss: 0.952390]\n",
      "600 [D loss: 0.602926, acc.: 71.09%] [G loss: 0.923577]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "601 [D loss: 0.595946, acc.: 69.53%] [G loss: 0.973154]\n",
      "602 [D loss: 0.598501, acc.: 64.45%] [G loss: 0.977644]\n",
      "603 [D loss: 0.595850, acc.: 71.48%] [G loss: 0.979310]\n",
      "604 [D loss: 0.589097, acc.: 67.97%] [G loss: 0.995224]\n",
      "605 [D loss: 0.592818, acc.: 67.19%] [G loss: 0.961079]\n",
      "606 [D loss: 0.603565, acc.: 68.75%] [G loss: 0.914194]\n",
      "607 [D loss: 0.606182, acc.: 64.84%] [G loss: 0.863803]\n",
      "608 [D loss: 0.661473, acc.: 64.06%] [G loss: 0.924924]\n",
      "609 [D loss: 0.635152, acc.: 62.50%] [G loss: 1.022620]\n",
      "610 [D loss: 0.616020, acc.: 63.67%] [G loss: 1.058116]\n",
      "611 [D loss: 0.606111, acc.: 66.80%] [G loss: 1.078199]\n",
      "612 [D loss: 0.570694, acc.: 71.09%] [G loss: 1.036864]\n",
      "613 [D loss: 0.561890, acc.: 71.88%] [G loss: 1.040722]\n",
      "614 [D loss: 0.565689, acc.: 73.83%] [G loss: 1.006984]\n",
      "615 [D loss: 0.567890, acc.: 74.61%] [G loss: 0.923887]\n",
      "616 [D loss: 0.578469, acc.: 75.00%] [G loss: 0.936219]\n",
      "617 [D loss: 0.585025, acc.: 72.66%] [G loss: 0.907270]\n",
      "618 [D loss: 0.609157, acc.: 69.92%] [G loss: 0.965922]\n",
      "619 [D loss: 0.606196, acc.: 67.97%] [G loss: 0.964692]\n",
      "620 [D loss: 0.615366, acc.: 65.23%] [G loss: 0.991076]\n",
      "621 [D loss: 0.611451, acc.: 63.67%] [G loss: 0.966150]\n",
      "622 [D loss: 0.639592, acc.: 58.59%] [G loss: 0.984834]\n",
      "623 [D loss: 0.629808, acc.: 60.16%] [G loss: 1.055818]\n",
      "624 [D loss: 0.602723, acc.: 65.62%] [G loss: 1.050943]\n",
      "625 [D loss: 0.580948, acc.: 70.70%] [G loss: 1.037720]\n",
      "626 [D loss: 0.583866, acc.: 67.97%] [G loss: 1.073136]\n",
      "627 [D loss: 0.586763, acc.: 67.97%] [G loss: 1.021845]\n",
      "628 [D loss: 0.586377, acc.: 68.36%] [G loss: 1.004338]\n",
      "629 [D loss: 0.584354, acc.: 70.70%] [G loss: 0.980842]\n",
      "630 [D loss: 0.592380, acc.: 67.97%] [G loss: 0.957548]\n",
      "631 [D loss: 0.580464, acc.: 71.88%] [G loss: 0.911386]\n",
      "632 [D loss: 0.595671, acc.: 66.80%] [G loss: 0.939303]\n",
      "633 [D loss: 0.594359, acc.: 72.27%] [G loss: 0.932117]\n",
      "634 [D loss: 0.607676, acc.: 71.88%] [G loss: 0.959454]\n",
      "635 [D loss: 0.578389, acc.: 71.09%] [G loss: 0.942434]\n",
      "636 [D loss: 0.561677, acc.: 75.78%] [G loss: 0.971211]\n",
      "637 [D loss: 0.557357, acc.: 74.61%] [G loss: 0.964364]\n",
      "638 [D loss: 0.553682, acc.: 75.39%] [G loss: 1.000143]\n",
      "639 [D loss: 0.528201, acc.: 77.34%] [G loss: 1.007894]\n",
      "640 [D loss: 0.550926, acc.: 74.61%] [G loss: 1.021370]\n",
      "641 [D loss: 0.557943, acc.: 75.39%] [G loss: 1.011818]\n",
      "642 [D loss: 0.591048, acc.: 69.14%] [G loss: 1.013217]\n",
      "643 [D loss: 0.590925, acc.: 69.14%] [G loss: 1.002288]\n",
      "644 [D loss: 0.622398, acc.: 64.45%] [G loss: 0.982162]\n",
      "645 [D loss: 0.594955, acc.: 68.36%] [G loss: 0.992594]\n",
      "646 [D loss: 0.614159, acc.: 67.58%] [G loss: 0.956203]\n",
      "647 [D loss: 0.624279, acc.: 64.84%] [G loss: 0.962755]\n",
      "648 [D loss: 0.569854, acc.: 72.27%] [G loss: 1.084928]\n",
      "649 [D loss: 0.544622, acc.: 74.22%] [G loss: 1.167557]\n",
      "650 [D loss: 0.514969, acc.: 77.73%] [G loss: 1.184151]\n",
      "651 [D loss: 0.520609, acc.: 77.34%] [G loss: 1.164668]\n",
      "652 [D loss: 0.514940, acc.: 78.12%] [G loss: 1.155503]\n",
      "653 [D loss: 0.525829, acc.: 78.91%] [G loss: 1.147402]\n",
      "654 [D loss: 0.540556, acc.: 77.34%] [G loss: 1.083728]\n",
      "655 [D loss: 0.557636, acc.: 74.61%] [G loss: 1.076372]\n",
      "656 [D loss: 0.552198, acc.: 75.78%] [G loss: 1.044091]\n",
      "657 [D loss: 0.565802, acc.: 74.22%] [G loss: 1.117311]\n",
      "658 [D loss: 0.566359, acc.: 72.66%] [G loss: 1.148386]\n",
      "659 [D loss: 0.570647, acc.: 74.22%] [G loss: 1.163470]\n",
      "660 [D loss: 0.558437, acc.: 69.53%] [G loss: 1.118187]\n",
      "661 [D loss: 0.563864, acc.: 69.14%] [G loss: 1.167962]\n",
      "662 [D loss: 0.543689, acc.: 74.61%] [G loss: 1.155857]\n",
      "663 [D loss: 0.563674, acc.: 69.53%] [G loss: 1.173210]\n",
      "664 [D loss: 0.553883, acc.: 72.66%] [G loss: 1.097243]\n",
      "665 [D loss: 0.585603, acc.: 66.80%] [G loss: 1.142522]\n",
      "666 [D loss: 0.583667, acc.: 70.70%] [G loss: 1.118774]\n",
      "667 [D loss: 0.566481, acc.: 69.92%] [G loss: 1.114794]\n",
      "668 [D loss: 0.559042, acc.: 72.66%] [G loss: 1.170596]\n",
      "669 [D loss: 0.538460, acc.: 74.22%] [G loss: 1.123493]\n",
      "670 [D loss: 0.555972, acc.: 72.27%] [G loss: 1.182743]\n",
      "671 [D loss: 0.515392, acc.: 77.34%] [G loss: 1.119853]\n",
      "672 [D loss: 0.573178, acc.: 70.70%] [G loss: 1.143036]\n",
      "673 [D loss: 0.566429, acc.: 71.88%] [G loss: 1.133047]\n",
      "674 [D loss: 0.556731, acc.: 72.27%] [G loss: 1.157412]\n",
      "675 [D loss: 0.543860, acc.: 71.09%] [G loss: 1.108541]\n",
      "676 [D loss: 0.562160, acc.: 71.88%] [G loss: 1.079924]\n",
      "677 [D loss: 0.600902, acc.: 68.75%] [G loss: 1.048546]\n",
      "678 [D loss: 0.583829, acc.: 68.75%] [G loss: 1.144066]\n",
      "679 [D loss: 0.601615, acc.: 63.28%] [G loss: 0.961229]\n",
      "680 [D loss: 0.596216, acc.: 64.84%] [G loss: 1.026401]\n",
      "681 [D loss: 0.538985, acc.: 74.61%] [G loss: 1.075860]\n",
      "682 [D loss: 0.549137, acc.: 71.48%] [G loss: 1.182464]\n",
      "683 [D loss: 0.524874, acc.: 72.27%] [G loss: 1.259906]\n",
      "684 [D loss: 0.545226, acc.: 69.14%] [G loss: 1.199762]\n",
      "685 [D loss: 0.524168, acc.: 73.83%] [G loss: 1.184509]\n",
      "686 [D loss: 0.568219, acc.: 69.14%] [G loss: 1.197271]\n",
      "687 [D loss: 0.571087, acc.: 69.53%] [G loss: 1.098460]\n",
      "688 [D loss: 0.574835, acc.: 72.27%] [G loss: 1.044274]\n",
      "689 [D loss: 0.615700, acc.: 64.06%] [G loss: 1.037959]\n",
      "690 [D loss: 0.623575, acc.: 64.06%] [G loss: 1.146184]\n",
      "691 [D loss: 0.596039, acc.: 69.14%] [G loss: 1.172103]\n",
      "692 [D loss: 0.568850, acc.: 69.92%] [G loss: 1.143828]\n",
      "693 [D loss: 0.561012, acc.: 69.53%] [G loss: 1.134805]\n",
      "694 [D loss: 0.552548, acc.: 67.97%] [G loss: 1.112315]\n",
      "695 [D loss: 0.550018, acc.: 71.09%] [G loss: 1.085491]\n",
      "696 [D loss: 0.537858, acc.: 71.48%] [G loss: 1.051137]\n",
      "697 [D loss: 0.551845, acc.: 72.27%] [G loss: 1.038847]\n",
      "698 [D loss: 0.528013, acc.: 75.78%] [G loss: 1.029150]\n",
      "699 [D loss: 0.528556, acc.: 75.39%] [G loss: 1.062670]\n",
      "700 [D loss: 0.489271, acc.: 82.03%] [G loss: 1.085166]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "701 [D loss: 0.520151, acc.: 73.83%] [G loss: 1.107560]\n",
      "702 [D loss: 0.518301, acc.: 73.83%] [G loss: 1.124587]\n",
      "703 [D loss: 0.545042, acc.: 74.22%] [G loss: 1.118830]\n",
      "704 [D loss: 0.577868, acc.: 70.70%] [G loss: 1.039637]\n",
      "705 [D loss: 0.599828, acc.: 70.31%] [G loss: 1.085769]\n",
      "706 [D loss: 0.582282, acc.: 69.14%] [G loss: 1.119662]\n",
      "707 [D loss: 0.577205, acc.: 71.48%] [G loss: 1.098524]\n",
      "708 [D loss: 0.562378, acc.: 66.80%] [G loss: 1.302999]\n",
      "709 [D loss: 0.494084, acc.: 75.78%] [G loss: 1.424489]\n",
      "710 [D loss: 0.497755, acc.: 73.83%] [G loss: 1.356754]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "711 [D loss: 0.518161, acc.: 74.22%] [G loss: 1.273284]\n",
      "712 [D loss: 0.520405, acc.: 75.00%] [G loss: 1.140904]\n",
      "713 [D loss: 0.562781, acc.: 73.05%] [G loss: 1.100580]\n",
      "714 [D loss: 0.582005, acc.: 73.83%] [G loss: 0.941435]\n",
      "715 [D loss: 0.569238, acc.: 73.44%] [G loss: 0.955247]\n",
      "716 [D loss: 0.565545, acc.: 72.66%] [G loss: 0.940741]\n",
      "717 [D loss: 0.557369, acc.: 70.70%] [G loss: 1.013948]\n",
      "718 [D loss: 0.575613, acc.: 67.97%] [G loss: 1.207865]\n",
      "719 [D loss: 0.538394, acc.: 75.39%] [G loss: 1.226940]\n",
      "720 [D loss: 0.561579, acc.: 71.48%] [G loss: 1.258119]\n",
      "721 [D loss: 0.572659, acc.: 70.70%] [G loss: 1.223702]\n",
      "722 [D loss: 0.556623, acc.: 73.44%] [G loss: 1.153098]\n",
      "723 [D loss: 0.591958, acc.: 68.75%] [G loss: 1.137030]\n",
      "724 [D loss: 0.569967, acc.: 69.92%] [G loss: 1.072368]\n",
      "725 [D loss: 0.560187, acc.: 71.88%] [G loss: 1.042603]\n",
      "726 [D loss: 0.561762, acc.: 70.31%] [G loss: 1.109322]\n",
      "727 [D loss: 0.554560, acc.: 70.70%] [G loss: 1.109180]\n",
      "728 [D loss: 0.558808, acc.: 71.88%] [G loss: 1.024308]\n",
      "729 [D loss: 0.544441, acc.: 71.09%] [G loss: 1.070855]\n",
      "730 [D loss: 0.538402, acc.: 71.88%] [G loss: 1.044511]\n",
      "731 [D loss: 0.546437, acc.: 71.48%] [G loss: 1.010743]\n",
      "732 [D loss: 0.562107, acc.: 70.70%] [G loss: 1.025836]\n",
      "733 [D loss: 0.536276, acc.: 76.17%] [G loss: 1.024049]\n",
      "734 [D loss: 0.570405, acc.: 71.48%] [G loss: 0.989168]\n",
      "735 [D loss: 0.558659, acc.: 73.05%] [G loss: 1.022621]\n",
      "736 [D loss: 0.549255, acc.: 73.44%] [G loss: 1.012215]\n",
      "737 [D loss: 0.545204, acc.: 71.09%] [G loss: 1.106390]\n",
      "738 [D loss: 0.506829, acc.: 75.39%] [G loss: 1.123447]\n",
      "739 [D loss: 0.509047, acc.: 75.00%] [G loss: 1.125521]\n",
      "740 [D loss: 0.519012, acc.: 73.05%] [G loss: 1.175635]\n",
      "741 [D loss: 0.504072, acc.: 76.17%] [G loss: 1.129059]\n",
      "742 [D loss: 0.563183, acc.: 72.27%] [G loss: 1.129328]\n",
      "743 [D loss: 0.648184, acc.: 64.45%] [G loss: 1.145387]\n",
      "744 [D loss: 0.636006, acc.: 61.72%] [G loss: 1.395412]\n",
      "745 [D loss: 0.549531, acc.: 70.70%] [G loss: 1.378634]\n",
      "746 [D loss: 0.588040, acc.: 67.19%] [G loss: 1.132202]\n",
      "747 [D loss: 0.537671, acc.: 73.05%] [G loss: 1.088147]\n",
      "748 [D loss: 0.527513, acc.: 75.78%] [G loss: 1.113081]\n",
      "749 [D loss: 0.524521, acc.: 76.17%] [G loss: 1.133321]\n",
      "750 [D loss: 0.529688, acc.: 75.00%] [G loss: 1.124619]\n",
      "751 [D loss: 0.532587, acc.: 72.66%] [G loss: 1.143474]\n",
      "752 [D loss: 0.541459, acc.: 70.70%] [G loss: 1.150642]\n",
      "753 [D loss: 0.509213, acc.: 76.17%] [G loss: 1.164495]\n",
      "754 [D loss: 0.533909, acc.: 73.44%] [G loss: 1.202161]\n",
      "755 [D loss: 0.526813, acc.: 75.78%] [G loss: 1.227887]\n",
      "756 [D loss: 0.540623, acc.: 73.44%] [G loss: 1.222995]\n",
      "757 [D loss: 0.511205, acc.: 75.00%] [G loss: 1.260000]\n",
      "758 [D loss: 0.500872, acc.: 76.95%] [G loss: 1.237544]\n",
      "759 [D loss: 0.515172, acc.: 73.83%] [G loss: 1.266512]\n",
      "760 [D loss: 0.504341, acc.: 75.78%] [G loss: 1.311458]\n",
      "761 [D loss: 0.497912, acc.: 76.56%] [G loss: 1.268094]\n",
      "762 [D loss: 0.566456, acc.: 71.09%] [G loss: 1.244019]\n",
      "763 [D loss: 0.541106, acc.: 70.70%] [G loss: 1.223387]\n",
      "764 [D loss: 0.593767, acc.: 64.84%] [G loss: 1.299018]\n",
      "765 [D loss: 0.529205, acc.: 74.22%] [G loss: 1.276795]\n",
      "766 [D loss: 0.527434, acc.: 72.66%] [G loss: 1.292750]\n",
      "767 [D loss: 0.528788, acc.: 70.70%] [G loss: 1.192337]\n",
      "768 [D loss: 0.539554, acc.: 72.66%] [G loss: 1.213843]\n",
      "769 [D loss: 0.504243, acc.: 76.56%] [G loss: 1.257295]\n",
      "770 [D loss: 0.477739, acc.: 76.95%] [G loss: 1.259222]\n",
      "771 [D loss: 0.469178, acc.: 80.08%] [G loss: 1.304630]\n",
      "772 [D loss: 0.468264, acc.: 80.86%] [G loss: 1.282411]\n",
      "773 [D loss: 0.527618, acc.: 72.27%] [G loss: 1.248320]\n",
      "774 [D loss: 0.533120, acc.: 69.92%] [G loss: 1.315529]\n",
      "775 [D loss: 0.590938, acc.: 66.80%] [G loss: 1.330068]\n",
      "776 [D loss: 0.568640, acc.: 70.70%] [G loss: 1.726386]\n",
      "777 [D loss: 0.519662, acc.: 73.44%] [G loss: 1.548608]\n",
      "778 [D loss: 0.543554, acc.: 69.92%] [G loss: 1.367502]\n",
      "779 [D loss: 0.516198, acc.: 74.61%] [G loss: 1.239748]\n",
      "780 [D loss: 0.518004, acc.: 75.78%] [G loss: 1.166545]\n",
      "781 [D loss: 0.485227, acc.: 78.91%] [G loss: 1.166936]\n",
      "782 [D loss: 0.514912, acc.: 76.95%] [G loss: 1.166889]\n",
      "783 [D loss: 0.484999, acc.: 78.52%] [G loss: 1.202589]\n",
      "784 [D loss: 0.476123, acc.: 79.30%] [G loss: 1.221263]\n",
      "785 [D loss: 0.465559, acc.: 80.86%] [G loss: 1.213595]\n",
      "786 [D loss: 0.494634, acc.: 79.30%] [G loss: 1.177284]\n",
      "787 [D loss: 0.488943, acc.: 78.91%] [G loss: 1.201738]\n",
      "788 [D loss: 0.508114, acc.: 75.78%] [G loss: 1.318981]\n",
      "789 [D loss: 0.462980, acc.: 78.91%] [G loss: 1.351432]\n",
      "790 [D loss: 0.486003, acc.: 76.95%] [G loss: 1.379848]\n",
      "791 [D loss: 0.481224, acc.: 76.17%] [G loss: 1.389140]\n",
      "792 [D loss: 0.478873, acc.: 78.12%] [G loss: 1.298006]\n",
      "793 [D loss: 0.488533, acc.: 77.34%] [G loss: 1.239298]\n",
      "794 [D loss: 0.495509, acc.: 77.73%] [G loss: 1.209481]\n",
      "795 [D loss: 0.473591, acc.: 78.52%] [G loss: 1.184782]\n",
      "796 [D loss: 0.491446, acc.: 77.34%] [G loss: 1.147490]\n",
      "797 [D loss: 0.501532, acc.: 76.17%] [G loss: 1.178098]\n",
      "798 [D loss: 0.508410, acc.: 75.78%] [G loss: 1.162488]\n",
      "799 [D loss: 0.470562, acc.: 79.30%] [G loss: 1.180347]\n",
      "800 [D loss: 0.533175, acc.: 74.22%] [G loss: 1.162446]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "801 [D loss: 0.518620, acc.: 71.88%] [G loss: 1.097553]\n",
      "802 [D loss: 0.561478, acc.: 70.70%] [G loss: 1.028748]\n",
      "803 [D loss: 0.625611, acc.: 64.06%] [G loss: 1.143097]\n",
      "804 [D loss: 0.563298, acc.: 65.62%] [G loss: 1.665580]\n",
      "805 [D loss: 0.521466, acc.: 73.83%] [G loss: 1.423118]\n",
      "806 [D loss: 0.559138, acc.: 73.05%] [G loss: 1.312482]\n",
      "807 [D loss: 0.558481, acc.: 71.09%] [G loss: 1.361672]\n",
      "808 [D loss: 0.540841, acc.: 70.31%] [G loss: 1.341768]\n",
      "809 [D loss: 0.555357, acc.: 69.53%] [G loss: 1.314460]\n",
      "810 [D loss: 0.521962, acc.: 73.05%] [G loss: 1.300671]\n",
      "811 [D loss: 0.542129, acc.: 71.88%] [G loss: 1.228991]\n",
      "812 [D loss: 0.544135, acc.: 70.70%] [G loss: 1.206867]\n",
      "813 [D loss: 0.519952, acc.: 73.83%] [G loss: 1.256522]\n",
      "814 [D loss: 0.528042, acc.: 73.83%] [G loss: 1.204348]\n",
      "815 [D loss: 0.520226, acc.: 73.05%] [G loss: 1.185997]\n",
      "816 [D loss: 0.522727, acc.: 72.66%] [G loss: 1.188947]\n",
      "817 [D loss: 0.506735, acc.: 74.22%] [G loss: 1.187681]\n",
      "818 [D loss: 0.510987, acc.: 74.61%] [G loss: 1.167750]\n",
      "819 [D loss: 0.526750, acc.: 72.66%] [G loss: 1.159183]\n",
      "820 [D loss: 0.527624, acc.: 72.27%] [G loss: 1.141671]\n",
      "821 [D loss: 0.513528, acc.: 74.22%] [G loss: 1.137561]\n",
      "822 [D loss: 0.510914, acc.: 73.44%] [G loss: 1.121909]\n",
      "823 [D loss: 0.536992, acc.: 73.05%] [G loss: 1.122027]\n",
      "824 [D loss: 0.528738, acc.: 73.83%] [G loss: 1.196425]\n",
      "825 [D loss: 0.502291, acc.: 74.22%] [G loss: 1.187738]\n",
      "826 [D loss: 0.518023, acc.: 73.05%] [G loss: 1.193283]\n",
      "827 [D loss: 0.484998, acc.: 76.17%] [G loss: 1.203103]\n",
      "828 [D loss: 0.479755, acc.: 78.12%] [G loss: 1.244957]\n",
      "829 [D loss: 0.494622, acc.: 75.00%] [G loss: 1.206838]\n",
      "830 [D loss: 0.504346, acc.: 73.44%] [G loss: 1.265429]\n",
      "831 [D loss: 0.526939, acc.: 71.09%] [G loss: 1.178281]\n",
      "832 [D loss: 0.567015, acc.: 69.53%] [G loss: 1.186733]\n",
      "833 [D loss: 0.576471, acc.: 67.19%] [G loss: 1.305450]\n",
      "834 [D loss: 0.502514, acc.: 75.78%] [G loss: 1.348770]\n",
      "835 [D loss: 0.502215, acc.: 75.00%] [G loss: 1.284933]\n",
      "836 [D loss: 0.492632, acc.: 76.17%] [G loss: 1.265265]\n",
      "837 [D loss: 0.477342, acc.: 77.73%] [G loss: 1.202951]\n",
      "838 [D loss: 0.475567, acc.: 79.30%] [G loss: 1.248912]\n",
      "839 [D loss: 0.468399, acc.: 80.08%] [G loss: 1.224012]\n",
      "840 [D loss: 0.468410, acc.: 78.91%] [G loss: 1.210511]\n",
      "841 [D loss: 0.489122, acc.: 78.52%] [G loss: 1.214653]\n",
      "842 [D loss: 0.472670, acc.: 78.12%] [G loss: 1.192660]\n",
      "843 [D loss: 0.511617, acc.: 76.17%] [G loss: 1.276894]\n",
      "844 [D loss: 0.500432, acc.: 76.95%] [G loss: 1.260143]\n",
      "845 [D loss: 0.505357, acc.: 74.61%] [G loss: 1.371881]\n",
      "846 [D loss: 0.485265, acc.: 76.17%] [G loss: 1.424402]\n",
      "847 [D loss: 0.475849, acc.: 77.34%] [G loss: 1.422649]\n",
      "848 [D loss: 0.474328, acc.: 77.34%] [G loss: 1.441845]\n",
      "849 [D loss: 0.496788, acc.: 76.17%] [G loss: 1.392797]\n",
      "850 [D loss: 0.501927, acc.: 76.17%] [G loss: 1.428967]\n",
      "851 [D loss: 0.493460, acc.: 75.78%] [G loss: 1.407150]\n",
      "852 [D loss: 0.507794, acc.: 72.66%] [G loss: 1.274038]\n",
      "853 [D loss: 0.540966, acc.: 73.44%] [G loss: 1.238170]\n",
      "854 [D loss: 0.531195, acc.: 72.66%] [G loss: 1.181135]\n",
      "855 [D loss: 0.534157, acc.: 72.27%] [G loss: 1.148044]\n",
      "856 [D loss: 0.514121, acc.: 76.56%] [G loss: 1.246157]\n",
      "857 [D loss: 0.460397, acc.: 80.86%] [G loss: 1.361576]\n",
      "858 [D loss: 0.456315, acc.: 80.86%] [G loss: 1.438326]\n",
      "859 [D loss: 0.461726, acc.: 79.30%] [G loss: 1.451533]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "860 [D loss: 0.468620, acc.: 80.08%] [G loss: 1.393657]\n",
      "861 [D loss: 0.462564, acc.: 79.30%] [G loss: 1.381697]\n",
      "862 [D loss: 0.466460, acc.: 80.08%] [G loss: 1.386559]\n",
      "863 [D loss: 0.443892, acc.: 79.69%] [G loss: 1.489055]\n",
      "864 [D loss: 0.437099, acc.: 82.03%] [G loss: 1.475512]\n",
      "865 [D loss: 0.455188, acc.: 78.91%] [G loss: 1.498565]\n",
      "866 [D loss: 0.476300, acc.: 76.17%] [G loss: 1.425505]\n",
      "867 [D loss: 0.498241, acc.: 76.17%] [G loss: 1.443000]\n",
      "868 [D loss: 0.462221, acc.: 78.52%] [G loss: 1.402377]\n",
      "869 [D loss: 0.443390, acc.: 80.08%] [G loss: 1.340365]\n",
      "870 [D loss: 0.447590, acc.: 78.91%] [G loss: 1.362276]\n",
      "871 [D loss: 0.472991, acc.: 79.30%] [G loss: 1.346676]\n",
      "872 [D loss: 0.485674, acc.: 75.78%] [G loss: 1.369192]\n",
      "873 [D loss: 0.520273, acc.: 74.22%] [G loss: 1.316821]\n",
      "874 [D loss: 0.510982, acc.: 75.78%] [G loss: 1.460603]\n",
      "875 [D loss: 0.499174, acc.: 78.12%] [G loss: 1.456308]\n",
      "876 [D loss: 0.478363, acc.: 78.91%] [G loss: 1.299353]\n",
      "877 [D loss: 0.468748, acc.: 77.73%] [G loss: 1.373995]\n",
      "878 [D loss: 0.435498, acc.: 80.86%] [G loss: 1.464255]\n",
      "879 [D loss: 0.419162, acc.: 81.25%] [G loss: 1.452590]\n",
      "880 [D loss: 0.433177, acc.: 78.12%] [G loss: 1.528226]\n",
      "881 [D loss: 0.451291, acc.: 77.73%] [G loss: 1.545155]\n",
      "882 [D loss: 0.473605, acc.: 77.73%] [G loss: 1.452157]\n",
      "883 [D loss: 0.501847, acc.: 75.00%] [G loss: 1.370684]\n",
      "884 [D loss: 0.506595, acc.: 75.78%] [G loss: 1.347079]\n",
      "885 [D loss: 0.491840, acc.: 75.78%] [G loss: 1.404136]\n",
      "886 [D loss: 0.486168, acc.: 75.00%] [G loss: 1.416428]\n",
      "887 [D loss: 0.475725, acc.: 76.95%] [G loss: 1.450620]\n",
      "888 [D loss: 0.449467, acc.: 78.52%] [G loss: 1.470923]\n",
      "889 [D loss: 0.453944, acc.: 78.12%] [G loss: 1.466392]\n",
      "890 [D loss: 0.451375, acc.: 81.25%] [G loss: 1.359003]\n",
      "891 [D loss: 0.449406, acc.: 80.86%] [G loss: 1.417320]\n",
      "892 [D loss: 0.426690, acc.: 81.64%] [G loss: 1.395885]\n",
      "893 [D loss: 0.412657, acc.: 82.42%] [G loss: 1.428634]\n",
      "894 [D loss: 0.392025, acc.: 83.20%] [G loss: 1.415834]\n",
      "895 [D loss: 0.464313, acc.: 79.69%] [G loss: 1.511213]\n",
      "896 [D loss: 0.442222, acc.: 80.08%] [G loss: 1.513141]\n",
      "897 [D loss: 0.470593, acc.: 78.12%] [G loss: 1.398916]\n",
      "898 [D loss: 0.560139, acc.: 67.19%] [G loss: 1.526566]\n",
      "899 [D loss: 0.521735, acc.: 71.48%] [G loss: 2.214463]\n",
      "900 [D loss: 0.469003, acc.: 74.61%] [G loss: 1.806221]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "901 [D loss: 0.488513, acc.: 77.73%] [G loss: 1.168829]\n",
      "902 [D loss: 0.502182, acc.: 79.30%] [G loss: 1.183137]\n",
      "903 [D loss: 0.473574, acc.: 80.47%] [G loss: 1.213199]\n",
      "904 [D loss: 0.453581, acc.: 80.08%] [G loss: 1.310420]\n",
      "905 [D loss: 0.452157, acc.: 81.25%] [G loss: 1.358686]\n",
      "906 [D loss: 0.450395, acc.: 79.69%] [G loss: 1.285903]\n",
      "907 [D loss: 0.455130, acc.: 79.30%] [G loss: 1.353012]\n",
      "908 [D loss: 0.445709, acc.: 80.86%] [G loss: 1.410323]\n",
      "909 [D loss: 0.452256, acc.: 82.03%] [G loss: 1.378384]\n",
      "910 [D loss: 0.478632, acc.: 79.30%] [G loss: 1.418117]\n",
      "911 [D loss: 0.460024, acc.: 80.86%] [G loss: 1.428874]\n",
      "912 [D loss: 0.455579, acc.: 77.73%] [G loss: 1.366481]\n",
      "913 [D loss: 0.464543, acc.: 78.91%] [G loss: 1.293692]\n",
      "914 [D loss: 0.456497, acc.: 80.08%] [G loss: 1.300163]\n",
      "915 [D loss: 0.437069, acc.: 81.25%] [G loss: 1.392627]\n",
      "916 [D loss: 0.447670, acc.: 79.69%] [G loss: 1.363955]\n",
      "917 [D loss: 0.455018, acc.: 78.12%] [G loss: 1.369570]\n",
      "918 [D loss: 0.432855, acc.: 77.73%] [G loss: 1.482814]\n",
      "919 [D loss: 0.406543, acc.: 80.08%] [G loss: 1.582977]\n",
      "920 [D loss: 0.422505, acc.: 79.30%] [G loss: 1.567149]\n",
      "921 [D loss: 0.426194, acc.: 79.30%] [G loss: 1.446698]\n",
      "922 [D loss: 0.448637, acc.: 78.91%] [G loss: 1.409167]\n",
      "923 [D loss: 0.483285, acc.: 73.44%] [G loss: 1.372405]\n",
      "924 [D loss: 0.467014, acc.: 75.39%] [G loss: 1.473003]\n",
      "925 [D loss: 0.481525, acc.: 74.22%] [G loss: 1.443622]\n",
      "926 [D loss: 0.493129, acc.: 75.39%] [G loss: 1.497871]\n",
      "927 [D loss: 0.455235, acc.: 77.34%] [G loss: 1.484147]\n",
      "928 [D loss: 0.482869, acc.: 77.73%] [G loss: 1.350738]\n",
      "929 [D loss: 0.451998, acc.: 81.25%] [G loss: 1.350987]\n",
      "930 [D loss: 0.465522, acc.: 75.78%] [G loss: 1.461215]\n",
      "931 [D loss: 0.444727, acc.: 78.52%] [G loss: 1.443499]\n",
      "932 [D loss: 0.490873, acc.: 74.61%] [G loss: 1.402756]\n",
      "933 [D loss: 0.491740, acc.: 74.22%] [G loss: 1.485168]\n",
      "934 [D loss: 0.465675, acc.: 77.73%] [G loss: 1.518659]\n",
      "935 [D loss: 0.483511, acc.: 77.73%] [G loss: 1.545732]\n",
      "936 [D loss: 0.456971, acc.: 78.12%] [G loss: 1.593327]\n",
      "937 [D loss: 0.415942, acc.: 80.47%] [G loss: 1.618223]\n",
      "938 [D loss: 0.407415, acc.: 83.98%] [G loss: 1.427127]\n",
      "939 [D loss: 0.438230, acc.: 80.08%] [G loss: 1.426561]\n",
      "940 [D loss: 0.465816, acc.: 77.73%] [G loss: 1.358056]\n",
      "941 [D loss: 0.436335, acc.: 80.08%] [G loss: 1.398149]\n",
      "942 [D loss: 0.456396, acc.: 78.12%] [G loss: 1.545647]\n",
      "943 [D loss: 0.444359, acc.: 79.69%] [G loss: 1.530561]\n",
      "944 [D loss: 0.438975, acc.: 79.69%] [G loss: 1.484859]\n",
      "945 [D loss: 0.428853, acc.: 82.81%] [G loss: 1.481974]\n",
      "946 [D loss: 0.440192, acc.: 82.03%] [G loss: 1.454185]\n",
      "947 [D loss: 0.418005, acc.: 84.38%] [G loss: 1.365760]\n",
      "948 [D loss: 0.440588, acc.: 80.86%] [G loss: 1.441379]\n",
      "949 [D loss: 0.411003, acc.: 84.77%] [G loss: 1.425539]\n",
      "950 [D loss: 0.457079, acc.: 78.52%] [G loss: 1.504152]\n",
      "951 [D loss: 0.443770, acc.: 80.47%] [G loss: 1.483692]\n",
      "952 [D loss: 0.473686, acc.: 77.34%] [G loss: 1.415963]\n",
      "953 [D loss: 0.476691, acc.: 76.56%] [G loss: 1.430118]\n",
      "954 [D loss: 0.463877, acc.: 80.08%] [G loss: 1.469073]\n",
      "955 [D loss: 0.466384, acc.: 78.12%] [G loss: 1.474608]\n",
      "956 [D loss: 0.435392, acc.: 80.08%] [G loss: 1.517501]\n",
      "957 [D loss: 0.435806, acc.: 80.47%] [G loss: 1.432302]\n",
      "958 [D loss: 0.404403, acc.: 82.42%] [G loss: 1.478881]\n",
      "959 [D loss: 0.394300, acc.: 82.42%] [G loss: 1.518902]\n",
      "960 [D loss: 0.380580, acc.: 84.77%] [G loss: 1.559248]\n",
      "961 [D loss: 0.393013, acc.: 83.59%] [G loss: 1.549679]\n",
      "962 [D loss: 0.391677, acc.: 84.77%] [G loss: 1.509574]\n",
      "963 [D loss: 0.413000, acc.: 82.81%] [G loss: 1.538985]\n",
      "964 [D loss: 0.416549, acc.: 82.42%] [G loss: 1.469856]\n",
      "965 [D loss: 0.418818, acc.: 80.47%] [G loss: 1.490078]\n",
      "966 [D loss: 0.434226, acc.: 82.03%] [G loss: 1.521726]\n",
      "967 [D loss: 0.430200, acc.: 82.03%] [G loss: 1.588611]\n",
      "968 [D loss: 0.423293, acc.: 80.47%] [G loss: 1.565681]\n",
      "969 [D loss: 0.382671, acc.: 83.98%] [G loss: 1.583134]\n",
      "970 [D loss: 0.378899, acc.: 83.20%] [G loss: 1.669928]\n",
      "971 [D loss: 0.384894, acc.: 83.59%] [G loss: 1.529688]\n",
      "972 [D loss: 0.419522, acc.: 82.03%] [G loss: 1.680691]\n",
      "973 [D loss: 0.428519, acc.: 80.08%] [G loss: 1.458580]\n",
      "974 [D loss: 0.454230, acc.: 80.86%] [G loss: 1.415532]\n",
      "975 [D loss: 0.435542, acc.: 81.64%] [G loss: 1.503581]\n",
      "976 [D loss: 0.397036, acc.: 85.94%] [G loss: 1.531397]\n",
      "977 [D loss: 0.427624, acc.: 82.42%] [G loss: 1.636887]\n",
      "978 [D loss: 0.412822, acc.: 82.81%] [G loss: 1.547252]\n",
      "979 [D loss: 0.419912, acc.: 82.42%] [G loss: 1.447576]\n",
      "980 [D loss: 0.395337, acc.: 83.20%] [G loss: 1.538682]\n",
      "981 [D loss: 0.397229, acc.: 83.98%] [G loss: 1.643337]\n",
      "982 [D loss: 0.402460, acc.: 82.03%] [G loss: 1.598481]\n",
      "983 [D loss: 0.384410, acc.: 82.81%] [G loss: 1.585602]\n",
      "984 [D loss: 0.421571, acc.: 82.03%] [G loss: 1.637201]\n",
      "985 [D loss: 0.463889, acc.: 78.91%] [G loss: 1.642705]\n",
      "986 [D loss: 0.473404, acc.: 77.34%] [G loss: 1.826455]\n",
      "987 [D loss: 0.455373, acc.: 80.47%] [G loss: 1.730338]\n",
      "988 [D loss: 0.439085, acc.: 79.30%] [G loss: 1.604086]\n",
      "989 [D loss: 0.392816, acc.: 83.59%] [G loss: 1.547736]\n",
      "990 [D loss: 0.387811, acc.: 83.20%] [G loss: 1.563864]\n",
      "991 [D loss: 0.431082, acc.: 81.64%] [G loss: 1.559042]\n",
      "992 [D loss: 0.423079, acc.: 79.69%] [G loss: 1.703741]\n",
      "993 [D loss: 0.398397, acc.: 82.42%] [G loss: 1.794697]\n",
      "994 [D loss: 0.401061, acc.: 85.16%] [G loss: 1.725490]\n",
      "995 [D loss: 0.394162, acc.: 82.81%] [G loss: 1.667916]\n",
      "996 [D loss: 0.398407, acc.: 82.42%] [G loss: 1.679538]\n",
      "997 [D loss: 0.420590, acc.: 82.42%] [G loss: 1.621687]\n",
      "998 [D loss: 0.449959, acc.: 78.12%] [G loss: 1.607513]\n",
      "999 [D loss: 0.448278, acc.: 79.69%] [G loss: 1.481863]\n",
      "1000 [D loss: 0.467965, acc.: 76.56%] [G loss: 1.455891]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "generated_data\n",
      "1001 [D loss: 0.471928, acc.: 76.56%] [G loss: 1.662675]\n",
      "1002 [D loss: 0.419803, acc.: 79.69%] [G loss: 1.746712]\n",
      "1003 [D loss: 0.404131, acc.: 83.20%] [G loss: 1.596799]\n",
      "1004 [D loss: 0.393438, acc.: 83.59%] [G loss: 1.623704]\n",
      "1005 [D loss: 0.388120, acc.: 83.98%] [G loss: 1.524998]\n",
      "1006 [D loss: 0.400253, acc.: 82.81%] [G loss: 1.556389]\n",
      "1007 [D loss: 0.409074, acc.: 80.86%] [G loss: 1.585865]\n",
      "1008 [D loss: 0.377932, acc.: 82.81%] [G loss: 1.648190]\n",
      "1009 [D loss: 0.386397, acc.: 81.64%] [G loss: 1.690170]\n",
      "1010 [D loss: 0.401969, acc.: 81.64%] [G loss: 1.551744]\n",
      "1011 [D loss: 0.422355, acc.: 79.69%] [G loss: 1.515615]\n",
      "1012 [D loss: 0.444338, acc.: 77.34%] [G loss: 1.603685]\n",
      "1013 [D loss: 0.410152, acc.: 83.59%] [G loss: 1.501407]\n",
      "1014 [D loss: 0.446898, acc.: 77.34%] [G loss: 1.518686]\n",
      "1015 [D loss: 0.400179, acc.: 83.20%] [G loss: 1.586310]\n",
      "1016 [D loss: 0.403746, acc.: 81.64%] [G loss: 1.564426]\n",
      "1017 [D loss: 0.410944, acc.: 81.25%] [G loss: 1.635561]\n",
      "1018 [D loss: 0.382287, acc.: 83.20%] [G loss: 1.716991]\n",
      "1019 [D loss: 0.374360, acc.: 84.77%] [G loss: 1.700454]\n",
      "1020 [D loss: 0.415931, acc.: 82.03%] [G loss: 1.577215]\n",
      "1021 [D loss: 0.388952, acc.: 84.77%] [G loss: 1.538828]\n",
      "1022 [D loss: 0.397694, acc.: 81.25%] [G loss: 1.498894]\n",
      "1023 [D loss: 0.397737, acc.: 83.20%] [G loss: 1.563593]\n",
      "1024 [D loss: 0.388556, acc.: 85.16%] [G loss: 1.611731]\n",
      "1025 [D loss: 0.386873, acc.: 83.20%] [G loss: 1.621743]\n",
      "1026 [D loss: 0.385326, acc.: 83.59%] [G loss: 1.639156]\n",
      "1027 [D loss: 0.398789, acc.: 83.20%] [G loss: 1.590473]\n",
      "1028 [D loss: 0.395006, acc.: 83.59%] [G loss: 1.413997]\n",
      "1029 [D loss: 0.456201, acc.: 80.08%] [G loss: 1.526135]\n",
      "1030 [D loss: 0.389873, acc.: 82.42%] [G loss: 1.632253]\n",
      "1031 [D loss: 0.400794, acc.: 83.98%] [G loss: 1.616851]\n",
      "1032 [D loss: 0.396686, acc.: 81.64%] [G loss: 1.564182]\n",
      "1033 [D loss: 0.390034, acc.: 83.59%] [G loss: 1.534389]\n",
      "1034 [D loss: 0.392921, acc.: 82.03%] [G loss: 1.451410]\n",
      "1035 [D loss: 0.377374, acc.: 82.42%] [G loss: 1.505383]\n",
      "1036 [D loss: 0.384613, acc.: 82.42%] [G loss: 1.598418]\n",
      "1037 [D loss: 0.390707, acc.: 82.81%] [G loss: 1.606247]\n",
      "1038 [D loss: 0.388781, acc.: 83.20%] [G loss: 1.602868]\n",
      "1039 [D loss: 0.387627, acc.: 82.03%] [G loss: 1.569978]\n",
      "1040 [D loss: 0.375229, acc.: 83.59%] [G loss: 1.576360]\n",
      "1041 [D loss: 0.385382, acc.: 82.03%] [G loss: 1.540752]\n",
      "1042 [D loss: 0.380323, acc.: 83.98%] [G loss: 1.555594]\n",
      "1043 [D loss: 0.381244, acc.: 81.64%] [G loss: 1.743215]\n",
      "1044 [D loss: 0.370052, acc.: 84.38%] [G loss: 1.652898]\n",
      "1045 [D loss: 0.398965, acc.: 81.64%] [G loss: 1.631379]\n",
      "1046 [D loss: 0.416711, acc.: 78.52%] [G loss: 1.611575]\n",
      "1047 [D loss: 0.458640, acc.: 77.34%] [G loss: 1.668576]\n",
      "1048 [D loss: 0.393868, acc.: 80.47%] [G loss: 1.724697]\n",
      "1049 [D loss: 0.401841, acc.: 80.08%] [G loss: 1.775614]\n",
      "1050 [D loss: 0.389630, acc.: 82.81%] [G loss: 1.738111]\n",
      "1051 [D loss: 0.398738, acc.: 81.64%] [G loss: 1.590838]\n",
      "1052 [D loss: 0.394332, acc.: 83.59%] [G loss: 1.665117]\n",
      "1053 [D loss: 0.387781, acc.: 82.42%] [G loss: 1.751668]\n",
      "1054 [D loss: 0.387413, acc.: 83.59%] [G loss: 1.661336]\n",
      "1055 [D loss: 0.385472, acc.: 82.42%] [G loss: 1.513651]\n",
      "1056 [D loss: 0.391778, acc.: 82.42%] [G loss: 1.512241]\n",
      "1057 [D loss: 0.397588, acc.: 81.25%] [G loss: 1.527100]\n",
      "1058 [D loss: 0.389175, acc.: 82.42%] [G loss: 1.531286]\n",
      "1059 [D loss: 0.386618, acc.: 82.42%] [G loss: 1.586976]\n",
      "1060 [D loss: 0.375723, acc.: 85.16%] [G loss: 1.629255]\n",
      "1061 [D loss: 0.372407, acc.: 83.59%] [G loss: 1.638762]\n",
      "1062 [D loss: 0.386136, acc.: 83.20%] [G loss: 1.614607]\n",
      "1063 [D loss: 0.393965, acc.: 81.64%] [G loss: 1.711503]\n",
      "1064 [D loss: 0.377456, acc.: 84.38%] [G loss: 1.533227]\n",
      "1065 [D loss: 0.386984, acc.: 83.20%] [G loss: 1.603517]\n",
      "1066 [D loss: 0.381513, acc.: 83.20%] [G loss: 1.647777]\n",
      "1067 [D loss: 0.382136, acc.: 82.81%] [G loss: 1.667373]\n",
      "1068 [D loss: 0.423533, acc.: 80.47%] [G loss: 1.608471]\n",
      "1069 [D loss: 0.388415, acc.: 82.81%] [G loss: 1.702143]\n",
      "1070 [D loss: 0.428618, acc.: 80.86%] [G loss: 1.689226]\n",
      "1071 [D loss: 0.401356, acc.: 80.86%] [G loss: 1.608275]\n",
      "1072 [D loss: 0.403297, acc.: 82.81%] [G loss: 1.547887]\n",
      "1073 [D loss: 0.392281, acc.: 81.64%] [G loss: 1.608604]\n",
      "1074 [D loss: 0.401087, acc.: 80.47%] [G loss: 1.632638]\n",
      "1075 [D loss: 0.367644, acc.: 85.16%] [G loss: 1.669163]\n",
      "1076 [D loss: 0.381164, acc.: 83.98%] [G loss: 1.587005]\n",
      "1077 [D loss: 0.403804, acc.: 82.42%] [G loss: 1.524788]\n",
      "1078 [D loss: 0.407368, acc.: 81.25%] [G loss: 1.643693]\n",
      "1079 [D loss: 0.408447, acc.: 81.25%] [G loss: 1.618577]\n",
      "1080 [D loss: 0.378095, acc.: 81.64%] [G loss: 1.669842]\n",
      "1081 [D loss: 0.388806, acc.: 80.47%] [G loss: 1.655898]\n",
      "1082 [D loss: 0.393176, acc.: 79.69%] [G loss: 1.592054]\n",
      "1083 [D loss: 0.372604, acc.: 85.94%] [G loss: 1.643302]\n",
      "1084 [D loss: 0.415274, acc.: 78.52%] [G loss: 1.628443]\n",
      "1085 [D loss: 0.392849, acc.: 80.47%] [G loss: 1.598913]\n",
      "1086 [D loss: 0.421078, acc.: 78.52%] [G loss: 1.584963]\n",
      "1087 [D loss: 0.429138, acc.: 79.69%] [G loss: 1.653453]\n",
      "1088 [D loss: 0.378578, acc.: 84.38%] [G loss: 1.572493]\n",
      "1089 [D loss: 0.369979, acc.: 85.16%] [G loss: 1.538145]\n",
      "1090 [D loss: 0.383812, acc.: 82.42%] [G loss: 1.567960]\n",
      "1091 [D loss: 0.387702, acc.: 82.42%] [G loss: 1.616168]\n",
      "1092 [D loss: 0.395263, acc.: 83.20%] [G loss: 1.608284]\n",
      "1093 [D loss: 0.413126, acc.: 79.30%] [G loss: 1.657248]\n",
      "1094 [D loss: 0.392578, acc.: 81.64%] [G loss: 1.562736]\n",
      "1095 [D loss: 0.392510, acc.: 82.81%] [G loss: 1.628042]\n",
      "1096 [D loss: 0.372875, acc.: 83.98%] [G loss: 1.604938]\n",
      "1097 [D loss: 0.397123, acc.: 82.03%] [G loss: 1.759528]\n",
      "1098 [D loss: 0.398609, acc.: 81.64%] [G loss: 1.630567]\n",
      "1099 [D loss: 0.381148, acc.: 80.47%] [G loss: 1.642228]\n",
      "1100 [D loss: 0.373746, acc.: 84.38%] [G loss: 1.630822]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "1101 [D loss: 0.461718, acc.: 77.34%] [G loss: 1.653243]\n",
      "1102 [D loss: 0.396228, acc.: 80.86%] [G loss: 1.685189]\n",
      "1103 [D loss: 0.396102, acc.: 82.81%] [G loss: 1.659472]\n",
      "1104 [D loss: 0.374896, acc.: 83.20%] [G loss: 1.617968]\n",
      "1105 [D loss: 0.381444, acc.: 85.94%] [G loss: 1.532367]\n",
      "1106 [D loss: 0.380080, acc.: 82.42%] [G loss: 1.653149]\n",
      "1107 [D loss: 0.414182, acc.: 79.69%] [G loss: 1.669142]\n",
      "1108 [D loss: 0.374259, acc.: 83.20%] [G loss: 1.567302]\n",
      "1109 [D loss: 0.419114, acc.: 76.17%] [G loss: 1.637967]\n",
      "1110 [D loss: 0.397517, acc.: 81.25%] [G loss: 1.534873]\n",
      "1111 [D loss: 0.403073, acc.: 82.03%] [G loss: 1.570661]\n",
      "1112 [D loss: 0.360936, acc.: 83.98%] [G loss: 1.559629]\n",
      "1113 [D loss: 0.380196, acc.: 82.81%] [G loss: 1.619660]\n",
      "1114 [D loss: 0.410349, acc.: 80.47%] [G loss: 1.712841]\n",
      "1115 [D loss: 0.353825, acc.: 85.55%] [G loss: 1.814215]\n",
      "1116 [D loss: 0.360826, acc.: 83.59%] [G loss: 1.764415]\n",
      "1117 [D loss: 0.402217, acc.: 83.20%] [G loss: 1.777432]\n",
      "1118 [D loss: 0.367845, acc.: 82.42%] [G loss: 1.676394]\n",
      "1119 [D loss: 0.418970, acc.: 80.47%] [G loss: 1.696886]\n",
      "1120 [D loss: 0.386045, acc.: 83.59%] [G loss: 1.685807]\n",
      "1121 [D loss: 0.385656, acc.: 82.81%] [G loss: 1.563718]\n",
      "1122 [D loss: 0.377808, acc.: 83.98%] [G loss: 1.606525]\n",
      "1123 [D loss: 0.384366, acc.: 82.42%] [G loss: 1.674236]\n",
      "1124 [D loss: 0.364293, acc.: 84.77%] [G loss: 1.765057]\n",
      "1125 [D loss: 0.369898, acc.: 84.38%] [G loss: 1.763665]\n",
      "1126 [D loss: 0.339689, acc.: 86.33%] [G loss: 1.801063]\n",
      "1127 [D loss: 0.361153, acc.: 83.59%] [G loss: 1.670437]\n",
      "1128 [D loss: 0.359176, acc.: 85.94%] [G loss: 1.843441]\n",
      "1129 [D loss: 0.362813, acc.: 83.98%] [G loss: 1.684330]\n",
      "1130 [D loss: 0.378627, acc.: 82.81%] [G loss: 1.604832]\n",
      "1131 [D loss: 0.351820, acc.: 85.55%] [G loss: 1.578304]\n",
      "1132 [D loss: 0.402218, acc.: 79.30%] [G loss: 1.667738]\n",
      "1133 [D loss: 0.381565, acc.: 83.59%] [G loss: 1.646974]\n",
      "1134 [D loss: 0.381914, acc.: 82.81%] [G loss: 1.720701]\n",
      "1135 [D loss: 0.384401, acc.: 82.81%] [G loss: 1.764917]\n",
      "1136 [D loss: 0.358465, acc.: 84.77%] [G loss: 1.762060]\n",
      "1137 [D loss: 0.363166, acc.: 85.16%] [G loss: 1.629565]\n",
      "1138 [D loss: 0.364363, acc.: 84.38%] [G loss: 1.673145]\n",
      "1139 [D loss: 0.380591, acc.: 85.16%] [G loss: 1.831586]\n",
      "1140 [D loss: 0.367215, acc.: 84.77%] [G loss: 1.809899]\n",
      "1141 [D loss: 0.376400, acc.: 82.03%] [G loss: 1.721804]\n",
      "1142 [D loss: 0.389460, acc.: 84.38%] [G loss: 1.699247]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1143 [D loss: 0.357181, acc.: 85.16%] [G loss: 1.681373]\n",
      "1144 [D loss: 0.387529, acc.: 82.42%] [G loss: 1.660942]\n",
      "1145 [D loss: 0.338489, acc.: 86.72%] [G loss: 1.768677]\n",
      "1146 [D loss: 0.361618, acc.: 83.59%] [G loss: 1.725510]\n",
      "1147 [D loss: 0.353377, acc.: 83.98%] [G loss: 1.738087]\n",
      "1148 [D loss: 0.363668, acc.: 80.86%] [G loss: 1.752634]\n",
      "1149 [D loss: 0.360784, acc.: 84.77%] [G loss: 1.793872]\n",
      "1150 [D loss: 0.374213, acc.: 82.42%] [G loss: 1.676481]\n",
      "1151 [D loss: 0.372349, acc.: 82.81%] [G loss: 1.855904]\n",
      "1152 [D loss: 0.376699, acc.: 82.81%] [G loss: 1.839736]\n",
      "1153 [D loss: 0.379091, acc.: 83.20%] [G loss: 1.692280]\n",
      "1154 [D loss: 0.365958, acc.: 82.42%] [G loss: 1.615889]\n",
      "1155 [D loss: 0.335225, acc.: 85.55%] [G loss: 1.699451]\n",
      "1156 [D loss: 0.392882, acc.: 80.47%] [G loss: 1.762641]\n",
      "1157 [D loss: 0.364246, acc.: 83.20%] [G loss: 1.785413]\n",
      "1158 [D loss: 0.358164, acc.: 82.42%] [G loss: 1.672139]\n",
      "1159 [D loss: 0.369259, acc.: 82.03%] [G loss: 1.697261]\n",
      "1160 [D loss: 0.334769, acc.: 83.20%] [G loss: 1.690874]\n",
      "1161 [D loss: 0.347298, acc.: 85.55%] [G loss: 1.617055]\n",
      "1162 [D loss: 0.347393, acc.: 84.77%] [G loss: 1.764534]\n",
      "1163 [D loss: 0.355329, acc.: 82.03%] [G loss: 1.708935]\n",
      "1164 [D loss: 0.369596, acc.: 83.20%] [G loss: 1.704981]\n",
      "1165 [D loss: 0.359141, acc.: 83.59%] [G loss: 1.767997]\n",
      "1166 [D loss: 0.369473, acc.: 81.64%] [G loss: 1.736473]\n",
      "1167 [D loss: 0.375316, acc.: 82.81%] [G loss: 1.722817]\n",
      "1168 [D loss: 0.371143, acc.: 82.81%] [G loss: 1.872207]\n",
      "1169 [D loss: 0.346662, acc.: 84.77%] [G loss: 1.699773]\n",
      "1170 [D loss: 0.361553, acc.: 81.64%] [G loss: 1.663880]\n",
      "1171 [D loss: 0.346997, acc.: 86.33%] [G loss: 1.703593]\n",
      "1172 [D loss: 0.340219, acc.: 85.16%] [G loss: 1.703252]\n",
      "1173 [D loss: 0.335140, acc.: 85.94%] [G loss: 1.785115]\n",
      "1174 [D loss: 0.383748, acc.: 82.42%] [G loss: 1.797442]\n",
      "1175 [D loss: 0.334122, acc.: 84.38%] [G loss: 1.846442]\n",
      "1176 [D loss: 0.344492, acc.: 85.16%] [G loss: 1.797443]\n",
      "1177 [D loss: 0.346108, acc.: 83.98%] [G loss: 1.679944]\n",
      "1178 [D loss: 0.334185, acc.: 85.55%] [G loss: 1.768093]\n",
      "1179 [D loss: 0.336236, acc.: 86.33%] [G loss: 1.626272]\n",
      "1180 [D loss: 0.360559, acc.: 84.38%] [G loss: 1.817447]\n",
      "1181 [D loss: 0.308471, acc.: 85.55%] [G loss: 1.861646]\n",
      "1182 [D loss: 0.323666, acc.: 85.55%] [G loss: 1.730948]\n",
      "1183 [D loss: 0.349967, acc.: 85.55%] [G loss: 1.739972]\n",
      "1184 [D loss: 0.337331, acc.: 83.98%] [G loss: 1.802203]\n",
      "1185 [D loss: 0.336440, acc.: 84.38%] [G loss: 1.898373]\n",
      "1186 [D loss: 0.346396, acc.: 85.55%] [G loss: 1.874982]\n",
      "1187 [D loss: 0.379500, acc.: 83.98%] [G loss: 1.830110]\n",
      "1188 [D loss: 0.359130, acc.: 84.77%] [G loss: 1.814717]\n",
      "1189 [D loss: 0.336976, acc.: 85.55%] [G loss: 1.725487]\n",
      "1190 [D loss: 0.347549, acc.: 84.77%] [G loss: 1.668852]\n",
      "1191 [D loss: 0.355978, acc.: 84.77%] [G loss: 1.760396]\n",
      "1192 [D loss: 0.333464, acc.: 85.55%] [G loss: 1.771617]\n",
      "1193 [D loss: 0.356136, acc.: 84.38%] [G loss: 1.709818]\n",
      "1194 [D loss: 0.379357, acc.: 83.20%] [G loss: 1.899406]\n",
      "1195 [D loss: 0.347141, acc.: 85.16%] [G loss: 1.746981]\n",
      "1196 [D loss: 0.352248, acc.: 85.55%] [G loss: 1.639265]\n",
      "1197 [D loss: 0.363045, acc.: 84.38%] [G loss: 1.848779]\n",
      "1198 [D loss: 0.315522, acc.: 87.89%] [G loss: 1.743866]\n",
      "1199 [D loss: 0.359452, acc.: 83.20%] [G loss: 1.828559]\n",
      "1200 [D loss: 0.344236, acc.: 85.16%] [G loss: 1.877811]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "1201 [D loss: 0.346545, acc.: 85.16%] [G loss: 1.779867]\n",
      "1202 [D loss: 0.344078, acc.: 85.55%] [G loss: 1.787578]\n",
      "1203 [D loss: 0.348831, acc.: 83.20%] [G loss: 1.835762]\n",
      "1204 [D loss: 0.335242, acc.: 83.98%] [G loss: 1.761350]\n",
      "1205 [D loss: 0.337213, acc.: 85.94%] [G loss: 1.742874]\n",
      "1206 [D loss: 0.326569, acc.: 86.72%] [G loss: 1.924308]\n",
      "1207 [D loss: 0.366545, acc.: 82.81%] [G loss: 1.727105]\n",
      "1208 [D loss: 0.355207, acc.: 83.98%] [G loss: 1.979975]\n",
      "1209 [D loss: 0.339446, acc.: 84.38%] [G loss: 1.965146]\n",
      "1210 [D loss: 0.335711, acc.: 83.59%] [G loss: 1.823409]\n",
      "1211 [D loss: 0.338972, acc.: 85.94%] [G loss: 1.705326]\n",
      "1212 [D loss: 0.313272, acc.: 88.28%] [G loss: 1.858236]\n",
      "1213 [D loss: 0.337436, acc.: 83.98%] [G loss: 1.782805]\n",
      "1214 [D loss: 0.330081, acc.: 87.11%] [G loss: 1.921905]\n",
      "1215 [D loss: 0.354255, acc.: 85.94%] [G loss: 1.921510]\n",
      "1216 [D loss: 0.365160, acc.: 81.64%] [G loss: 2.024440]\n",
      "1217 [D loss: 0.375385, acc.: 82.81%] [G loss: 1.792094]\n",
      "1218 [D loss: 0.327006, acc.: 87.89%] [G loss: 1.872737]\n",
      "1219 [D loss: 0.336084, acc.: 85.16%] [G loss: 1.871659]\n",
      "1220 [D loss: 0.328254, acc.: 87.89%] [G loss: 1.917504]\n",
      "1221 [D loss: 0.386561, acc.: 84.38%] [G loss: 1.807684]\n",
      "1222 [D loss: 0.340833, acc.: 85.55%] [G loss: 1.883052]\n",
      "1223 [D loss: 0.307550, acc.: 88.67%] [G loss: 1.810086]\n",
      "1224 [D loss: 0.344077, acc.: 86.33%] [G loss: 1.786861]\n",
      "1225 [D loss: 0.339474, acc.: 85.55%] [G loss: 1.755511]\n",
      "1226 [D loss: 0.333839, acc.: 87.89%] [G loss: 1.870261]\n",
      "1227 [D loss: 0.296276, acc.: 88.67%] [G loss: 1.769585]\n",
      "1228 [D loss: 0.308139, acc.: 87.89%] [G loss: 1.859815]\n",
      "1229 [D loss: 0.308483, acc.: 87.50%] [G loss: 1.808344]\n",
      "1230 [D loss: 0.355385, acc.: 83.59%] [G loss: 1.793695]\n",
      "1231 [D loss: 0.338631, acc.: 84.38%] [G loss: 1.995847]\n",
      "1232 [D loss: 0.328142, acc.: 85.94%] [G loss: 1.937969]\n",
      "1233 [D loss: 0.310539, acc.: 88.67%] [G loss: 1.800946]\n",
      "1234 [D loss: 0.334666, acc.: 85.94%] [G loss: 1.876631]\n",
      "1235 [D loss: 0.313333, acc.: 87.89%] [G loss: 1.925378]\n",
      "1236 [D loss: 0.345173, acc.: 86.33%] [G loss: 2.006404]\n",
      "1237 [D loss: 0.298431, acc.: 87.50%] [G loss: 1.820857]\n",
      "1238 [D loss: 0.354522, acc.: 82.03%] [G loss: 1.866700]\n",
      "1239 [D loss: 0.339342, acc.: 82.81%] [G loss: 1.855682]\n",
      "1240 [D loss: 0.314092, acc.: 86.33%] [G loss: 1.968995]\n",
      "1241 [D loss: 0.328279, acc.: 85.55%] [G loss: 1.948867]\n",
      "1242 [D loss: 0.311102, acc.: 87.11%] [G loss: 1.865727]\n",
      "1243 [D loss: 0.320115, acc.: 83.98%] [G loss: 1.859595]\n",
      "1244 [D loss: 0.306080, acc.: 87.50%] [G loss: 2.016714]\n",
      "1245 [D loss: 0.374280, acc.: 81.25%] [G loss: 1.985366]\n",
      "1246 [D loss: 0.329302, acc.: 85.94%] [G loss: 1.952643]\n",
      "1247 [D loss: 0.325779, acc.: 86.33%] [G loss: 1.990598]\n",
      "1248 [D loss: 0.329135, acc.: 84.38%] [G loss: 2.093388]\n",
      "1249 [D loss: 0.327424, acc.: 85.16%] [G loss: 1.894921]\n",
      "1250 [D loss: 0.325752, acc.: 86.33%] [G loss: 1.845104]\n",
      "1251 [D loss: 0.318067, acc.: 86.72%] [G loss: 1.915056]\n",
      "1252 [D loss: 0.310562, acc.: 87.89%] [G loss: 2.047849]\n",
      "1253 [D loss: 0.318828, acc.: 84.38%] [G loss: 1.812297]\n",
      "1254 [D loss: 0.332048, acc.: 86.33%] [G loss: 1.902708]\n",
      "1255 [D loss: 0.318785, acc.: 86.72%] [G loss: 1.955736]\n",
      "1256 [D loss: 0.349696, acc.: 83.98%] [G loss: 1.837282]\n",
      "1257 [D loss: 0.342249, acc.: 85.55%] [G loss: 2.114284]\n",
      "1258 [D loss: 0.349174, acc.: 82.03%] [G loss: 1.954787]\n",
      "1259 [D loss: 0.311821, acc.: 86.72%] [G loss: 1.902364]\n",
      "1260 [D loss: 0.333228, acc.: 85.55%] [G loss: 1.838400]\n",
      "1261 [D loss: 0.306330, acc.: 89.84%] [G loss: 1.980160]\n",
      "1262 [D loss: 0.311624, acc.: 88.28%] [G loss: 1.976811]\n",
      "1263 [D loss: 0.309606, acc.: 87.89%] [G loss: 2.061712]\n",
      "1264 [D loss: 0.338567, acc.: 85.94%] [G loss: 1.939857]\n",
      "1265 [D loss: 0.319646, acc.: 85.16%] [G loss: 1.983190]\n",
      "1266 [D loss: 0.315404, acc.: 87.50%] [G loss: 1.964833]\n",
      "1267 [D loss: 0.304059, acc.: 88.67%] [G loss: 2.048852]\n",
      "1268 [D loss: 0.328330, acc.: 85.16%] [G loss: 1.919537]\n",
      "1269 [D loss: 0.334275, acc.: 86.33%] [G loss: 1.887181]\n",
      "1270 [D loss: 0.322839, acc.: 87.11%] [G loss: 2.022689]\n",
      "1271 [D loss: 0.349137, acc.: 83.59%] [G loss: 2.053345]\n",
      "1272 [D loss: 0.296417, acc.: 89.45%] [G loss: 1.877825]\n",
      "1273 [D loss: 0.324517, acc.: 86.33%] [G loss: 2.018006]\n",
      "1274 [D loss: 0.311138, acc.: 86.33%] [G loss: 1.977395]\n",
      "1275 [D loss: 0.316591, acc.: 84.38%] [G loss: 1.902934]\n",
      "1276 [D loss: 0.318636, acc.: 86.72%] [G loss: 1.942626]\n",
      "1277 [D loss: 0.315016, acc.: 88.67%] [G loss: 2.123607]\n",
      "1278 [D loss: 0.324254, acc.: 85.16%] [G loss: 1.924727]\n",
      "1279 [D loss: 0.333930, acc.: 86.33%] [G loss: 2.074471]\n",
      "1280 [D loss: 0.316314, acc.: 87.11%] [G loss: 2.066535]\n",
      "1281 [D loss: 0.307114, acc.: 87.50%] [G loss: 1.910104]\n",
      "1282 [D loss: 0.312489, acc.: 87.11%] [G loss: 2.012149]\n",
      "1283 [D loss: 0.293506, acc.: 86.72%] [G loss: 2.027928]\n",
      "1284 [D loss: 0.336517, acc.: 85.16%] [G loss: 2.056833]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1285 [D loss: 0.310521, acc.: 87.11%] [G loss: 1.996953]\n",
      "1286 [D loss: 0.312390, acc.: 86.72%] [G loss: 1.974320]\n",
      "1287 [D loss: 0.326985, acc.: 88.28%] [G loss: 2.081944]\n",
      "1288 [D loss: 0.339577, acc.: 84.38%] [G loss: 2.030206]\n",
      "1289 [D loss: 0.316203, acc.: 86.33%] [G loss: 1.866560]\n",
      "1290 [D loss: 0.318225, acc.: 87.89%] [G loss: 1.816840]\n",
      "1291 [D loss: 0.257979, acc.: 91.41%] [G loss: 1.955603]\n",
      "1292 [D loss: 0.333798, acc.: 85.94%] [G loss: 2.026098]\n",
      "1293 [D loss: 0.285009, acc.: 88.67%] [G loss: 2.100589]\n",
      "1294 [D loss: 0.294239, acc.: 85.94%] [G loss: 2.065187]\n",
      "1295 [D loss: 0.328014, acc.: 84.77%] [G loss: 2.072885]\n",
      "1296 [D loss: 0.301859, acc.: 85.94%] [G loss: 2.139288]\n",
      "1297 [D loss: 0.308570, acc.: 85.55%] [G loss: 2.046536]\n",
      "1298 [D loss: 0.332068, acc.: 87.89%] [G loss: 2.060686]\n",
      "1299 [D loss: 0.308366, acc.: 86.33%] [G loss: 2.094350]\n",
      "1300 [D loss: 0.297702, acc.: 87.11%] [G loss: 2.106950]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "1301 [D loss: 0.309598, acc.: 88.28%] [G loss: 2.138109]\n",
      "1302 [D loss: 0.305807, acc.: 85.94%] [G loss: 2.008178]\n",
      "1303 [D loss: 0.296359, acc.: 89.45%] [G loss: 2.153698]\n",
      "1304 [D loss: 0.310003, acc.: 87.50%] [G loss: 2.076374]\n",
      "1305 [D loss: 0.294149, acc.: 89.06%] [G loss: 2.037656]\n",
      "1306 [D loss: 0.305236, acc.: 89.06%] [G loss: 2.241182]\n",
      "1307 [D loss: 0.306169, acc.: 86.72%] [G loss: 2.050230]\n",
      "1308 [D loss: 0.301516, acc.: 87.11%] [G loss: 2.106800]\n",
      "1309 [D loss: 0.316916, acc.: 88.28%] [G loss: 2.231546]\n",
      "1310 [D loss: 0.301179, acc.: 87.50%] [G loss: 2.136884]\n",
      "1311 [D loss: 0.314315, acc.: 85.16%] [G loss: 2.028786]\n",
      "1312 [D loss: 0.283269, acc.: 88.67%] [G loss: 1.972368]\n",
      "1313 [D loss: 0.296977, acc.: 87.50%] [G loss: 2.011536]\n",
      "1314 [D loss: 0.282167, acc.: 90.23%] [G loss: 2.245255]\n",
      "1315 [D loss: 0.299329, acc.: 88.67%] [G loss: 2.152430]\n",
      "1316 [D loss: 0.295884, acc.: 86.33%] [G loss: 2.108775]\n",
      "1317 [D loss: 0.313089, acc.: 87.50%] [G loss: 2.226704]\n",
      "1318 [D loss: 0.287395, acc.: 87.50%] [G loss: 2.050211]\n",
      "1319 [D loss: 0.279647, acc.: 86.72%] [G loss: 2.004870]\n",
      "1320 [D loss: 0.311509, acc.: 87.50%] [G loss: 2.158530]\n",
      "1321 [D loss: 0.298816, acc.: 87.11%] [G loss: 2.042258]\n",
      "1322 [D loss: 0.310670, acc.: 88.28%] [G loss: 2.121309]\n",
      "1323 [D loss: 0.297264, acc.: 87.89%] [G loss: 2.081173]\n",
      "1324 [D loss: 0.293921, acc.: 87.89%] [G loss: 2.156027]\n",
      "1325 [D loss: 0.300509, acc.: 87.11%] [G loss: 2.057453]\n",
      "1326 [D loss: 0.290989, acc.: 87.89%] [G loss: 2.178113]\n",
      "1327 [D loss: 0.288569, acc.: 88.28%] [G loss: 1.970349]\n",
      "1328 [D loss: 0.289951, acc.: 89.06%] [G loss: 2.136798]\n",
      "1329 [D loss: 0.282774, acc.: 87.89%] [G loss: 2.141404]\n",
      "1330 [D loss: 0.269333, acc.: 91.02%] [G loss: 2.084059]\n",
      "1331 [D loss: 0.288776, acc.: 87.50%] [G loss: 2.205293]\n",
      "1332 [D loss: 0.317614, acc.: 87.50%] [G loss: 2.203310]\n",
      "1333 [D loss: 0.308665, acc.: 87.11%] [G loss: 2.143910]\n",
      "1334 [D loss: 0.312339, acc.: 85.16%] [G loss: 2.180410]\n",
      "1335 [D loss: 0.300923, acc.: 86.33%] [G loss: 2.210721]\n",
      "1336 [D loss: 0.316623, acc.: 85.16%] [G loss: 2.125004]\n",
      "1337 [D loss: 0.300191, acc.: 87.11%] [G loss: 2.225011]\n",
      "1338 [D loss: 0.284220, acc.: 87.11%] [G loss: 2.108186]\n",
      "1339 [D loss: 0.293787, acc.: 88.28%] [G loss: 2.020027]\n",
      "1340 [D loss: 0.282921, acc.: 88.28%] [G loss: 2.052489]\n",
      "1341 [D loss: 0.322515, acc.: 85.94%] [G loss: 2.160037]\n",
      "1342 [D loss: 0.286782, acc.: 87.50%] [G loss: 2.044429]\n",
      "1343 [D loss: 0.292626, acc.: 88.67%] [G loss: 2.033492]\n",
      "1344 [D loss: 0.320412, acc.: 85.55%] [G loss: 2.278917]\n",
      "1345 [D loss: 0.265471, acc.: 88.67%] [G loss: 2.196727]\n",
      "1346 [D loss: 0.297032, acc.: 86.33%] [G loss: 1.979693]\n",
      "1347 [D loss: 0.314152, acc.: 85.94%] [G loss: 2.036433]\n",
      "1348 [D loss: 0.311429, acc.: 85.94%] [G loss: 2.283512]\n",
      "1349 [D loss: 0.318970, acc.: 86.72%] [G loss: 2.091399]\n",
      "1350 [D loss: 0.298723, acc.: 88.28%] [G loss: 2.074735]\n",
      "1351 [D loss: 0.267430, acc.: 88.67%] [G loss: 2.136593]\n",
      "1352 [D loss: 0.334068, acc.: 86.33%] [G loss: 2.130636]\n",
      "1353 [D loss: 0.309217, acc.: 86.72%] [G loss: 2.239571]\n",
      "1354 [D loss: 0.289330, acc.: 88.28%] [G loss: 2.072701]\n",
      "1355 [D loss: 0.283223, acc.: 87.89%] [G loss: 2.097057]\n",
      "1356 [D loss: 0.274083, acc.: 89.45%] [G loss: 2.201328]\n",
      "1357 [D loss: 0.265828, acc.: 89.84%] [G loss: 2.220596]\n",
      "1358 [D loss: 0.314080, acc.: 87.11%] [G loss: 2.193949]\n",
      "1359 [D loss: 0.265896, acc.: 87.89%] [G loss: 1.976556]\n",
      "1360 [D loss: 0.274009, acc.: 90.23%] [G loss: 2.116518]\n",
      "1361 [D loss: 0.295519, acc.: 87.89%] [G loss: 2.238123]\n",
      "1362 [D loss: 0.283696, acc.: 87.89%] [G loss: 2.061584]\n",
      "1363 [D loss: 0.273695, acc.: 89.06%] [G loss: 2.144814]\n",
      "1364 [D loss: 0.286854, acc.: 89.06%] [G loss: 2.113733]\n",
      "1365 [D loss: 0.261218, acc.: 90.23%] [G loss: 2.116254]\n",
      "1366 [D loss: 0.303294, acc.: 86.72%] [G loss: 2.282653]\n",
      "1367 [D loss: 0.251555, acc.: 90.23%] [G loss: 2.271600]\n",
      "1368 [D loss: 0.277056, acc.: 88.67%] [G loss: 2.044264]\n",
      "1369 [D loss: 0.301070, acc.: 86.72%] [G loss: 2.174543]\n",
      "1370 [D loss: 0.316067, acc.: 86.33%] [G loss: 2.336585]\n",
      "1371 [D loss: 0.274382, acc.: 86.72%] [G loss: 2.130676]\n",
      "1372 [D loss: 0.286727, acc.: 88.67%] [G loss: 2.230464]\n",
      "1373 [D loss: 0.274232, acc.: 87.89%] [G loss: 2.290008]\n",
      "1374 [D loss: 0.286249, acc.: 89.84%] [G loss: 2.007284]\n",
      "1375 [D loss: 0.291482, acc.: 86.72%] [G loss: 2.288085]\n",
      "1376 [D loss: 0.264632, acc.: 89.45%] [G loss: 2.186891]\n",
      "1377 [D loss: 0.301810, acc.: 85.55%] [G loss: 1.949984]\n",
      "1378 [D loss: 0.262665, acc.: 89.45%] [G loss: 2.235701]\n",
      "1379 [D loss: 0.298298, acc.: 86.72%] [G loss: 2.180336]\n",
      "1380 [D loss: 0.270117, acc.: 87.89%] [G loss: 2.120441]\n",
      "1381 [D loss: 0.288376, acc.: 86.33%] [G loss: 2.390926]\n",
      "1382 [D loss: 0.285094, acc.: 85.94%] [G loss: 2.128656]\n",
      "1383 [D loss: 0.278329, acc.: 86.33%] [G loss: 2.157194]\n",
      "1384 [D loss: 0.287181, acc.: 88.28%] [G loss: 2.275610]\n",
      "1385 [D loss: 0.290709, acc.: 87.11%] [G loss: 2.268329]\n",
      "1386 [D loss: 0.262892, acc.: 90.23%] [G loss: 2.200886]\n",
      "1387 [D loss: 0.277143, acc.: 88.28%] [G loss: 2.197843]\n",
      "1388 [D loss: 0.289989, acc.: 87.89%] [G loss: 2.155928]\n",
      "1389 [D loss: 0.251787, acc.: 88.67%] [G loss: 2.357386]\n",
      "1390 [D loss: 0.293393, acc.: 87.89%] [G loss: 2.248330]\n",
      "1391 [D loss: 0.278492, acc.: 87.11%] [G loss: 2.265421]\n",
      "1392 [D loss: 0.281648, acc.: 87.50%] [G loss: 2.144079]\n",
      "1393 [D loss: 0.260378, acc.: 88.28%] [G loss: 2.277649]\n",
      "1394 [D loss: 0.273382, acc.: 87.50%] [G loss: 2.142538]\n",
      "1395 [D loss: 0.267365, acc.: 88.28%] [G loss: 2.302526]\n",
      "1396 [D loss: 0.259545, acc.: 89.45%] [G loss: 2.211472]\n",
      "1397 [D loss: 0.261998, acc.: 88.67%] [G loss: 2.157285]\n",
      "1398 [D loss: 0.286302, acc.: 87.11%] [G loss: 2.363489]\n",
      "1399 [D loss: 0.277613, acc.: 90.23%] [G loss: 2.302327]\n",
      "1400 [D loss: 0.237286, acc.: 91.41%] [G loss: 2.244371]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "1401 [D loss: 0.252441, acc.: 91.02%] [G loss: 2.295553]\n",
      "1402 [D loss: 0.279549, acc.: 88.28%] [G loss: 2.406223]\n",
      "1403 [D loss: 0.272280, acc.: 87.89%] [G loss: 2.173524]\n",
      "1404 [D loss: 0.247701, acc.: 91.41%] [G loss: 2.079127]\n",
      "1405 [D loss: 0.291627, acc.: 87.50%] [G loss: 2.132167]\n",
      "1406 [D loss: 0.303943, acc.: 86.33%] [G loss: 2.595555]\n",
      "1407 [D loss: 0.284640, acc.: 87.11%] [G loss: 2.115097]\n",
      "1408 [D loss: 0.262786, acc.: 88.67%] [G loss: 2.326055]\n",
      "1409 [D loss: 0.287061, acc.: 87.50%] [G loss: 2.353445]\n",
      "1410 [D loss: 0.286762, acc.: 89.06%] [G loss: 2.304885]\n",
      "1411 [D loss: 0.265024, acc.: 89.45%] [G loss: 2.217983]\n",
      "1412 [D loss: 0.274554, acc.: 88.67%] [G loss: 2.222741]\n",
      "1413 [D loss: 0.248578, acc.: 89.45%] [G loss: 2.219468]\n",
      "1414 [D loss: 0.271080, acc.: 87.89%] [G loss: 2.296281]\n",
      "1415 [D loss: 0.273735, acc.: 86.72%] [G loss: 2.399930]\n",
      "1416 [D loss: 0.266434, acc.: 86.72%] [G loss: 2.322104]\n",
      "1417 [D loss: 0.264563, acc.: 88.67%] [G loss: 2.237541]\n",
      "1418 [D loss: 0.260562, acc.: 88.67%] [G loss: 2.245517]\n",
      "1419 [D loss: 0.262152, acc.: 89.84%] [G loss: 2.347611]\n",
      "1420 [D loss: 0.252562, acc.: 89.84%] [G loss: 2.467370]\n",
      "1421 [D loss: 0.250766, acc.: 88.28%] [G loss: 2.354057]\n",
      "1422 [D loss: 0.293530, acc.: 87.50%] [G loss: 2.158481]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1423 [D loss: 0.274819, acc.: 87.50%] [G loss: 2.403659]\n",
      "1424 [D loss: 0.272555, acc.: 90.62%] [G loss: 2.275120]\n",
      "1425 [D loss: 0.252911, acc.: 91.02%] [G loss: 2.230258]\n",
      "1426 [D loss: 0.305221, acc.: 87.11%] [G loss: 2.451114]\n",
      "1427 [D loss: 0.242831, acc.: 88.67%] [G loss: 2.468337]\n",
      "1428 [D loss: 0.282706, acc.: 87.89%] [G loss: 2.224561]\n",
      "1429 [D loss: 0.248261, acc.: 91.80%] [G loss: 2.332393]\n",
      "1430 [D loss: 0.253771, acc.: 89.06%] [G loss: 2.328414]\n",
      "1431 [D loss: 0.275109, acc.: 87.89%] [G loss: 2.523818]\n",
      "1432 [D loss: 0.256527, acc.: 88.67%] [G loss: 2.295719]\n",
      "1433 [D loss: 0.263722, acc.: 90.23%] [G loss: 2.343731]\n",
      "1434 [D loss: 0.240730, acc.: 91.02%] [G loss: 2.279450]\n",
      "1435 [D loss: 0.259688, acc.: 89.06%] [G loss: 2.200968]\n",
      "1436 [D loss: 0.260896, acc.: 89.45%] [G loss: 2.456709]\n",
      "1437 [D loss: 0.240620, acc.: 92.19%] [G loss: 2.229975]\n",
      "1438 [D loss: 0.238731, acc.: 90.62%] [G loss: 2.175312]\n",
      "1439 [D loss: 0.274416, acc.: 88.67%] [G loss: 2.420468]\n",
      "1440 [D loss: 0.241408, acc.: 91.02%] [G loss: 2.495674]\n",
      "1441 [D loss: 0.265542, acc.: 88.28%] [G loss: 2.214932]\n",
      "1442 [D loss: 0.263947, acc.: 90.23%] [G loss: 2.444576]\n",
      "1443 [D loss: 0.251273, acc.: 89.84%] [G loss: 2.427729]\n",
      "1444 [D loss: 0.266010, acc.: 87.89%] [G loss: 2.356336]\n",
      "1445 [D loss: 0.260219, acc.: 89.45%] [G loss: 2.348880]\n",
      "1446 [D loss: 0.252003, acc.: 89.84%] [G loss: 2.327394]\n",
      "1447 [D loss: 0.247741, acc.: 90.23%] [G loss: 2.325971]\n",
      "1448 [D loss: 0.285899, acc.: 88.28%] [G loss: 2.290019]\n",
      "1449 [D loss: 0.261275, acc.: 89.06%] [G loss: 2.382854]\n",
      "1450 [D loss: 0.261131, acc.: 89.45%] [G loss: 2.358586]\n",
      "1451 [D loss: 0.264753, acc.: 88.28%] [G loss: 2.453160]\n",
      "1452 [D loss: 0.273573, acc.: 88.28%] [G loss: 2.359598]\n",
      "1453 [D loss: 0.239656, acc.: 91.41%] [G loss: 2.475997]\n",
      "1454 [D loss: 0.257998, acc.: 89.45%] [G loss: 2.578007]\n",
      "1455 [D loss: 0.270490, acc.: 88.28%] [G loss: 2.326705]\n",
      "1456 [D loss: 0.268324, acc.: 88.28%] [G loss: 2.244922]\n",
      "1457 [D loss: 0.248134, acc.: 87.89%] [G loss: 2.434691]\n",
      "1458 [D loss: 0.256882, acc.: 90.62%] [G loss: 2.360446]\n",
      "1459 [D loss: 0.250515, acc.: 89.84%] [G loss: 2.473171]\n",
      "1460 [D loss: 0.284496, acc.: 88.67%] [G loss: 2.207406]\n",
      "1461 [D loss: 0.241019, acc.: 91.80%] [G loss: 2.327918]\n",
      "1462 [D loss: 0.235433, acc.: 89.84%] [G loss: 2.380780]\n",
      "1463 [D loss: 0.242820, acc.: 91.02%] [G loss: 2.420807]\n",
      "1464 [D loss: 0.252517, acc.: 89.84%] [G loss: 2.329981]\n",
      "1465 [D loss: 0.237269, acc.: 90.23%] [G loss: 2.315433]\n",
      "1466 [D loss: 0.238866, acc.: 91.02%] [G loss: 2.552945]\n",
      "1467 [D loss: 0.246448, acc.: 91.41%] [G loss: 2.290585]\n",
      "1468 [D loss: 0.273958, acc.: 87.89%] [G loss: 2.267512]\n",
      "1469 [D loss: 0.242266, acc.: 90.23%] [G loss: 2.485197]\n",
      "1470 [D loss: 0.239517, acc.: 89.45%] [G loss: 2.400635]\n",
      "1471 [D loss: 0.246324, acc.: 90.62%] [G loss: 2.399795]\n",
      "1472 [D loss: 0.251511, acc.: 91.41%] [G loss: 2.274300]\n",
      "1473 [D loss: 0.233445, acc.: 89.84%] [G loss: 2.464184]\n",
      "1474 [D loss: 0.240448, acc.: 90.23%] [G loss: 2.478275]\n",
      "1475 [D loss: 0.263473, acc.: 87.89%] [G loss: 2.345858]\n",
      "1476 [D loss: 0.218651, acc.: 90.23%] [G loss: 2.607213]\n",
      "1477 [D loss: 0.268613, acc.: 87.11%] [G loss: 2.374528]\n",
      "1478 [D loss: 0.233519, acc.: 91.41%] [G loss: 2.329559]\n",
      "1479 [D loss: 0.280282, acc.: 87.89%] [G loss: 2.442041]\n",
      "1480 [D loss: 0.217168, acc.: 91.02%] [G loss: 2.381610]\n",
      "1481 [D loss: 0.255378, acc.: 87.89%] [G loss: 2.349242]\n",
      "1482 [D loss: 0.230633, acc.: 91.41%] [G loss: 2.334421]\n",
      "1483 [D loss: 0.249168, acc.: 91.41%] [G loss: 2.516039]\n",
      "1484 [D loss: 0.270327, acc.: 88.67%] [G loss: 2.459391]\n",
      "1485 [D loss: 0.246909, acc.: 89.45%] [G loss: 2.377430]\n",
      "1486 [D loss: 0.243623, acc.: 89.06%] [G loss: 2.584800]\n",
      "1487 [D loss: 0.242704, acc.: 89.45%] [G loss: 2.493508]\n",
      "1488 [D loss: 0.264327, acc.: 88.67%] [G loss: 2.221514]\n",
      "1489 [D loss: 0.252277, acc.: 89.84%] [G loss: 2.411602]\n",
      "1490 [D loss: 0.254513, acc.: 88.28%] [G loss: 2.398462]\n",
      "1491 [D loss: 0.281658, acc.: 87.89%] [G loss: 2.489247]\n",
      "1492 [D loss: 0.244050, acc.: 90.62%] [G loss: 2.550045]\n",
      "1493 [D loss: 0.223989, acc.: 91.80%] [G loss: 2.463012]\n",
      "1494 [D loss: 0.219006, acc.: 91.41%] [G loss: 2.282012]\n",
      "1495 [D loss: 0.221723, acc.: 91.41%] [G loss: 2.490373]\n",
      "1496 [D loss: 0.224250, acc.: 91.41%] [G loss: 2.240542]\n",
      "1497 [D loss: 0.217281, acc.: 91.41%] [G loss: 2.253728]\n",
      "1498 [D loss: 0.214012, acc.: 91.80%] [G loss: 2.422497]\n",
      "1499 [D loss: 0.245661, acc.: 87.89%] [G loss: 2.571674]\n",
      "1500 [D loss: 0.247830, acc.: 89.06%] [G loss: 2.394419]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "1501 [D loss: 0.241274, acc.: 89.45%] [G loss: 2.534273]\n",
      "1502 [D loss: 0.232310, acc.: 90.62%] [G loss: 2.539409]\n",
      "1503 [D loss: 0.231527, acc.: 91.02%] [G loss: 2.557504]\n",
      "1504 [D loss: 0.251722, acc.: 89.06%] [G loss: 2.438801]\n",
      "1505 [D loss: 0.233361, acc.: 91.41%] [G loss: 2.513727]\n",
      "1506 [D loss: 0.219554, acc.: 93.36%] [G loss: 2.421368]\n",
      "1507 [D loss: 0.238731, acc.: 89.45%] [G loss: 2.366417]\n",
      "1508 [D loss: 0.251273, acc.: 89.45%] [G loss: 2.530351]\n",
      "1509 [D loss: 0.213274, acc.: 90.23%] [G loss: 2.548798]\n",
      "1510 [D loss: 0.233917, acc.: 89.84%] [G loss: 2.319322]\n",
      "1511 [D loss: 0.230678, acc.: 90.62%] [G loss: 2.516070]\n",
      "1512 [D loss: 0.244760, acc.: 87.89%] [G loss: 2.520116]\n",
      "1513 [D loss: 0.234879, acc.: 90.62%] [G loss: 2.459761]\n",
      "1514 [D loss: 0.239647, acc.: 90.62%] [G loss: 2.489361]\n",
      "1515 [D loss: 0.223699, acc.: 90.62%] [G loss: 2.571207]\n",
      "1516 [D loss: 0.233090, acc.: 89.45%] [G loss: 2.414580]\n",
      "1517 [D loss: 0.208652, acc.: 92.58%] [G loss: 2.537120]\n",
      "1518 [D loss: 0.245335, acc.: 91.41%] [G loss: 2.542221]\n",
      "1519 [D loss: 0.222157, acc.: 91.80%] [G loss: 2.508711]\n",
      "1520 [D loss: 0.222637, acc.: 90.23%] [G loss: 2.558558]\n",
      "1521 [D loss: 0.205347, acc.: 91.41%] [G loss: 2.548101]\n",
      "1522 [D loss: 0.219167, acc.: 90.23%] [G loss: 2.576898]\n",
      "1523 [D loss: 0.230132, acc.: 89.06%] [G loss: 2.558325]\n",
      "1524 [D loss: 0.241066, acc.: 91.02%] [G loss: 2.317256]\n",
      "1525 [D loss: 0.231884, acc.: 91.02%] [G loss: 2.314647]\n",
      "1526 [D loss: 0.245930, acc.: 89.45%] [G loss: 2.382430]\n",
      "1527 [D loss: 0.227457, acc.: 91.80%] [G loss: 2.495125]\n",
      "1528 [D loss: 0.281932, acc.: 89.06%] [G loss: 2.502094]\n",
      "1529 [D loss: 0.226381, acc.: 89.84%] [G loss: 2.566762]\n",
      "1530 [D loss: 0.247651, acc.: 91.02%] [G loss: 2.418691]\n",
      "1531 [D loss: 0.246484, acc.: 89.84%] [G loss: 2.561795]\n",
      "1532 [D loss: 0.221548, acc.: 90.23%] [G loss: 2.527673]\n",
      "1533 [D loss: 0.247231, acc.: 88.67%] [G loss: 2.404176]\n",
      "1534 [D loss: 0.243211, acc.: 90.23%] [G loss: 2.628247]\n",
      "1535 [D loss: 0.222732, acc.: 89.84%] [G loss: 2.545270]\n",
      "1536 [D loss: 0.244684, acc.: 89.84%] [G loss: 2.731747]\n",
      "1537 [D loss: 0.229211, acc.: 90.62%] [G loss: 2.555634]\n",
      "1538 [D loss: 0.209468, acc.: 91.80%] [G loss: 2.347923]\n",
      "1539 [D loss: 0.218007, acc.: 91.02%] [G loss: 2.355589]\n",
      "1540 [D loss: 0.213431, acc.: 92.19%] [G loss: 2.480071]\n",
      "1541 [D loss: 0.221291, acc.: 91.02%] [G loss: 2.325340]\n",
      "1542 [D loss: 0.221699, acc.: 91.02%] [G loss: 2.563756]\n",
      "1543 [D loss: 0.223000, acc.: 91.02%] [G loss: 2.724837]\n",
      "1544 [D loss: 0.248587, acc.: 89.84%] [G loss: 2.674980]\n",
      "1545 [D loss: 0.248983, acc.: 89.45%] [G loss: 2.396714]\n",
      "1546 [D loss: 0.249147, acc.: 90.23%] [G loss: 2.501750]\n",
      "1547 [D loss: 0.232668, acc.: 90.62%] [G loss: 2.640229]\n",
      "1548 [D loss: 0.213247, acc.: 90.23%] [G loss: 2.802523]\n",
      "1549 [D loss: 0.227241, acc.: 89.45%] [G loss: 2.566593]\n",
      "1550 [D loss: 0.220903, acc.: 91.80%] [G loss: 2.519916]\n",
      "1551 [D loss: 0.227842, acc.: 91.80%] [G loss: 2.713614]\n",
      "1552 [D loss: 0.219806, acc.: 90.62%] [G loss: 2.649523]\n",
      "1553 [D loss: 0.211245, acc.: 92.19%] [G loss: 2.509767]\n",
      "1554 [D loss: 0.236294, acc.: 91.80%] [G loss: 2.481234]\n",
      "1555 [D loss: 0.210578, acc.: 91.41%] [G loss: 2.683214]\n",
      "1556 [D loss: 0.206975, acc.: 91.02%] [G loss: 2.510527]\n",
      "1557 [D loss: 0.204588, acc.: 91.80%] [G loss: 2.620656]\n",
      "1558 [D loss: 0.215743, acc.: 92.58%] [G loss: 2.644594]\n",
      "1559 [D loss: 0.249476, acc.: 89.06%] [G loss: 2.565577]\n",
      "1560 [D loss: 0.217088, acc.: 92.19%] [G loss: 2.546928]\n",
      "1561 [D loss: 0.214808, acc.: 91.41%] [G loss: 2.714004]\n",
      "1562 [D loss: 0.219939, acc.: 91.02%] [G loss: 2.483664]\n",
      "1563 [D loss: 0.226216, acc.: 89.84%] [G loss: 2.601348]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1564 [D loss: 0.205853, acc.: 90.62%] [G loss: 2.448462]\n",
      "1565 [D loss: 0.217779, acc.: 90.23%] [G loss: 2.457719]\n",
      "1566 [D loss: 0.203376, acc.: 91.02%] [G loss: 2.736894]\n",
      "1567 [D loss: 0.231558, acc.: 90.23%] [G loss: 2.678501]\n",
      "1568 [D loss: 0.221201, acc.: 91.02%] [G loss: 2.503916]\n",
      "1569 [D loss: 0.212481, acc.: 92.97%] [G loss: 2.670399]\n",
      "1570 [D loss: 0.213874, acc.: 90.23%] [G loss: 2.608123]\n",
      "1571 [D loss: 0.225474, acc.: 92.19%] [G loss: 2.658918]\n",
      "1572 [D loss: 0.227877, acc.: 90.23%] [G loss: 2.703914]\n",
      "1573 [D loss: 0.228491, acc.: 89.06%] [G loss: 2.760374]\n",
      "1574 [D loss: 0.218609, acc.: 91.80%] [G loss: 2.674360]\n",
      "1575 [D loss: 0.229233, acc.: 91.41%] [G loss: 2.490391]\n",
      "1576 [D loss: 0.214664, acc.: 90.62%] [G loss: 2.590290]\n",
      "1577 [D loss: 0.223152, acc.: 90.62%] [G loss: 2.600971]\n",
      "1578 [D loss: 0.258797, acc.: 89.45%] [G loss: 2.565575]\n",
      "1579 [D loss: 0.224979, acc.: 91.41%] [G loss: 2.631284]\n",
      "1580 [D loss: 0.249818, acc.: 92.97%] [G loss: 2.398693]\n",
      "1581 [D loss: 0.212187, acc.: 91.41%] [G loss: 2.425808]\n",
      "1582 [D loss: 0.207022, acc.: 92.97%] [G loss: 2.656041]\n",
      "1583 [D loss: 0.204193, acc.: 91.41%] [G loss: 2.475511]\n",
      "1584 [D loss: 0.203259, acc.: 92.58%] [G loss: 2.666861]\n",
      "1585 [D loss: 0.215738, acc.: 91.41%] [G loss: 2.615506]\n",
      "1586 [D loss: 0.211459, acc.: 89.45%] [G loss: 2.794242]\n",
      "1587 [D loss: 0.241247, acc.: 89.45%] [G loss: 2.413139]\n",
      "1588 [D loss: 0.207640, acc.: 92.58%] [G loss: 2.619388]\n",
      "1589 [D loss: 0.223108, acc.: 89.06%] [G loss: 2.770351]\n",
      "1590 [D loss: 0.219142, acc.: 89.84%] [G loss: 2.689328]\n",
      "1591 [D loss: 0.207271, acc.: 93.75%] [G loss: 2.749548]\n",
      "1592 [D loss: 0.198190, acc.: 92.58%] [G loss: 2.452350]\n",
      "1593 [D loss: 0.205327, acc.: 92.97%] [G loss: 2.638198]\n",
      "1594 [D loss: 0.207495, acc.: 92.97%] [G loss: 2.684829]\n",
      "1595 [D loss: 0.221291, acc.: 91.80%] [G loss: 2.744814]\n",
      "1596 [D loss: 0.213928, acc.: 91.41%] [G loss: 2.771900]\n",
      "1597 [D loss: 0.242473, acc.: 88.67%] [G loss: 2.677172]\n",
      "1598 [D loss: 0.254273, acc.: 88.28%] [G loss: 2.726683]\n",
      "1599 [D loss: 0.196707, acc.: 91.02%] [G loss: 2.501837]\n",
      "1600 [D loss: 0.190803, acc.: 91.80%] [G loss: 2.714580]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "1601 [D loss: 0.192472, acc.: 92.58%] [G loss: 2.802853]\n",
      "1602 [D loss: 0.210735, acc.: 90.23%] [G loss: 2.650466]\n",
      "1603 [D loss: 0.237500, acc.: 89.84%] [G loss: 2.647255]\n",
      "1604 [D loss: 0.220686, acc.: 89.45%] [G loss: 2.641694]\n",
      "1605 [D loss: 0.254807, acc.: 89.45%] [G loss: 2.798995]\n",
      "1606 [D loss: 0.208910, acc.: 91.41%] [G loss: 2.614257]\n",
      "1607 [D loss: 0.219119, acc.: 90.62%] [G loss: 2.665969]\n",
      "1608 [D loss: 0.196488, acc.: 92.58%] [G loss: 2.592611]\n",
      "1609 [D loss: 0.207460, acc.: 91.02%] [G loss: 2.707648]\n",
      "1610 [D loss: 0.200714, acc.: 92.97%] [G loss: 2.803106]\n",
      "1611 [D loss: 0.250605, acc.: 90.23%] [G loss: 2.683294]\n",
      "1612 [D loss: 0.203374, acc.: 91.41%] [G loss: 2.918045]\n",
      "1613 [D loss: 0.220990, acc.: 90.23%] [G loss: 2.674019]\n",
      "1614 [D loss: 0.197867, acc.: 93.75%] [G loss: 2.688175]\n",
      "1615 [D loss: 0.200489, acc.: 91.80%] [G loss: 2.744043]\n",
      "1616 [D loss: 0.225321, acc.: 91.02%] [G loss: 2.728338]\n",
      "1617 [D loss: 0.218466, acc.: 90.62%] [G loss: 2.817054]\n",
      "1618 [D loss: 0.207957, acc.: 91.02%] [G loss: 2.675639]\n",
      "1619 [D loss: 0.223127, acc.: 92.19%] [G loss: 2.508085]\n",
      "1620 [D loss: 0.192929, acc.: 92.19%] [G loss: 2.593898]\n",
      "1621 [D loss: 0.221980, acc.: 91.02%] [G loss: 2.854665]\n",
      "1622 [D loss: 0.212339, acc.: 92.19%] [G loss: 2.714877]\n",
      "1623 [D loss: 0.196045, acc.: 93.75%] [G loss: 2.676423]\n",
      "1624 [D loss: 0.235190, acc.: 89.45%] [G loss: 2.719589]\n",
      "1625 [D loss: 0.199817, acc.: 91.02%] [G loss: 2.796871]\n",
      "1626 [D loss: 0.199520, acc.: 91.41%] [G loss: 2.737249]\n",
      "1627 [D loss: 0.226449, acc.: 89.84%] [G loss: 2.665418]\n",
      "1628 [D loss: 0.214212, acc.: 90.62%] [G loss: 2.756086]\n",
      "1629 [D loss: 0.214541, acc.: 91.80%] [G loss: 2.695067]\n",
      "1630 [D loss: 0.205360, acc.: 92.58%] [G loss: 2.789366]\n",
      "1631 [D loss: 0.200561, acc.: 92.19%] [G loss: 2.687438]\n",
      "1632 [D loss: 0.226191, acc.: 91.41%] [G loss: 2.540184]\n",
      "1633 [D loss: 0.211422, acc.: 91.41%] [G loss: 2.799394]\n",
      "1634 [D loss: 0.242052, acc.: 91.02%] [G loss: 2.769275]\n",
      "1635 [D loss: 0.198891, acc.: 92.58%] [G loss: 2.862143]\n",
      "1636 [D loss: 0.189025, acc.: 92.58%] [G loss: 2.754623]\n",
      "1637 [D loss: 0.200377, acc.: 93.75%] [G loss: 2.521966]\n",
      "1638 [D loss: 0.203359, acc.: 90.62%] [G loss: 2.781140]\n",
      "1639 [D loss: 0.210878, acc.: 91.41%] [G loss: 2.605001]\n",
      "1640 [D loss: 0.198461, acc.: 92.19%] [G loss: 2.736545]\n",
      "1641 [D loss: 0.209843, acc.: 89.45%] [G loss: 2.765037]\n",
      "1642 [D loss: 0.194104, acc.: 91.41%] [G loss: 2.832045]\n",
      "1643 [D loss: 0.177134, acc.: 93.75%] [G loss: 2.866343]\n",
      "1644 [D loss: 0.189772, acc.: 92.58%] [G loss: 2.746893]\n",
      "1645 [D loss: 0.167439, acc.: 93.75%] [G loss: 2.596150]\n",
      "1646 [D loss: 0.186771, acc.: 92.97%] [G loss: 2.470901]\n",
      "1647 [D loss: 0.194486, acc.: 92.58%] [G loss: 2.746938]\n",
      "1648 [D loss: 0.218378, acc.: 90.62%] [G loss: 2.769907]\n",
      "1649 [D loss: 0.240052, acc.: 88.28%] [G loss: 2.736047]\n",
      "1650 [D loss: 0.197202, acc.: 92.58%] [G loss: 2.806395]\n",
      "1651 [D loss: 0.206564, acc.: 92.19%] [G loss: 2.873428]\n",
      "1652 [D loss: 0.203677, acc.: 91.02%] [G loss: 2.691729]\n",
      "1653 [D loss: 0.206268, acc.: 91.41%] [G loss: 2.612837]\n",
      "1654 [D loss: 0.185904, acc.: 93.75%] [G loss: 2.654583]\n",
      "1655 [D loss: 0.190644, acc.: 91.80%] [G loss: 2.871888]\n",
      "1656 [D loss: 0.240934, acc.: 89.84%] [G loss: 2.909217]\n",
      "1657 [D loss: 0.226467, acc.: 91.41%] [G loss: 2.905594]\n",
      "1658 [D loss: 0.243047, acc.: 89.45%] [G loss: 2.583092]\n",
      "1659 [D loss: 0.251694, acc.: 88.28%] [G loss: 2.730901]\n",
      "1660 [D loss: 0.210057, acc.: 90.62%] [G loss: 2.960614]\n",
      "1661 [D loss: 0.208569, acc.: 92.19%] [G loss: 2.929675]\n",
      "1662 [D loss: 0.184982, acc.: 91.41%] [G loss: 2.657039]\n",
      "1663 [D loss: 0.204122, acc.: 90.23%] [G loss: 2.691333]\n",
      "1664 [D loss: 0.194146, acc.: 92.97%] [G loss: 2.534033]\n",
      "1665 [D loss: 0.223374, acc.: 89.84%] [G loss: 2.731797]\n",
      "1666 [D loss: 0.180226, acc.: 93.36%] [G loss: 2.771806]\n",
      "1667 [D loss: 0.219502, acc.: 89.84%] [G loss: 2.921402]\n",
      "1668 [D loss: 0.203326, acc.: 91.02%] [G loss: 2.821865]\n",
      "1669 [D loss: 0.201486, acc.: 93.36%] [G loss: 2.665871]\n",
      "1670 [D loss: 0.182939, acc.: 92.58%] [G loss: 2.574305]\n",
      "1671 [D loss: 0.214130, acc.: 90.23%] [G loss: 2.949486]\n",
      "1672 [D loss: 0.195875, acc.: 91.80%] [G loss: 2.628117]\n",
      "1673 [D loss: 0.200684, acc.: 91.41%] [G loss: 2.995261]\n",
      "1674 [D loss: 0.214898, acc.: 91.41%] [G loss: 2.806509]\n",
      "1675 [D loss: 0.167452, acc.: 92.19%] [G loss: 2.823366]\n",
      "1676 [D loss: 0.205568, acc.: 91.80%] [G loss: 2.781380]\n",
      "1677 [D loss: 0.224319, acc.: 90.62%] [G loss: 2.915511]\n",
      "1678 [D loss: 0.222138, acc.: 90.62%] [G loss: 2.759878]\n",
      "1679 [D loss: 0.201336, acc.: 90.62%] [G loss: 2.857949]\n",
      "1680 [D loss: 0.192676, acc.: 92.19%] [G loss: 2.853169]\n",
      "1681 [D loss: 0.225938, acc.: 91.80%] [G loss: 2.727038]\n",
      "1682 [D loss: 0.232874, acc.: 88.28%] [G loss: 2.828539]\n",
      "1683 [D loss: 0.219681, acc.: 90.62%] [G loss: 2.670491]\n",
      "1684 [D loss: 0.200997, acc.: 90.62%] [G loss: 2.888945]\n",
      "1685 [D loss: 0.200368, acc.: 92.19%] [G loss: 2.605652]\n",
      "1686 [D loss: 0.193293, acc.: 91.02%] [G loss: 2.735895]\n",
      "1687 [D loss: 0.206521, acc.: 90.62%] [G loss: 3.070330]\n",
      "1688 [D loss: 0.190134, acc.: 92.97%] [G loss: 2.930827]\n",
      "1689 [D loss: 0.201227, acc.: 91.80%] [G loss: 2.755536]\n",
      "1690 [D loss: 0.180907, acc.: 92.19%] [G loss: 2.689118]\n",
      "1691 [D loss: 0.204663, acc.: 91.02%] [G loss: 2.955944]\n",
      "1692 [D loss: 0.213422, acc.: 90.23%] [G loss: 2.811078]\n",
      "1693 [D loss: 0.199141, acc.: 92.19%] [G loss: 2.877693]\n",
      "1694 [D loss: 0.180917, acc.: 93.75%] [G loss: 2.812066]\n",
      "1695 [D loss: 0.226865, acc.: 90.62%] [G loss: 2.815059]\n",
      "1696 [D loss: 0.173998, acc.: 93.36%] [G loss: 2.885026]\n",
      "1697 [D loss: 0.225931, acc.: 91.41%] [G loss: 3.066002]\n",
      "1698 [D loss: 0.214935, acc.: 89.84%] [G loss: 2.829964]\n",
      "1699 [D loss: 0.196452, acc.: 92.58%] [G loss: 2.664221]\n",
      "1700 [D loss: 0.180944, acc.: 92.97%] [G loss: 2.840186]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "generated_data\n",
      "1701 [D loss: 0.191717, acc.: 92.19%] [G loss: 2.941401]\n",
      "1702 [D loss: 0.206264, acc.: 91.41%] [G loss: 2.839406]\n",
      "1703 [D loss: 0.190147, acc.: 91.41%] [G loss: 2.869216]\n",
      "1704 [D loss: 0.206163, acc.: 90.23%] [G loss: 2.825541]\n",
      "1705 [D loss: 0.194249, acc.: 90.62%] [G loss: 2.577460]\n",
      "1706 [D loss: 0.195987, acc.: 92.97%] [G loss: 2.671904]\n",
      "1707 [D loss: 0.197269, acc.: 91.41%] [G loss: 2.872068]\n",
      "1708 [D loss: 0.218795, acc.: 91.02%] [G loss: 2.937155]\n",
      "1709 [D loss: 0.209325, acc.: 91.02%] [G loss: 2.573416]\n",
      "1710 [D loss: 0.208890, acc.: 91.41%] [G loss: 2.787606]\n",
      "1711 [D loss: 0.195438, acc.: 92.19%] [G loss: 2.852637]\n",
      "1712 [D loss: 0.200617, acc.: 92.58%] [G loss: 2.769835]\n",
      "1713 [D loss: 0.188278, acc.: 91.80%] [G loss: 2.918789]\n",
      "1714 [D loss: 0.192054, acc.: 91.80%] [G loss: 2.932136]\n",
      "1715 [D loss: 0.207101, acc.: 91.02%] [G loss: 3.001681]\n",
      "1716 [D loss: 0.220427, acc.: 90.23%] [G loss: 2.851496]\n",
      "1717 [D loss: 0.216631, acc.: 90.23%] [G loss: 2.978218]\n",
      "1718 [D loss: 0.209424, acc.: 91.41%] [G loss: 2.831128]\n",
      "1719 [D loss: 0.197444, acc.: 91.02%] [G loss: 2.858161]\n",
      "1720 [D loss: 0.208564, acc.: 92.19%] [G loss: 2.869434]\n",
      "1721 [D loss: 0.214559, acc.: 89.84%] [G loss: 2.668483]\n",
      "1722 [D loss: 0.197427, acc.: 92.58%] [G loss: 2.647340]\n",
      "1723 [D loss: 0.181627, acc.: 93.36%] [G loss: 2.667589]\n",
      "1724 [D loss: 0.221109, acc.: 90.62%] [G loss: 3.002739]\n",
      "1725 [D loss: 0.205670, acc.: 89.45%] [G loss: 3.128656]\n",
      "1726 [D loss: 0.206990, acc.: 90.62%] [G loss: 2.792583]\n",
      "1727 [D loss: 0.187285, acc.: 92.58%] [G loss: 2.909179]\n",
      "1728 [D loss: 0.181648, acc.: 93.75%] [G loss: 2.925364]\n",
      "1729 [D loss: 0.176107, acc.: 93.75%] [G loss: 2.776299]\n",
      "1730 [D loss: 0.201531, acc.: 93.75%] [G loss: 2.643466]\n",
      "1731 [D loss: 0.199208, acc.: 91.80%] [G loss: 2.683510]\n",
      "1732 [D loss: 0.207043, acc.: 91.41%] [G loss: 2.980633]\n",
      "1733 [D loss: 0.200595, acc.: 91.41%] [G loss: 3.005414]\n",
      "1734 [D loss: 0.202517, acc.: 91.41%] [G loss: 3.049803]\n",
      "1735 [D loss: 0.192556, acc.: 93.36%] [G loss: 2.957952]\n",
      "1736 [D loss: 0.207234, acc.: 92.19%] [G loss: 2.751290]\n",
      "1737 [D loss: 0.193812, acc.: 91.41%] [G loss: 2.911399]\n",
      "1738 [D loss: 0.202696, acc.: 90.62%] [G loss: 3.058869]\n",
      "1739 [D loss: 0.204368, acc.: 91.41%] [G loss: 2.818956]\n",
      "1740 [D loss: 0.209915, acc.: 88.67%] [G loss: 2.729971]\n",
      "1741 [D loss: 0.214962, acc.: 89.45%] [G loss: 2.788000]\n",
      "1742 [D loss: 0.223012, acc.: 91.41%] [G loss: 2.895329]\n",
      "1743 [D loss: 0.211036, acc.: 91.41%] [G loss: 2.926004]\n",
      "1744 [D loss: 0.192587, acc.: 92.19%] [G loss: 2.734105]\n",
      "1745 [D loss: 0.214312, acc.: 91.02%] [G loss: 2.687555]\n",
      "1746 [D loss: 0.213157, acc.: 90.62%] [G loss: 2.897330]\n",
      "1747 [D loss: 0.180162, acc.: 92.97%] [G loss: 2.783288]\n",
      "1748 [D loss: 0.202982, acc.: 91.41%] [G loss: 2.746521]\n",
      "1749 [D loss: 0.210190, acc.: 90.62%] [G loss: 2.731708]\n",
      "1750 [D loss: 0.199642, acc.: 90.62%] [G loss: 2.989672]\n",
      "1751 [D loss: 0.177608, acc.: 91.80%] [G loss: 2.821568]\n",
      "1752 [D loss: 0.194261, acc.: 91.80%] [G loss: 2.861269]\n",
      "1753 [D loss: 0.184567, acc.: 92.19%] [G loss: 2.862303]\n",
      "1754 [D loss: 0.207930, acc.: 91.41%] [G loss: 2.863905]\n",
      "1755 [D loss: 0.197185, acc.: 91.41%] [G loss: 2.733061]\n",
      "1756 [D loss: 0.198891, acc.: 91.02%] [G loss: 2.996010]\n",
      "1757 [D loss: 0.185373, acc.: 92.58%] [G loss: 3.143368]\n",
      "1758 [D loss: 0.199052, acc.: 90.62%] [G loss: 2.926968]\n",
      "1759 [D loss: 0.219926, acc.: 91.80%] [G loss: 2.795291]\n",
      "1760 [D loss: 0.213146, acc.: 90.62%] [G loss: 2.974482]\n",
      "1761 [D loss: 0.210632, acc.: 91.80%] [G loss: 2.960495]\n",
      "1762 [D loss: 0.223650, acc.: 89.06%] [G loss: 2.979412]\n",
      "1763 [D loss: 0.215388, acc.: 91.80%] [G loss: 2.753542]\n",
      "1764 [D loss: 0.193364, acc.: 92.58%] [G loss: 2.811030]\n",
      "1765 [D loss: 0.197535, acc.: 92.19%] [G loss: 2.878154]\n",
      "1766 [D loss: 0.211040, acc.: 89.84%] [G loss: 2.958325]\n",
      "1767 [D loss: 0.193217, acc.: 91.41%] [G loss: 2.969928]\n",
      "1768 [D loss: 0.197436, acc.: 91.80%] [G loss: 2.945132]\n",
      "1769 [D loss: 0.211724, acc.: 91.02%] [G loss: 2.856129]\n",
      "1770 [D loss: 0.175070, acc.: 92.19%] [G loss: 2.827996]\n",
      "1771 [D loss: 0.186959, acc.: 92.19%] [G loss: 2.741657]\n",
      "1772 [D loss: 0.174953, acc.: 93.36%] [G loss: 2.951541]\n",
      "1773 [D loss: 0.207788, acc.: 91.02%] [G loss: 3.222981]\n",
      "1774 [D loss: 0.181586, acc.: 91.02%] [G loss: 2.976268]\n",
      "1775 [D loss: 0.200546, acc.: 92.19%] [G loss: 2.759434]\n",
      "1776 [D loss: 0.197258, acc.: 91.41%] [G loss: 2.906775]\n",
      "1777 [D loss: 0.209450, acc.: 91.02%] [G loss: 2.902477]\n",
      "1778 [D loss: 0.192276, acc.: 91.41%] [G loss: 2.951941]\n",
      "1779 [D loss: 0.202391, acc.: 91.02%] [G loss: 3.122229]\n",
      "1780 [D loss: 0.221924, acc.: 89.06%] [G loss: 2.834138]\n",
      "1781 [D loss: 0.186511, acc.: 91.80%] [G loss: 2.782640]\n",
      "1782 [D loss: 0.207657, acc.: 91.41%] [G loss: 2.900418]\n",
      "1783 [D loss: 0.190398, acc.: 91.41%] [G loss: 2.875617]\n",
      "1784 [D loss: 0.195735, acc.: 91.80%] [G loss: 2.737222]\n",
      "1785 [D loss: 0.180421, acc.: 92.19%] [G loss: 2.912789]\n",
      "1786 [D loss: 0.221850, acc.: 90.23%] [G loss: 2.846378]\n",
      "1787 [D loss: 0.210839, acc.: 90.23%] [G loss: 3.192032]\n",
      "1788 [D loss: 0.217197, acc.: 91.02%] [G loss: 2.946253]\n",
      "1789 [D loss: 0.201853, acc.: 92.19%] [G loss: 3.012534]\n",
      "1790 [D loss: 0.198564, acc.: 92.97%] [G loss: 2.939809]\n",
      "1791 [D loss: 0.224395, acc.: 90.62%] [G loss: 2.905706]\n",
      "1792 [D loss: 0.208409, acc.: 92.19%] [G loss: 2.662904]\n",
      "1793 [D loss: 0.186006, acc.: 92.19%] [G loss: 2.964505]\n",
      "1794 [D loss: 0.209463, acc.: 93.75%] [G loss: 3.003477]\n",
      "1795 [D loss: 0.171384, acc.: 92.97%] [G loss: 2.786887]\n",
      "1796 [D loss: 0.216378, acc.: 90.23%] [G loss: 2.957950]\n",
      "1797 [D loss: 0.170279, acc.: 93.75%] [G loss: 2.768325]\n",
      "1798 [D loss: 0.168344, acc.: 93.36%] [G loss: 2.903422]\n",
      "1799 [D loss: 0.206841, acc.: 91.41%] [G loss: 2.932290]\n",
      "1800 [D loss: 0.200246, acc.: 92.19%] [G loss: 3.073585]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "1801 [D loss: 0.200178, acc.: 91.02%] [G loss: 3.018864]\n",
      "1802 [D loss: 0.185780, acc.: 91.80%] [G loss: 3.103268]\n",
      "1803 [D loss: 0.206994, acc.: 91.41%] [G loss: 2.674305]\n",
      "1804 [D loss: 0.169925, acc.: 94.14%] [G loss: 2.860744]\n",
      "1805 [D loss: 0.184305, acc.: 91.80%] [G loss: 2.688315]\n",
      "1806 [D loss: 0.202297, acc.: 91.02%] [G loss: 2.924583]\n",
      "1807 [D loss: 0.212383, acc.: 91.41%] [G loss: 2.951238]\n",
      "1808 [D loss: 0.211937, acc.: 91.02%] [G loss: 3.104795]\n",
      "1809 [D loss: 0.203162, acc.: 92.58%] [G loss: 2.972952]\n",
      "1810 [D loss: 0.225771, acc.: 89.45%] [G loss: 2.968959]\n",
      "1811 [D loss: 0.203232, acc.: 91.41%] [G loss: 3.066403]\n",
      "1812 [D loss: 0.197714, acc.: 91.80%] [G loss: 2.827002]\n",
      "1813 [D loss: 0.199932, acc.: 91.80%] [G loss: 3.066714]\n",
      "1814 [D loss: 0.194204, acc.: 92.19%] [G loss: 3.084887]\n",
      "1815 [D loss: 0.182114, acc.: 90.62%] [G loss: 3.093232]\n",
      "1816 [D loss: 0.171953, acc.: 93.36%] [G loss: 2.791331]\n",
      "1817 [D loss: 0.215001, acc.: 92.19%] [G loss: 3.156388]\n",
      "1818 [D loss: 0.187288, acc.: 91.80%] [G loss: 3.027142]\n",
      "1819 [D loss: 0.183273, acc.: 92.97%] [G loss: 3.120730]\n",
      "1820 [D loss: 0.192318, acc.: 92.19%] [G loss: 3.032218]\n",
      "1821 [D loss: 0.183039, acc.: 92.58%] [G loss: 2.867140]\n",
      "1822 [D loss: 0.200759, acc.: 91.02%] [G loss: 2.761122]\n",
      "1823 [D loss: 0.202434, acc.: 90.62%] [G loss: 3.042134]\n",
      "1824 [D loss: 0.202218, acc.: 91.02%] [G loss: 3.008665]\n",
      "1825 [D loss: 0.193420, acc.: 91.80%] [G loss: 2.861341]\n",
      "1826 [D loss: 0.170658, acc.: 91.41%] [G loss: 2.978463]\n",
      "1827 [D loss: 0.178934, acc.: 91.80%] [G loss: 2.946743]\n",
      "1828 [D loss: 0.165853, acc.: 92.19%] [G loss: 3.010827]\n",
      "1829 [D loss: 0.206023, acc.: 92.19%] [G loss: 2.940742]\n",
      "1830 [D loss: 0.203749, acc.: 90.62%] [G loss: 3.070301]\n",
      "1831 [D loss: 0.208979, acc.: 92.19%] [G loss: 2.840893]\n",
      "1832 [D loss: 0.181892, acc.: 93.36%] [G loss: 3.030101]\n",
      "1833 [D loss: 0.191078, acc.: 92.19%] [G loss: 2.743224]\n",
      "1834 [D loss: 0.194796, acc.: 92.58%] [G loss: 2.754530]\n",
      "1835 [D loss: 0.171362, acc.: 93.75%] [G loss: 3.133007]\n",
      "1836 [D loss: 0.182194, acc.: 93.75%] [G loss: 3.008260]\n",
      "1837 [D loss: 0.180157, acc.: 92.19%] [G loss: 2.799803]\n",
      "1838 [D loss: 0.212566, acc.: 90.23%] [G loss: 3.020650]\n",
      "1839 [D loss: 0.207818, acc.: 91.41%] [G loss: 2.958987]\n",
      "1840 [D loss: 0.222840, acc.: 91.80%] [G loss: 3.081196]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1841 [D loss: 0.189747, acc.: 93.75%] [G loss: 2.853798]\n",
      "1842 [D loss: 0.207439, acc.: 92.19%] [G loss: 3.008190]\n",
      "1843 [D loss: 0.194974, acc.: 91.80%] [G loss: 3.153977]\n",
      "1844 [D loss: 0.209135, acc.: 89.84%] [G loss: 3.057695]\n",
      "1845 [D loss: 0.211006, acc.: 90.23%] [G loss: 2.835890]\n",
      "1846 [D loss: 0.180431, acc.: 92.58%] [G loss: 2.766136]\n",
      "1847 [D loss: 0.201971, acc.: 91.80%] [G loss: 2.919172]\n",
      "1848 [D loss: 0.201861, acc.: 91.02%] [G loss: 2.802912]\n",
      "1849 [D loss: 0.176192, acc.: 92.19%] [G loss: 3.080324]\n",
      "1850 [D loss: 0.201444, acc.: 92.97%] [G loss: 3.104690]\n",
      "1851 [D loss: 0.187987, acc.: 91.41%] [G loss: 3.013953]\n",
      "1852 [D loss: 0.197037, acc.: 92.19%] [G loss: 2.852689]\n",
      "1853 [D loss: 0.187015, acc.: 92.19%] [G loss: 3.024547]\n",
      "1854 [D loss: 0.198326, acc.: 92.19%] [G loss: 2.988179]\n",
      "1855 [D loss: 0.193929, acc.: 91.02%] [G loss: 2.948438]\n",
      "1856 [D loss: 0.211826, acc.: 92.58%] [G loss: 3.030147]\n",
      "1857 [D loss: 0.193935, acc.: 92.19%] [G loss: 3.043049]\n",
      "1858 [D loss: 0.185152, acc.: 91.41%] [G loss: 3.005577]\n",
      "1859 [D loss: 0.202956, acc.: 91.80%] [G loss: 2.793350]\n",
      "1860 [D loss: 0.161713, acc.: 93.36%] [G loss: 3.212604]\n",
      "1861 [D loss: 0.217834, acc.: 91.02%] [G loss: 2.829255]\n",
      "1862 [D loss: 0.175406, acc.: 92.58%] [G loss: 2.917650]\n",
      "1863 [D loss: 0.199430, acc.: 92.19%] [G loss: 3.105930]\n",
      "1864 [D loss: 0.184705, acc.: 92.97%] [G loss: 2.855648]\n",
      "1865 [D loss: 0.184613, acc.: 91.02%] [G loss: 2.718095]\n",
      "1866 [D loss: 0.196306, acc.: 92.19%] [G loss: 2.971350]\n",
      "1867 [D loss: 0.203292, acc.: 91.02%] [G loss: 3.110476]\n",
      "1868 [D loss: 0.175832, acc.: 92.97%] [G loss: 2.984920]\n",
      "1869 [D loss: 0.194524, acc.: 92.19%] [G loss: 2.935209]\n",
      "1870 [D loss: 0.202178, acc.: 90.23%] [G loss: 2.952250]\n",
      "1871 [D loss: 0.183212, acc.: 91.80%] [G loss: 3.041865]\n",
      "1872 [D loss: 0.197619, acc.: 92.19%] [G loss: 2.990449]\n",
      "1873 [D loss: 0.184904, acc.: 92.19%] [G loss: 3.139047]\n",
      "1874 [D loss: 0.195123, acc.: 92.19%] [G loss: 2.795560]\n",
      "1875 [D loss: 0.221202, acc.: 91.41%] [G loss: 2.728813]\n",
      "1876 [D loss: 0.191101, acc.: 92.19%] [G loss: 3.256458]\n",
      "1877 [D loss: 0.177251, acc.: 93.36%] [G loss: 3.106522]\n",
      "1878 [D loss: 0.206053, acc.: 91.41%] [G loss: 2.794040]\n",
      "1879 [D loss: 0.189489, acc.: 93.75%] [G loss: 3.030794]\n",
      "1880 [D loss: 0.203428, acc.: 93.36%] [G loss: 2.875513]\n",
      "1881 [D loss: 0.192652, acc.: 92.19%] [G loss: 3.022687]\n",
      "1882 [D loss: 0.171443, acc.: 92.58%] [G loss: 3.002761]\n",
      "1883 [D loss: 0.184171, acc.: 91.41%] [G loss: 3.110669]\n",
      "1884 [D loss: 0.201771, acc.: 91.02%] [G loss: 3.111189]\n",
      "1885 [D loss: 0.178954, acc.: 92.58%] [G loss: 2.935022]\n",
      "1886 [D loss: 0.204570, acc.: 91.41%] [G loss: 3.075171]\n",
      "1887 [D loss: 0.181662, acc.: 92.58%] [G loss: 3.047594]\n",
      "1888 [D loss: 0.181078, acc.: 92.58%] [G loss: 3.001500]\n",
      "1889 [D loss: 0.199108, acc.: 91.41%] [G loss: 3.101579]\n",
      "1890 [D loss: 0.186630, acc.: 92.58%] [G loss: 3.023467]\n",
      "1891 [D loss: 0.184575, acc.: 92.19%] [G loss: 3.054045]\n",
      "1892 [D loss: 0.172921, acc.: 92.19%] [G loss: 3.090286]\n",
      "1893 [D loss: 0.217804, acc.: 91.41%] [G loss: 3.153611]\n",
      "1894 [D loss: 0.179397, acc.: 91.41%] [G loss: 3.175010]\n",
      "1895 [D loss: 0.199844, acc.: 91.41%] [G loss: 2.997302]\n",
      "1896 [D loss: 0.176272, acc.: 91.41%] [G loss: 3.011050]\n",
      "1897 [D loss: 0.195098, acc.: 92.19%] [G loss: 2.837945]\n",
      "1898 [D loss: 0.226800, acc.: 89.45%] [G loss: 2.755707]\n",
      "1899 [D loss: 0.205963, acc.: 91.02%] [G loss: 3.173236]\n",
      "1900 [D loss: 0.203972, acc.: 91.02%] [G loss: 3.090719]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "1901 [D loss: 0.215156, acc.: 89.84%] [G loss: 2.876858]\n",
      "1902 [D loss: 0.187941, acc.: 92.58%] [G loss: 3.295218]\n",
      "1903 [D loss: 0.211361, acc.: 91.41%] [G loss: 2.856800]\n",
      "1904 [D loss: 0.186419, acc.: 92.19%] [G loss: 2.952243]\n",
      "1905 [D loss: 0.204455, acc.: 91.80%] [G loss: 2.946643]\n",
      "1906 [D loss: 0.192490, acc.: 92.58%] [G loss: 3.085817]\n",
      "1907 [D loss: 0.218568, acc.: 90.23%] [G loss: 3.025311]\n",
      "1908 [D loss: 0.202824, acc.: 91.41%] [G loss: 3.022258]\n",
      "1909 [D loss: 0.204112, acc.: 92.58%] [G loss: 2.844719]\n",
      "1910 [D loss: 0.192509, acc.: 91.41%] [G loss: 3.097934]\n",
      "1911 [D loss: 0.212295, acc.: 90.62%] [G loss: 3.059549]\n",
      "1912 [D loss: 0.180546, acc.: 92.58%] [G loss: 2.922469]\n",
      "1913 [D loss: 0.190448, acc.: 91.80%] [G loss: 3.034439]\n",
      "1914 [D loss: 0.203613, acc.: 89.84%] [G loss: 3.220822]\n",
      "1915 [D loss: 0.196323, acc.: 91.02%] [G loss: 2.871717]\n",
      "1916 [D loss: 0.190499, acc.: 92.19%] [G loss: 2.705875]\n",
      "1917 [D loss: 0.239124, acc.: 89.06%] [G loss: 3.362924]\n",
      "1918 [D loss: 0.208553, acc.: 89.06%] [G loss: 2.947755]\n",
      "1919 [D loss: 0.185449, acc.: 91.02%] [G loss: 2.976640]\n",
      "1920 [D loss: 0.186178, acc.: 92.97%] [G loss: 2.986816]\n",
      "1921 [D loss: 0.183426, acc.: 91.80%] [G loss: 3.236490]\n",
      "1922 [D loss: 0.183100, acc.: 92.19%] [G loss: 2.961261]\n",
      "1923 [D loss: 0.205945, acc.: 91.80%] [G loss: 2.994947]\n",
      "1924 [D loss: 0.186726, acc.: 92.97%] [G loss: 3.086448]\n",
      "1925 [D loss: 0.184064, acc.: 92.19%] [G loss: 2.959475]\n",
      "1926 [D loss: 0.218707, acc.: 91.41%] [G loss: 3.098199]\n",
      "1927 [D loss: 0.181553, acc.: 91.80%] [G loss: 3.133501]\n",
      "1928 [D loss: 0.210919, acc.: 91.41%] [G loss: 3.255743]\n",
      "1929 [D loss: 0.190679, acc.: 92.58%] [G loss: 3.106027]\n",
      "1930 [D loss: 0.160489, acc.: 94.14%] [G loss: 3.028472]\n",
      "1931 [D loss: 0.214957, acc.: 91.02%] [G loss: 3.151386]\n",
      "1932 [D loss: 0.188043, acc.: 90.62%] [G loss: 3.176719]\n",
      "1933 [D loss: 0.208067, acc.: 91.02%] [G loss: 2.990281]\n",
      "1934 [D loss: 0.200240, acc.: 90.23%] [G loss: 3.065154]\n",
      "1935 [D loss: 0.204363, acc.: 92.19%] [G loss: 3.028367]\n",
      "1936 [D loss: 0.201502, acc.: 91.41%] [G loss: 2.998418]\n",
      "1937 [D loss: 0.196793, acc.: 92.58%] [G loss: 2.950148]\n",
      "1938 [D loss: 0.199757, acc.: 90.23%] [G loss: 3.111842]\n",
      "1939 [D loss: 0.181697, acc.: 92.58%] [G loss: 3.032258]\n",
      "1940 [D loss: 0.165794, acc.: 91.80%] [G loss: 2.926558]\n",
      "1941 [D loss: 0.194741, acc.: 92.58%] [G loss: 3.008071]\n",
      "1942 [D loss: 0.182733, acc.: 92.58%] [G loss: 3.333424]\n",
      "1943 [D loss: 0.199770, acc.: 90.62%] [G loss: 3.147209]\n",
      "1944 [D loss: 0.213529, acc.: 91.02%] [G loss: 2.965992]\n",
      "1945 [D loss: 0.176065, acc.: 92.19%] [G loss: 3.074842]\n",
      "1946 [D loss: 0.213426, acc.: 91.41%] [G loss: 2.948893]\n",
      "1947 [D loss: 0.213644, acc.: 92.58%] [G loss: 3.056797]\n",
      "1948 [D loss: 0.211502, acc.: 91.41%] [G loss: 3.163921]\n",
      "1949 [D loss: 0.199273, acc.: 91.80%] [G loss: 3.170249]\n",
      "1950 [D loss: 0.192062, acc.: 91.41%] [G loss: 3.222281]\n",
      "1951 [D loss: 0.204996, acc.: 90.62%] [G loss: 2.959589]\n",
      "1952 [D loss: 0.201844, acc.: 91.02%] [G loss: 3.035153]\n",
      "1953 [D loss: 0.181731, acc.: 93.36%] [G loss: 2.963029]\n",
      "1954 [D loss: 0.203020, acc.: 92.58%] [G loss: 3.027396]\n",
      "1955 [D loss: 0.165732, acc.: 92.97%] [G loss: 2.873567]\n",
      "1956 [D loss: 0.199858, acc.: 91.02%] [G loss: 3.109951]\n",
      "1957 [D loss: 0.177058, acc.: 92.58%] [G loss: 2.814670]\n",
      "1958 [D loss: 0.196573, acc.: 92.19%] [G loss: 3.149598]\n",
      "1959 [D loss: 0.173189, acc.: 92.19%] [G loss: 2.943340]\n",
      "1960 [D loss: 0.176061, acc.: 92.58%] [G loss: 2.956425]\n",
      "1961 [D loss: 0.182936, acc.: 91.41%] [G loss: 3.071502]\n",
      "1962 [D loss: 0.190446, acc.: 92.97%] [G loss: 3.259341]\n",
      "1963 [D loss: 0.193869, acc.: 91.80%] [G loss: 3.096234]\n",
      "1964 [D loss: 0.198023, acc.: 91.41%] [G loss: 3.082475]\n",
      "1965 [D loss: 0.223094, acc.: 89.45%] [G loss: 3.203937]\n",
      "1966 [D loss: 0.203343, acc.: 89.84%] [G loss: 2.914788]\n",
      "1967 [D loss: 0.181229, acc.: 93.75%] [G loss: 3.088303]\n",
      "1968 [D loss: 0.196592, acc.: 91.41%] [G loss: 2.916644]\n",
      "1969 [D loss: 0.190382, acc.: 92.19%] [G loss: 2.868629]\n",
      "1970 [D loss: 0.199637, acc.: 91.41%] [G loss: 3.158772]\n",
      "1971 [D loss: 0.216035, acc.: 91.41%] [G loss: 2.902545]\n",
      "1972 [D loss: 0.210865, acc.: 92.97%] [G loss: 3.133317]\n",
      "1973 [D loss: 0.188134, acc.: 92.19%] [G loss: 2.903071]\n",
      "1974 [D loss: 0.180545, acc.: 92.97%] [G loss: 2.981472]\n",
      "1975 [D loss: 0.217234, acc.: 90.23%] [G loss: 3.145254]\n",
      "1976 [D loss: 0.196467, acc.: 92.19%] [G loss: 2.960684]\n",
      "1977 [D loss: 0.173965, acc.: 92.97%] [G loss: 2.837327]\n",
      "1978 [D loss: 0.168418, acc.: 93.36%] [G loss: 2.924500]\n",
      "1979 [D loss: 0.178725, acc.: 91.80%] [G loss: 2.881402]\n",
      "1980 [D loss: 0.195543, acc.: 91.41%] [G loss: 3.016090]\n",
      "1981 [D loss: 0.188835, acc.: 91.41%] [G loss: 3.187693]\n",
      "1982 [D loss: 0.192676, acc.: 92.19%] [G loss: 3.125088]\n",
      "1983 [D loss: 0.181452, acc.: 93.75%] [G loss: 3.263379]\n",
      "1984 [D loss: 0.178879, acc.: 93.36%] [G loss: 3.010940]\n",
      "1985 [D loss: 0.180819, acc.: 92.58%] [G loss: 3.070634]\n",
      "1986 [D loss: 0.168928, acc.: 92.97%] [G loss: 3.032884]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1987 [D loss: 0.210549, acc.: 91.41%] [G loss: 3.126989]\n",
      "1988 [D loss: 0.160501, acc.: 93.36%] [G loss: 3.078322]\n",
      "1989 [D loss: 0.213617, acc.: 88.67%] [G loss: 3.238759]\n",
      "1990 [D loss: 0.162518, acc.: 93.36%] [G loss: 3.134805]\n",
      "1991 [D loss: 0.187201, acc.: 93.36%] [G loss: 2.785651]\n",
      "1992 [D loss: 0.193394, acc.: 90.62%] [G loss: 2.877316]\n",
      "1993 [D loss: 0.191022, acc.: 91.80%] [G loss: 3.002200]\n",
      "1994 [D loss: 0.212963, acc.: 91.02%] [G loss: 3.145579]\n",
      "1995 [D loss: 0.201705, acc.: 92.19%] [G loss: 3.064188]\n",
      "1996 [D loss: 0.212708, acc.: 91.41%] [G loss: 2.911861]\n",
      "1997 [D loss: 0.190248, acc.: 92.19%] [G loss: 3.244368]\n",
      "1998 [D loss: 0.193675, acc.: 91.41%] [G loss: 3.091109]\n",
      "1999 [D loss: 0.201106, acc.: 91.41%] [G loss: 2.849186]\n",
      "2000 [D loss: 0.206236, acc.: 91.80%] [G loss: 3.128824]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "2001 [D loss: 0.208201, acc.: 90.23%] [G loss: 3.377672]\n",
      "2002 [D loss: 0.187648, acc.: 91.41%] [G loss: 3.039101]\n",
      "2003 [D loss: 0.179854, acc.: 93.75%] [G loss: 2.772810]\n",
      "2004 [D loss: 0.192040, acc.: 92.97%] [G loss: 3.271379]\n",
      "2005 [D loss: 0.171842, acc.: 93.36%] [G loss: 3.224533]\n",
      "2006 [D loss: 0.205890, acc.: 90.62%] [G loss: 3.012692]\n",
      "2007 [D loss: 0.202247, acc.: 91.80%] [G loss: 3.077921]\n",
      "2008 [D loss: 0.235157, acc.: 88.28%] [G loss: 2.853350]\n",
      "2009 [D loss: 0.180334, acc.: 92.58%] [G loss: 3.100469]\n",
      "2010 [D loss: 0.201999, acc.: 92.19%] [G loss: 3.001088]\n",
      "2011 [D loss: 0.177854, acc.: 92.58%] [G loss: 3.021219]\n",
      "2012 [D loss: 0.228297, acc.: 91.02%] [G loss: 3.025017]\n",
      "2013 [D loss: 0.161846, acc.: 92.97%] [G loss: 3.378102]\n",
      "2014 [D loss: 0.203924, acc.: 91.41%] [G loss: 3.039093]\n",
      "2015 [D loss: 0.180224, acc.: 91.80%] [G loss: 3.122213]\n",
      "2016 [D loss: 0.210370, acc.: 91.41%] [G loss: 3.089503]\n",
      "2017 [D loss: 0.190592, acc.: 91.80%] [G loss: 3.145454]\n",
      "2018 [D loss: 0.192952, acc.: 92.58%] [G loss: 2.921624]\n",
      "2019 [D loss: 0.185726, acc.: 92.58%] [G loss: 2.947041]\n",
      "2020 [D loss: 0.196937, acc.: 91.80%] [G loss: 3.107229]\n",
      "2021 [D loss: 0.191834, acc.: 92.19%] [G loss: 3.131514]\n",
      "2022 [D loss: 0.219929, acc.: 92.19%] [G loss: 3.370173]\n",
      "2023 [D loss: 0.184024, acc.: 92.58%] [G loss: 3.032979]\n",
      "2024 [D loss: 0.185253, acc.: 91.41%] [G loss: 2.780249]\n",
      "2025 [D loss: 0.204794, acc.: 90.62%] [G loss: 3.119846]\n",
      "2026 [D loss: 0.172505, acc.: 93.36%] [G loss: 2.996351]\n",
      "2027 [D loss: 0.216620, acc.: 90.23%] [G loss: 3.496663]\n",
      "2028 [D loss: 0.180057, acc.: 92.97%] [G loss: 2.984364]\n",
      "2029 [D loss: 0.196723, acc.: 91.80%] [G loss: 3.067076]\n",
      "2030 [D loss: 0.182411, acc.: 92.19%] [G loss: 3.229803]\n",
      "2031 [D loss: 0.206184, acc.: 90.62%] [G loss: 3.272857]\n",
      "2032 [D loss: 0.237436, acc.: 89.06%] [G loss: 3.124816]\n",
      "2033 [D loss: 0.178282, acc.: 92.97%] [G loss: 3.121818]\n",
      "2034 [D loss: 0.186378, acc.: 92.19%] [G loss: 3.019002]\n",
      "2035 [D loss: 0.182311, acc.: 94.14%] [G loss: 3.133559]\n",
      "2036 [D loss: 0.160749, acc.: 92.97%] [G loss: 2.846884]\n",
      "2037 [D loss: 0.233829, acc.: 89.06%] [G loss: 3.434567]\n",
      "2038 [D loss: 0.181448, acc.: 91.41%] [G loss: 3.000851]\n",
      "2039 [D loss: 0.198947, acc.: 92.58%] [G loss: 3.010824]\n",
      "2040 [D loss: 0.197006, acc.: 92.19%] [G loss: 3.114713]\n",
      "2041 [D loss: 0.207551, acc.: 90.62%] [G loss: 3.136700]\n",
      "2042 [D loss: 0.209018, acc.: 91.41%] [G loss: 3.144531]\n",
      "2043 [D loss: 0.204041, acc.: 91.02%] [G loss: 2.871131]\n",
      "2044 [D loss: 0.207085, acc.: 92.19%] [G loss: 2.989472]\n",
      "2045 [D loss: 0.196780, acc.: 92.19%] [G loss: 3.410755]\n",
      "2046 [D loss: 0.193535, acc.: 91.41%] [G loss: 3.310699]\n",
      "2047 [D loss: 0.194939, acc.: 91.41%] [G loss: 2.914949]\n",
      "2048 [D loss: 0.212906, acc.: 91.02%] [G loss: 3.140490]\n",
      "2049 [D loss: 0.183258, acc.: 92.19%] [G loss: 3.273981]\n",
      "2050 [D loss: 0.183601, acc.: 91.80%] [G loss: 2.866466]\n",
      "2051 [D loss: 0.175561, acc.: 92.19%] [G loss: 3.122325]\n",
      "2052 [D loss: 0.197764, acc.: 92.19%] [G loss: 3.048217]\n",
      "2053 [D loss: 0.195237, acc.: 91.41%] [G loss: 2.857318]\n",
      "2054 [D loss: 0.180450, acc.: 92.19%] [G loss: 3.264625]\n",
      "2055 [D loss: 0.185025, acc.: 92.19%] [G loss: 3.043429]\n",
      "2056 [D loss: 0.175646, acc.: 91.80%] [G loss: 3.141884]\n",
      "2057 [D loss: 0.186053, acc.: 92.58%] [G loss: 2.910663]\n",
      "2058 [D loss: 0.198882, acc.: 92.58%] [G loss: 3.242686]\n",
      "2059 [D loss: 0.191513, acc.: 91.02%] [G loss: 3.026828]\n",
      "2060 [D loss: 0.189949, acc.: 90.62%] [G loss: 3.174901]\n",
      "2061 [D loss: 0.203423, acc.: 91.41%] [G loss: 2.972065]\n",
      "2062 [D loss: 0.178427, acc.: 92.19%] [G loss: 3.157908]\n",
      "2063 [D loss: 0.215500, acc.: 90.62%] [G loss: 3.198261]\n",
      "2064 [D loss: 0.237864, acc.: 89.06%] [G loss: 2.887190]\n",
      "2065 [D loss: 0.192304, acc.: 92.58%] [G loss: 3.247962]\n",
      "2066 [D loss: 0.195725, acc.: 92.58%] [G loss: 3.228820]\n",
      "2067 [D loss: 0.172680, acc.: 92.19%] [G loss: 3.081921]\n",
      "2068 [D loss: 0.171617, acc.: 92.19%] [G loss: 3.068509]\n",
      "2069 [D loss: 0.195721, acc.: 91.41%] [G loss: 3.061917]\n",
      "2070 [D loss: 0.219769, acc.: 89.84%] [G loss: 3.250153]\n",
      "2071 [D loss: 0.194708, acc.: 91.80%] [G loss: 3.262465]\n",
      "2072 [D loss: 0.190376, acc.: 92.19%] [G loss: 3.230400]\n",
      "2073 [D loss: 0.222024, acc.: 91.02%] [G loss: 3.132210]\n",
      "2074 [D loss: 0.194045, acc.: 91.41%] [G loss: 3.106173]\n",
      "2075 [D loss: 0.198521, acc.: 91.02%] [G loss: 3.236278]\n",
      "2076 [D loss: 0.192823, acc.: 92.19%] [G loss: 3.087624]\n",
      "2077 [D loss: 0.178368, acc.: 92.58%] [G loss: 3.203532]\n",
      "2078 [D loss: 0.166622, acc.: 92.19%] [G loss: 3.140111]\n",
      "2079 [D loss: 0.183028, acc.: 91.80%] [G loss: 3.011194]\n",
      "2080 [D loss: 0.201140, acc.: 92.97%] [G loss: 3.123571]\n",
      "2081 [D loss: 0.175977, acc.: 92.58%] [G loss: 3.098468]\n",
      "2082 [D loss: 0.201147, acc.: 90.23%] [G loss: 2.940989]\n",
      "2083 [D loss: 0.197046, acc.: 91.02%] [G loss: 3.056589]\n",
      "2084 [D loss: 0.198922, acc.: 92.19%] [G loss: 3.179646]\n",
      "2085 [D loss: 0.215218, acc.: 89.06%] [G loss: 3.046277]\n",
      "2086 [D loss: 0.195240, acc.: 92.58%] [G loss: 3.239556]\n",
      "2087 [D loss: 0.202297, acc.: 92.97%] [G loss: 3.072617]\n",
      "2088 [D loss: 0.178532, acc.: 93.75%] [G loss: 3.142671]\n",
      "2089 [D loss: 0.201294, acc.: 91.80%] [G loss: 2.985657]\n",
      "2090 [D loss: 0.229150, acc.: 89.06%] [G loss: 3.259336]\n",
      "2091 [D loss: 0.190928, acc.: 91.80%] [G loss: 3.477491]\n",
      "2092 [D loss: 0.197824, acc.: 92.58%] [G loss: 3.043920]\n",
      "2093 [D loss: 0.177118, acc.: 91.80%] [G loss: 3.082703]\n",
      "2094 [D loss: 0.202543, acc.: 92.19%] [G loss: 3.088039]\n",
      "2095 [D loss: 0.176465, acc.: 92.19%] [G loss: 3.231940]\n",
      "2096 [D loss: 0.212833, acc.: 92.19%] [G loss: 3.173385]\n",
      "2097 [D loss: 0.197175, acc.: 92.19%] [G loss: 3.067783]\n",
      "2098 [D loss: 0.175179, acc.: 93.75%] [G loss: 2.925654]\n",
      "2099 [D loss: 0.212857, acc.: 91.02%] [G loss: 3.176878]\n",
      "2100 [D loss: 0.178612, acc.: 91.80%] [G loss: 3.192662]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "2101 [D loss: 0.218979, acc.: 89.06%] [G loss: 3.361041]\n",
      "2102 [D loss: 0.217310, acc.: 91.80%] [G loss: 3.224582]\n",
      "2103 [D loss: 0.198029, acc.: 92.19%] [G loss: 2.944400]\n",
      "2104 [D loss: 0.198143, acc.: 92.97%] [G loss: 3.301033]\n",
      "2105 [D loss: 0.207936, acc.: 93.36%] [G loss: 3.232316]\n",
      "2106 [D loss: 0.234391, acc.: 90.23%] [G loss: 3.104233]\n",
      "2107 [D loss: 0.203978, acc.: 90.23%] [G loss: 2.947426]\n",
      "2108 [D loss: 0.193082, acc.: 90.62%] [G loss: 3.093648]\n",
      "2109 [D loss: 0.203755, acc.: 91.02%] [G loss: 3.084358]\n",
      "2110 [D loss: 0.186868, acc.: 90.62%] [G loss: 2.934789]\n",
      "2111 [D loss: 0.183168, acc.: 91.41%] [G loss: 3.249407]\n",
      "2112 [D loss: 0.219912, acc.: 91.02%] [G loss: 3.155575]\n",
      "2113 [D loss: 0.198009, acc.: 91.80%] [G loss: 2.986441]\n",
      "2114 [D loss: 0.188263, acc.: 91.02%] [G loss: 3.205903]\n",
      "2115 [D loss: 0.189288, acc.: 92.58%] [G loss: 3.107296]\n",
      "2116 [D loss: 0.196530, acc.: 91.41%] [G loss: 3.204895]\n",
      "2117 [D loss: 0.196259, acc.: 91.80%] [G loss: 3.088056]\n",
      "2118 [D loss: 0.186302, acc.: 91.41%] [G loss: 2.990579]\n",
      "2119 [D loss: 0.195169, acc.: 91.02%] [G loss: 3.237096]\n",
      "2120 [D loss: 0.188034, acc.: 91.80%] [G loss: 3.092665]\n",
      "2121 [D loss: 0.178544, acc.: 91.80%] [G loss: 3.008281]\n",
      "2122 [D loss: 0.191180, acc.: 93.36%] [G loss: 3.224586]\n",
      "2123 [D loss: 0.207760, acc.: 91.41%] [G loss: 3.027956]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2124 [D loss: 0.197918, acc.: 91.80%] [G loss: 3.043257]\n",
      "2125 [D loss: 0.194454, acc.: 90.23%] [G loss: 3.222708]\n",
      "2126 [D loss: 0.203874, acc.: 91.02%] [G loss: 3.039623]\n",
      "2127 [D loss: 0.200194, acc.: 92.58%] [G loss: 3.153836]\n",
      "2128 [D loss: 0.198920, acc.: 91.02%] [G loss: 2.893870]\n",
      "2129 [D loss: 0.188284, acc.: 92.97%] [G loss: 2.821083]\n",
      "2130 [D loss: 0.196251, acc.: 91.80%] [G loss: 2.856738]\n",
      "2131 [D loss: 0.200474, acc.: 91.80%] [G loss: 3.114586]\n",
      "2132 [D loss: 0.176685, acc.: 92.97%] [G loss: 3.392145]\n",
      "2133 [D loss: 0.182165, acc.: 92.58%] [G loss: 3.262442]\n",
      "2134 [D loss: 0.175386, acc.: 91.80%] [G loss: 2.952330]\n",
      "2135 [D loss: 0.158127, acc.: 93.36%] [G loss: 2.995521]\n",
      "2136 [D loss: 0.186656, acc.: 91.02%] [G loss: 3.377853]\n",
      "2137 [D loss: 0.183761, acc.: 90.62%] [G loss: 3.120375]\n",
      "2138 [D loss: 0.174474, acc.: 92.58%] [G loss: 2.958952]\n",
      "2139 [D loss: 0.213321, acc.: 89.84%] [G loss: 3.396533]\n",
      "2140 [D loss: 0.170337, acc.: 91.80%] [G loss: 3.221155]\n",
      "2141 [D loss: 0.190067, acc.: 91.80%] [G loss: 2.902348]\n",
      "2142 [D loss: 0.164794, acc.: 93.36%] [G loss: 3.117175]\n",
      "2143 [D loss: 0.215777, acc.: 90.62%] [G loss: 3.485407]\n",
      "2144 [D loss: 0.178815, acc.: 92.19%] [G loss: 3.029531]\n",
      "2145 [D loss: 0.182123, acc.: 92.58%] [G loss: 3.177808]\n",
      "2146 [D loss: 0.197784, acc.: 91.80%] [G loss: 3.283949]\n",
      "2147 [D loss: 0.203292, acc.: 91.02%] [G loss: 3.355076]\n",
      "2148 [D loss: 0.186136, acc.: 92.58%] [G loss: 3.305483]\n",
      "2149 [D loss: 0.192220, acc.: 91.80%] [G loss: 3.330420]\n",
      "2150 [D loss: 0.190994, acc.: 90.62%] [G loss: 2.967835]\n",
      "2151 [D loss: 0.171094, acc.: 92.58%] [G loss: 3.281223]\n",
      "2152 [D loss: 0.178229, acc.: 92.19%] [G loss: 3.228036]\n",
      "2153 [D loss: 0.179549, acc.: 92.97%] [G loss: 3.009978]\n",
      "2154 [D loss: 0.169127, acc.: 92.97%] [G loss: 3.084294]\n",
      "2155 [D loss: 0.210429, acc.: 90.62%] [G loss: 3.076926]\n",
      "2156 [D loss: 0.215961, acc.: 89.84%] [G loss: 3.203403]\n",
      "2157 [D loss: 0.207726, acc.: 91.41%] [G loss: 3.306826]\n",
      "2158 [D loss: 0.199456, acc.: 91.41%] [G loss: 2.943920]\n",
      "2159 [D loss: 0.220565, acc.: 90.62%] [G loss: 3.159227]\n",
      "2160 [D loss: 0.196288, acc.: 91.41%] [G loss: 3.387491]\n",
      "2161 [D loss: 0.199176, acc.: 91.80%] [G loss: 3.242362]\n",
      "2162 [D loss: 0.189002, acc.: 91.41%] [G loss: 3.238662]\n",
      "2163 [D loss: 0.196228, acc.: 91.41%] [G loss: 3.184088]\n",
      "2164 [D loss: 0.190281, acc.: 92.58%] [G loss: 3.013360]\n",
      "2165 [D loss: 0.213766, acc.: 91.41%] [G loss: 3.090172]\n",
      "2166 [D loss: 0.163396, acc.: 92.97%] [G loss: 3.222095]\n",
      "2167 [D loss: 0.168552, acc.: 92.97%] [G loss: 2.850496]\n",
      "2168 [D loss: 0.196045, acc.: 92.58%] [G loss: 3.113665]\n",
      "2169 [D loss: 0.161890, acc.: 91.80%] [G loss: 3.287662]\n",
      "2170 [D loss: 0.180434, acc.: 92.19%] [G loss: 3.211804]\n",
      "2171 [D loss: 0.191699, acc.: 92.19%] [G loss: 3.031798]\n",
      "2172 [D loss: 0.186406, acc.: 93.36%] [G loss: 3.053230]\n",
      "2173 [D loss: 0.225168, acc.: 90.23%] [G loss: 3.183739]\n",
      "2174 [D loss: 0.194251, acc.: 91.80%] [G loss: 3.352652]\n",
      "2175 [D loss: 0.197126, acc.: 91.02%] [G loss: 3.080625]\n",
      "2176 [D loss: 0.196357, acc.: 91.41%] [G loss: 3.175702]\n",
      "2177 [D loss: 0.193835, acc.: 91.41%] [G loss: 3.121435]\n",
      "2178 [D loss: 0.197827, acc.: 90.62%] [G loss: 3.094899]\n",
      "2179 [D loss: 0.184672, acc.: 92.19%] [G loss: 3.206423]\n",
      "2180 [D loss: 0.204550, acc.: 92.19%] [G loss: 3.076781]\n",
      "2181 [D loss: 0.195902, acc.: 90.23%] [G loss: 2.963645]\n",
      "2182 [D loss: 0.197188, acc.: 91.80%] [G loss: 3.284099]\n",
      "2183 [D loss: 0.197806, acc.: 91.41%] [G loss: 3.381826]\n",
      "2184 [D loss: 0.200306, acc.: 93.36%] [G loss: 3.104197]\n",
      "2185 [D loss: 0.201509, acc.: 91.41%] [G loss: 3.030801]\n",
      "2186 [D loss: 0.174486, acc.: 93.75%] [G loss: 3.135948]\n",
      "2187 [D loss: 0.220707, acc.: 91.02%] [G loss: 3.127687]\n",
      "2188 [D loss: 0.190717, acc.: 91.80%] [G loss: 3.120647]\n",
      "2189 [D loss: 0.201174, acc.: 92.19%] [G loss: 2.917560]\n",
      "2190 [D loss: 0.204424, acc.: 92.19%] [G loss: 3.034132]\n",
      "2191 [D loss: 0.180567, acc.: 92.58%] [G loss: 3.170830]\n",
      "2192 [D loss: 0.200401, acc.: 91.80%] [G loss: 3.168840]\n",
      "2193 [D loss: 0.198599, acc.: 92.19%] [G loss: 2.822898]\n",
      "2194 [D loss: 0.222708, acc.: 90.23%] [G loss: 3.021920]\n",
      "2195 [D loss: 0.203796, acc.: 91.80%] [G loss: 3.386737]\n",
      "2196 [D loss: 0.217275, acc.: 91.41%] [G loss: 3.152267]\n",
      "2197 [D loss: 0.222236, acc.: 89.84%] [G loss: 3.075341]\n",
      "2198 [D loss: 0.184795, acc.: 92.97%] [G loss: 3.277747]\n",
      "2199 [D loss: 0.200583, acc.: 91.80%] [G loss: 3.069163]\n",
      "2200 [D loss: 0.180680, acc.: 92.19%] [G loss: 2.945936]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "2201 [D loss: 0.194283, acc.: 91.80%] [G loss: 2.862834]\n",
      "2202 [D loss: 0.183396, acc.: 91.41%] [G loss: 3.295082]\n",
      "2203 [D loss: 0.194354, acc.: 91.41%] [G loss: 3.249832]\n",
      "2204 [D loss: 0.188885, acc.: 91.80%] [G loss: 2.952321]\n",
      "2205 [D loss: 0.184003, acc.: 92.19%] [G loss: 3.250758]\n",
      "2206 [D loss: 0.196720, acc.: 91.02%] [G loss: 3.068593]\n",
      "2207 [D loss: 0.197039, acc.: 91.80%] [G loss: 3.189271]\n",
      "2208 [D loss: 0.200424, acc.: 91.80%] [G loss: 3.176687]\n",
      "2209 [D loss: 0.184023, acc.: 91.41%] [G loss: 2.898821]\n",
      "2210 [D loss: 0.174672, acc.: 94.14%] [G loss: 3.125731]\n",
      "2211 [D loss: 0.190894, acc.: 92.58%] [G loss: 3.223379]\n",
      "2212 [D loss: 0.182568, acc.: 93.75%] [G loss: 3.156161]\n",
      "2213 [D loss: 0.191797, acc.: 91.80%] [G loss: 3.088560]\n",
      "2214 [D loss: 0.179451, acc.: 91.80%] [G loss: 3.109355]\n",
      "2215 [D loss: 0.200676, acc.: 89.45%] [G loss: 3.215102]\n",
      "2216 [D loss: 0.175966, acc.: 92.97%] [G loss: 3.064156]\n",
      "2217 [D loss: 0.196325, acc.: 91.80%] [G loss: 3.159965]\n",
      "2218 [D loss: 0.193248, acc.: 93.75%] [G loss: 3.010600]\n",
      "2219 [D loss: 0.183250, acc.: 92.58%] [G loss: 3.185187]\n",
      "2220 [D loss: 0.172830, acc.: 92.97%] [G loss: 3.110631]\n",
      "2221 [D loss: 0.184331, acc.: 92.19%] [G loss: 3.163686]\n",
      "2222 [D loss: 0.189109, acc.: 91.80%] [G loss: 3.215954]\n",
      "2223 [D loss: 0.229809, acc.: 90.62%] [G loss: 2.980850]\n",
      "2224 [D loss: 0.205874, acc.: 90.62%] [G loss: 3.090801]\n",
      "2225 [D loss: 0.204333, acc.: 91.02%] [G loss: 3.259400]\n",
      "2226 [D loss: 0.205274, acc.: 92.19%] [G loss: 2.999718]\n",
      "2227 [D loss: 0.169613, acc.: 91.80%] [G loss: 3.005015]\n",
      "2228 [D loss: 0.185284, acc.: 92.19%] [G loss: 3.126019]\n",
      "2229 [D loss: 0.192828, acc.: 91.41%] [G loss: 3.119433]\n",
      "2230 [D loss: 0.161677, acc.: 92.19%] [G loss: 3.141761]\n",
      "2231 [D loss: 0.191056, acc.: 92.58%] [G loss: 3.167590]\n",
      "2232 [D loss: 0.180627, acc.: 92.97%] [G loss: 3.005247]\n",
      "2233 [D loss: 0.181863, acc.: 92.19%] [G loss: 3.241513]\n",
      "2234 [D loss: 0.186246, acc.: 92.97%] [G loss: 3.204135]\n",
      "2235 [D loss: 0.191709, acc.: 92.97%] [G loss: 3.002656]\n",
      "2236 [D loss: 0.178819, acc.: 92.19%] [G loss: 2.910232]\n",
      "2237 [D loss: 0.184641, acc.: 92.97%] [G loss: 2.889689]\n",
      "2238 [D loss: 0.175050, acc.: 92.97%] [G loss: 2.937637]\n",
      "2239 [D loss: 0.209068, acc.: 91.02%] [G loss: 3.416152]\n",
      "2240 [D loss: 0.184247, acc.: 92.19%] [G loss: 3.133597]\n",
      "2241 [D loss: 0.206800, acc.: 91.41%] [G loss: 2.985379]\n",
      "2242 [D loss: 0.192062, acc.: 91.41%] [G loss: 3.228860]\n",
      "2243 [D loss: 0.176255, acc.: 93.36%] [G loss: 3.288630]\n",
      "2244 [D loss: 0.192986, acc.: 91.41%] [G loss: 3.059481]\n",
      "2245 [D loss: 0.189638, acc.: 91.41%] [G loss: 3.165168]\n",
      "2246 [D loss: 0.210086, acc.: 91.41%] [G loss: 3.454610]\n",
      "2247 [D loss: 0.223109, acc.: 89.45%] [G loss: 3.292230]\n",
      "2248 [D loss: 0.169932, acc.: 92.19%] [G loss: 3.296207]\n",
      "2249 [D loss: 0.197707, acc.: 92.19%] [G loss: 3.107048]\n",
      "2250 [D loss: 0.217369, acc.: 91.02%] [G loss: 3.269829]\n",
      "2251 [D loss: 0.187405, acc.: 92.58%] [G loss: 3.199187]\n",
      "2252 [D loss: 0.184464, acc.: 92.58%] [G loss: 3.095134]\n",
      "2253 [D loss: 0.183084, acc.: 92.19%] [G loss: 3.039839]\n",
      "2254 [D loss: 0.192215, acc.: 91.80%] [G loss: 3.533529]\n",
      "2255 [D loss: 0.210616, acc.: 91.80%] [G loss: 3.269760]\n",
      "2256 [D loss: 0.196535, acc.: 91.80%] [G loss: 3.202157]\n",
      "2257 [D loss: 0.179273, acc.: 91.41%] [G loss: 3.097322]\n",
      "2258 [D loss: 0.195045, acc.: 91.80%] [G loss: 3.131461]\n",
      "2259 [D loss: 0.191115, acc.: 91.80%] [G loss: 3.599961]\n",
      "2260 [D loss: 0.175738, acc.: 92.19%] [G loss: 3.338455]\n",
      "2261 [D loss: 0.176451, acc.: 92.97%] [G loss: 3.233634]\n",
      "2262 [D loss: 0.178756, acc.: 91.41%] [G loss: 3.006928]\n",
      "2263 [D loss: 0.196603, acc.: 91.02%] [G loss: 3.454623]\n",
      "2264 [D loss: 0.196625, acc.: 90.62%] [G loss: 2.960166]\n",
      "2265 [D loss: 0.180973, acc.: 91.41%] [G loss: 3.436542]\n",
      "2266 [D loss: 0.186282, acc.: 92.19%] [G loss: 3.051934]\n",
      "2267 [D loss: 0.201022, acc.: 91.80%] [G loss: 3.300543]\n",
      "2268 [D loss: 0.196485, acc.: 92.58%] [G loss: 3.225361]\n",
      "2269 [D loss: 0.209700, acc.: 91.80%] [G loss: 3.048941]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2270 [D loss: 0.187325, acc.: 91.80%] [G loss: 3.250750]\n",
      "2271 [D loss: 0.192734, acc.: 92.58%] [G loss: 3.016315]\n",
      "2272 [D loss: 0.190973, acc.: 91.80%] [G loss: 3.071504]\n",
      "2273 [D loss: 0.177351, acc.: 92.58%] [G loss: 3.314475]\n",
      "2274 [D loss: 0.179241, acc.: 92.97%] [G loss: 3.139202]\n",
      "2275 [D loss: 0.203743, acc.: 91.80%] [G loss: 3.456249]\n",
      "2276 [D loss: 0.197952, acc.: 91.02%] [G loss: 3.340337]\n",
      "2277 [D loss: 0.201577, acc.: 91.41%] [G loss: 3.003437]\n",
      "2278 [D loss: 0.196929, acc.: 92.19%] [G loss: 3.024688]\n",
      "2279 [D loss: 0.196168, acc.: 91.41%] [G loss: 2.934249]\n",
      "2280 [D loss: 0.203711, acc.: 91.80%] [G loss: 3.163511]\n",
      "2281 [D loss: 0.186844, acc.: 92.58%] [G loss: 3.020759]\n",
      "2282 [D loss: 0.208612, acc.: 92.19%] [G loss: 3.165546]\n",
      "2283 [D loss: 0.193310, acc.: 91.80%] [G loss: 2.981388]\n",
      "2284 [D loss: 0.189815, acc.: 91.80%] [G loss: 3.164985]\n",
      "2285 [D loss: 0.196238, acc.: 91.41%] [G loss: 3.131209]\n",
      "2286 [D loss: 0.203290, acc.: 92.19%] [G loss: 2.947752]\n",
      "2287 [D loss: 0.175248, acc.: 92.97%] [G loss: 3.156174]\n",
      "2288 [D loss: 0.186920, acc.: 92.19%] [G loss: 3.199376]\n",
      "2289 [D loss: 0.188538, acc.: 92.58%] [G loss: 3.236623]\n",
      "2290 [D loss: 0.188825, acc.: 92.19%] [G loss: 2.961577]\n",
      "2291 [D loss: 0.177156, acc.: 92.19%] [G loss: 3.162068]\n",
      "2292 [D loss: 0.186089, acc.: 91.41%] [G loss: 3.349805]\n",
      "2293 [D loss: 0.199131, acc.: 92.97%] [G loss: 3.318122]\n",
      "2294 [D loss: 0.192930, acc.: 92.19%] [G loss: 3.127300]\n",
      "2295 [D loss: 0.181933, acc.: 92.19%] [G loss: 3.144609]\n",
      "2296 [D loss: 0.193726, acc.: 92.97%] [G loss: 3.248223]\n",
      "2297 [D loss: 0.181543, acc.: 91.41%] [G loss: 3.295633]\n",
      "2298 [D loss: 0.214034, acc.: 92.19%] [G loss: 3.247710]\n",
      "2299 [D loss: 0.183386, acc.: 92.19%] [G loss: 3.340041]\n",
      "2300 [D loss: 0.214101, acc.: 91.41%] [G loss: 3.151182]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "2301 [D loss: 0.202808, acc.: 91.41%] [G loss: 3.221156]\n",
      "2302 [D loss: 0.178473, acc.: 92.19%] [G loss: 3.187303]\n",
      "2303 [D loss: 0.171112, acc.: 93.36%] [G loss: 3.025460]\n",
      "2304 [D loss: 0.190479, acc.: 90.62%] [G loss: 3.137488]\n",
      "2305 [D loss: 0.175379, acc.: 92.58%] [G loss: 3.340159]\n",
      "2306 [D loss: 0.176959, acc.: 94.14%] [G loss: 3.354723]\n",
      "2307 [D loss: 0.214070, acc.: 91.80%] [G loss: 3.098750]\n",
      "2308 [D loss: 0.195604, acc.: 91.80%] [G loss: 2.951618]\n",
      "2309 [D loss: 0.208182, acc.: 90.23%] [G loss: 2.922018]\n",
      "2310 [D loss: 0.189294, acc.: 90.62%] [G loss: 3.214444]\n",
      "2311 [D loss: 0.179283, acc.: 91.41%] [G loss: 2.960454]\n",
      "2312 [D loss: 0.199103, acc.: 90.23%] [G loss: 3.237262]\n",
      "2313 [D loss: 0.192944, acc.: 92.97%] [G loss: 3.098822]\n",
      "2314 [D loss: 0.200083, acc.: 91.41%] [G loss: 3.153835]\n",
      "2315 [D loss: 0.190529, acc.: 92.19%] [G loss: 3.155188]\n",
      "2316 [D loss: 0.190509, acc.: 91.80%] [G loss: 2.975429]\n",
      "2317 [D loss: 0.194722, acc.: 91.02%] [G loss: 3.110139]\n",
      "2318 [D loss: 0.195874, acc.: 91.41%] [G loss: 3.296217]\n",
      "2319 [D loss: 0.183226, acc.: 91.02%] [G loss: 3.260878]\n",
      "2320 [D loss: 0.188924, acc.: 91.41%] [G loss: 3.030603]\n",
      "2321 [D loss: 0.188155, acc.: 92.19%] [G loss: 3.007299]\n",
      "2322 [D loss: 0.204562, acc.: 92.97%] [G loss: 3.268070]\n",
      "2323 [D loss: 0.181819, acc.: 93.36%] [G loss: 3.283695]\n",
      "2324 [D loss: 0.182122, acc.: 92.19%] [G loss: 3.049049]\n",
      "2325 [D loss: 0.198605, acc.: 91.80%] [G loss: 3.028753]\n",
      "2326 [D loss: 0.191835, acc.: 91.80%] [G loss: 3.030981]\n",
      "2327 [D loss: 0.190980, acc.: 93.36%] [G loss: 3.127777]\n",
      "2328 [D loss: 0.189758, acc.: 91.41%] [G loss: 3.220527]\n",
      "2329 [D loss: 0.193117, acc.: 92.19%] [G loss: 3.236091]\n",
      "2330 [D loss: 0.163046, acc.: 92.97%] [G loss: 3.066649]\n",
      "2331 [D loss: 0.199412, acc.: 91.41%] [G loss: 2.976174]\n",
      "2332 [D loss: 0.181137, acc.: 93.75%] [G loss: 3.225694]\n",
      "2333 [D loss: 0.207859, acc.: 92.58%] [G loss: 3.390578]\n",
      "2334 [D loss: 0.202842, acc.: 91.41%] [G loss: 3.141405]\n",
      "2335 [D loss: 0.186516, acc.: 89.84%] [G loss: 3.098552]\n",
      "2336 [D loss: 0.200944, acc.: 91.80%] [G loss: 3.152299]\n",
      "2337 [D loss: 0.196979, acc.: 91.80%] [G loss: 3.388985]\n",
      "2338 [D loss: 0.189382, acc.: 92.19%] [G loss: 3.139328]\n",
      "2339 [D loss: 0.222298, acc.: 91.41%] [G loss: 3.086412]\n",
      "2340 [D loss: 0.213806, acc.: 91.02%] [G loss: 3.171009]\n",
      "2341 [D loss: 0.195402, acc.: 92.97%] [G loss: 2.916506]\n",
      "2342 [D loss: 0.178961, acc.: 92.97%] [G loss: 3.034367]\n",
      "2343 [D loss: 0.194797, acc.: 91.41%] [G loss: 3.291585]\n",
      "2344 [D loss: 0.188642, acc.: 92.58%] [G loss: 2.998201]\n",
      "2345 [D loss: 0.171072, acc.: 92.58%] [G loss: 2.783849]\n",
      "2346 [D loss: 0.170504, acc.: 93.75%] [G loss: 3.174796]\n",
      "2347 [D loss: 0.187312, acc.: 91.80%] [G loss: 3.258285]\n",
      "2348 [D loss: 0.200393, acc.: 91.41%] [G loss: 3.100951]\n",
      "2349 [D loss: 0.186408, acc.: 91.80%] [G loss: 3.041697]\n",
      "2350 [D loss: 0.199957, acc.: 91.41%] [G loss: 3.073736]\n",
      "2351 [D loss: 0.217646, acc.: 91.02%] [G loss: 3.087010]\n",
      "2352 [D loss: 0.180231, acc.: 92.97%] [G loss: 3.221670]\n",
      "2353 [D loss: 0.181190, acc.: 92.97%] [G loss: 3.228129]\n",
      "2354 [D loss: 0.197484, acc.: 91.02%] [G loss: 3.054890]\n",
      "2355 [D loss: 0.178660, acc.: 92.19%] [G loss: 3.036177]\n",
      "2356 [D loss: 0.199446, acc.: 91.80%] [G loss: 3.083385]\n",
      "2357 [D loss: 0.190446, acc.: 92.97%] [G loss: 3.298022]\n",
      "2358 [D loss: 0.194995, acc.: 92.19%] [G loss: 3.004083]\n",
      "2359 [D loss: 0.169832, acc.: 92.97%] [G loss: 3.195044]\n",
      "2360 [D loss: 0.176207, acc.: 91.80%] [G loss: 3.201860]\n",
      "2361 [D loss: 0.195988, acc.: 92.19%] [G loss: 3.333222]\n",
      "2362 [D loss: 0.237873, acc.: 90.62%] [G loss: 3.239234]\n",
      "2363 [D loss: 0.221526, acc.: 89.84%] [G loss: 3.255772]\n",
      "2364 [D loss: 0.198228, acc.: 91.80%] [G loss: 3.495117]\n",
      "2365 [D loss: 0.209331, acc.: 91.02%] [G loss: 3.058162]\n",
      "2366 [D loss: 0.197299, acc.: 91.80%] [G loss: 3.082448]\n",
      "2367 [D loss: 0.211066, acc.: 91.41%] [G loss: 3.256758]\n",
      "2368 [D loss: 0.209323, acc.: 90.62%] [G loss: 3.530863]\n",
      "2369 [D loss: 0.196111, acc.: 90.62%] [G loss: 3.069888]\n",
      "2370 [D loss: 0.219115, acc.: 91.02%] [G loss: 3.219857]\n",
      "2371 [D loss: 0.194915, acc.: 91.80%] [G loss: 3.018081]\n",
      "2372 [D loss: 0.170697, acc.: 92.97%] [G loss: 3.355808]\n",
      "2373 [D loss: 0.187719, acc.: 91.80%] [G loss: 3.454669]\n",
      "2374 [D loss: 0.218020, acc.: 92.19%] [G loss: 3.315707]\n",
      "2375 [D loss: 0.187402, acc.: 92.58%] [G loss: 2.998635]\n",
      "2376 [D loss: 0.177218, acc.: 92.19%] [G loss: 3.219270]\n",
      "2377 [D loss: 0.197643, acc.: 92.19%] [G loss: 3.418705]\n",
      "2378 [D loss: 0.190756, acc.: 92.19%] [G loss: 3.316223]\n",
      "2379 [D loss: 0.189807, acc.: 91.80%] [G loss: 3.242919]\n",
      "2380 [D loss: 0.165379, acc.: 92.58%] [G loss: 3.109513]\n",
      "2381 [D loss: 0.170147, acc.: 92.19%] [G loss: 3.222212]\n",
      "2382 [D loss: 0.174675, acc.: 92.97%] [G loss: 2.969331]\n",
      "2383 [D loss: 0.188510, acc.: 92.19%] [G loss: 3.189906]\n",
      "2384 [D loss: 0.174340, acc.: 92.19%] [G loss: 2.870681]\n",
      "2385 [D loss: 0.177047, acc.: 91.80%] [G loss: 3.160559]\n",
      "2386 [D loss: 0.200233, acc.: 91.80%] [G loss: 3.438803]\n",
      "2387 [D loss: 0.206558, acc.: 91.41%] [G loss: 3.384660]\n",
      "2388 [D loss: 0.206406, acc.: 92.19%] [G loss: 3.445603]\n",
      "2389 [D loss: 0.198618, acc.: 92.19%] [G loss: 3.179575]\n",
      "2390 [D loss: 0.198908, acc.: 91.80%] [G loss: 3.110248]\n",
      "2391 [D loss: 0.199290, acc.: 91.80%] [G loss: 3.546575]\n",
      "2392 [D loss: 0.206404, acc.: 91.80%] [G loss: 3.271318]\n",
      "2393 [D loss: 0.178964, acc.: 91.80%] [G loss: 3.079636]\n",
      "2394 [D loss: 0.183327, acc.: 91.80%] [G loss: 3.050941]\n",
      "2395 [D loss: 0.192576, acc.: 92.58%] [G loss: 3.041805]\n",
      "2396 [D loss: 0.177172, acc.: 92.19%] [G loss: 3.077155]\n",
      "2397 [D loss: 0.205469, acc.: 92.19%] [G loss: 3.394518]\n",
      "2398 [D loss: 0.209054, acc.: 91.80%] [G loss: 3.260306]\n",
      "2399 [D loss: 0.214637, acc.: 89.84%] [G loss: 3.094970]\n",
      "2400 [D loss: 0.186227, acc.: 91.41%] [G loss: 3.006083]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "2401 [D loss: 0.202234, acc.: 91.80%] [G loss: 2.881226]\n",
      "2402 [D loss: 0.205035, acc.: 91.41%] [G loss: 3.120836]\n",
      "2403 [D loss: 0.197426, acc.: 92.19%] [G loss: 3.300150]\n",
      "2404 [D loss: 0.175693, acc.: 92.19%] [G loss: 3.044015]\n",
      "2405 [D loss: 0.185204, acc.: 92.97%] [G loss: 3.041171]\n",
      "2406 [D loss: 0.177904, acc.: 92.97%] [G loss: 3.492015]\n",
      "2407 [D loss: 0.208230, acc.: 91.80%] [G loss: 3.101806]\n",
      "2408 [D loss: 0.194367, acc.: 91.02%] [G loss: 3.200511]\n",
      "2409 [D loss: 0.189625, acc.: 91.80%] [G loss: 3.093156]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2410 [D loss: 0.202657, acc.: 91.80%] [G loss: 3.063717]\n",
      "2411 [D loss: 0.175495, acc.: 92.19%] [G loss: 3.209321]\n",
      "2412 [D loss: 0.192633, acc.: 91.80%] [G loss: 3.184520]\n",
      "2413 [D loss: 0.219703, acc.: 89.45%] [G loss: 3.179330]\n",
      "2414 [D loss: 0.184992, acc.: 92.19%] [G loss: 2.950513]\n",
      "2415 [D loss: 0.184093, acc.: 91.41%] [G loss: 3.147828]\n",
      "2416 [D loss: 0.207924, acc.: 91.41%] [G loss: 3.206968]\n",
      "2417 [D loss: 0.197750, acc.: 92.58%] [G loss: 3.290154]\n",
      "2418 [D loss: 0.205480, acc.: 91.80%] [G loss: 3.231485]\n",
      "2419 [D loss: 0.199448, acc.: 91.80%] [G loss: 3.158862]\n",
      "2420 [D loss: 0.181367, acc.: 91.41%] [G loss: 3.084131]\n",
      "2421 [D loss: 0.184952, acc.: 91.41%] [G loss: 2.916889]\n",
      "2422 [D loss: 0.183138, acc.: 92.58%] [G loss: 3.265976]\n",
      "2423 [D loss: 0.183921, acc.: 92.97%] [G loss: 3.102611]\n",
      "2424 [D loss: 0.193061, acc.: 91.41%] [G loss: 3.100873]\n",
      "2425 [D loss: 0.195606, acc.: 91.41%] [G loss: 3.339671]\n",
      "2426 [D loss: 0.221088, acc.: 90.23%] [G loss: 3.456846]\n",
      "2427 [D loss: 0.200642, acc.: 90.62%] [G loss: 3.297761]\n",
      "2428 [D loss: 0.198001, acc.: 91.41%] [G loss: 3.175005]\n",
      "2429 [D loss: 0.206380, acc.: 91.02%] [G loss: 3.334241]\n",
      "2430 [D loss: 0.182503, acc.: 92.19%] [G loss: 3.690062]\n",
      "2431 [D loss: 0.174063, acc.: 91.41%] [G loss: 3.234533]\n",
      "2432 [D loss: 0.191430, acc.: 92.97%] [G loss: 3.175753]\n",
      "2433 [D loss: 0.181463, acc.: 91.80%] [G loss: 3.111944]\n",
      "2434 [D loss: 0.222847, acc.: 91.80%] [G loss: 3.419977]\n",
      "2435 [D loss: 0.211512, acc.: 91.80%] [G loss: 3.205501]\n",
      "2436 [D loss: 0.181135, acc.: 91.80%] [G loss: 3.106955]\n",
      "2437 [D loss: 0.214679, acc.: 91.80%] [G loss: 3.080643]\n",
      "2438 [D loss: 0.192866, acc.: 92.97%] [G loss: 3.323388]\n",
      "2439 [D loss: 0.178959, acc.: 92.58%] [G loss: 3.272471]\n",
      "2440 [D loss: 0.202290, acc.: 92.58%] [G loss: 3.321634]\n",
      "2441 [D loss: 0.186603, acc.: 92.19%] [G loss: 3.003659]\n",
      "2442 [D loss: 0.191629, acc.: 92.58%] [G loss: 2.985467]\n",
      "2443 [D loss: 0.199733, acc.: 91.41%] [G loss: 3.234355]\n",
      "2444 [D loss: 0.188715, acc.: 91.80%] [G loss: 3.201985]\n",
      "2445 [D loss: 0.184881, acc.: 92.19%] [G loss: 3.383827]\n",
      "2446 [D loss: 0.208506, acc.: 91.80%] [G loss: 3.089681]\n",
      "2447 [D loss: 0.197073, acc.: 91.80%] [G loss: 2.888354]\n",
      "2448 [D loss: 0.191579, acc.: 91.41%] [G loss: 3.376206]\n",
      "2449 [D loss: 0.170119, acc.: 93.75%] [G loss: 3.169399]\n",
      "2450 [D loss: 0.188670, acc.: 93.36%] [G loss: 3.198001]\n",
      "2451 [D loss: 0.200154, acc.: 91.80%] [G loss: 3.107983]\n",
      "2452 [D loss: 0.197088, acc.: 92.19%] [G loss: 3.533169]\n",
      "2453 [D loss: 0.215809, acc.: 92.58%] [G loss: 3.226754]\n",
      "2454 [D loss: 0.195639, acc.: 91.41%] [G loss: 2.866937]\n",
      "2455 [D loss: 0.203247, acc.: 92.19%] [G loss: 2.884644]\n",
      "2456 [D loss: 0.191012, acc.: 92.19%] [G loss: 3.451173]\n",
      "2457 [D loss: 0.189187, acc.: 92.58%] [G loss: 3.117210]\n",
      "2458 [D loss: 0.186847, acc.: 91.80%] [G loss: 3.239274]\n",
      "2459 [D loss: 0.189520, acc.: 92.58%] [G loss: 2.959458]\n",
      "2460 [D loss: 0.197759, acc.: 91.80%] [G loss: 3.322969]\n",
      "2461 [D loss: 0.198546, acc.: 91.80%] [G loss: 3.127182]\n",
      "2462 [D loss: 0.201522, acc.: 91.41%] [G loss: 3.197614]\n",
      "2463 [D loss: 0.196166, acc.: 91.80%] [G loss: 3.334245]\n",
      "2464 [D loss: 0.202469, acc.: 92.19%] [G loss: 3.339660]\n",
      "2465 [D loss: 0.197478, acc.: 92.58%] [G loss: 3.059750]\n",
      "2466 [D loss: 0.192764, acc.: 92.97%] [G loss: 3.131432]\n",
      "2467 [D loss: 0.179021, acc.: 93.36%] [G loss: 2.926350]\n",
      "2468 [D loss: 0.190791, acc.: 93.75%] [G loss: 3.010793]\n",
      "2469 [D loss: 0.165160, acc.: 93.75%] [G loss: 3.122771]\n",
      "2470 [D loss: 0.197058, acc.: 92.19%] [G loss: 3.245099]\n",
      "2471 [D loss: 0.202826, acc.: 91.41%] [G loss: 3.061274]\n",
      "2472 [D loss: 0.193337, acc.: 91.80%] [G loss: 3.183187]\n",
      "2473 [D loss: 0.208588, acc.: 91.02%] [G loss: 3.119879]\n",
      "2474 [D loss: 0.193391, acc.: 91.02%] [G loss: 3.172119]\n",
      "2475 [D loss: 0.192654, acc.: 91.80%] [G loss: 3.069454]\n",
      "2476 [D loss: 0.187834, acc.: 92.58%] [G loss: 3.432321]\n",
      "2477 [D loss: 0.174578, acc.: 92.58%] [G loss: 3.217284]\n",
      "2478 [D loss: 0.175143, acc.: 91.41%] [G loss: 3.162780]\n",
      "2479 [D loss: 0.194072, acc.: 92.58%] [G loss: 2.861295]\n",
      "2480 [D loss: 0.199376, acc.: 90.62%] [G loss: 3.281009]\n",
      "2481 [D loss: 0.191755, acc.: 92.19%] [G loss: 3.038054]\n",
      "2482 [D loss: 0.178667, acc.: 92.19%] [G loss: 3.153180]\n",
      "2483 [D loss: 0.181773, acc.: 92.58%] [G loss: 2.928259]\n",
      "2484 [D loss: 0.212817, acc.: 90.62%] [G loss: 3.260396]\n",
      "2485 [D loss: 0.207769, acc.: 92.58%] [G loss: 2.870934]\n",
      "2486 [D loss: 0.164761, acc.: 92.97%] [G loss: 2.948606]\n",
      "2487 [D loss: 0.216767, acc.: 91.80%] [G loss: 3.191971]\n",
      "2488 [D loss: 0.206541, acc.: 91.80%] [G loss: 3.195937]\n",
      "2489 [D loss: 0.198889, acc.: 92.19%] [G loss: 3.042377]\n",
      "2490 [D loss: 0.199026, acc.: 92.19%] [G loss: 2.808755]\n",
      "2491 [D loss: 0.188710, acc.: 92.19%] [G loss: 2.907332]\n",
      "2492 [D loss: 0.208509, acc.: 91.80%] [G loss: 3.355147]\n",
      "2493 [D loss: 0.200011, acc.: 91.41%] [G loss: 3.255692]\n",
      "2494 [D loss: 0.197202, acc.: 92.19%] [G loss: 3.083410]\n",
      "2495 [D loss: 0.226751, acc.: 89.45%] [G loss: 3.045581]\n",
      "2496 [D loss: 0.217735, acc.: 91.02%] [G loss: 3.262699]\n",
      "2497 [D loss: 0.190468, acc.: 91.80%] [G loss: 3.211440]\n",
      "2498 [D loss: 0.216649, acc.: 90.23%] [G loss: 3.248394]\n",
      "2499 [D loss: 0.202587, acc.: 91.80%] [G loss: 2.902044]\n",
      "2500 [D loss: 0.195918, acc.: 91.02%] [G loss: 2.924814]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "2501 [D loss: 0.215501, acc.: 91.41%] [G loss: 3.273256]\n",
      "2502 [D loss: 0.198815, acc.: 91.80%] [G loss: 3.188718]\n",
      "2503 [D loss: 0.212184, acc.: 91.02%] [G loss: 2.875267]\n",
      "2504 [D loss: 0.191153, acc.: 92.19%] [G loss: 3.152954]\n",
      "2505 [D loss: 0.202526, acc.: 92.19%] [G loss: 3.098453]\n",
      "2506 [D loss: 0.182942, acc.: 92.19%] [G loss: 3.234119]\n",
      "2507 [D loss: 0.171468, acc.: 91.41%] [G loss: 3.184060]\n",
      "2508 [D loss: 0.188733, acc.: 91.80%] [G loss: 3.096459]\n",
      "2509 [D loss: 0.181159, acc.: 92.97%] [G loss: 3.106814]\n",
      "2510 [D loss: 0.198943, acc.: 91.41%] [G loss: 3.153831]\n",
      "2511 [D loss: 0.159193, acc.: 93.75%] [G loss: 3.281023]\n",
      "2512 [D loss: 0.182069, acc.: 92.58%] [G loss: 3.265963]\n",
      "2513 [D loss: 0.192527, acc.: 92.58%] [G loss: 3.221870]\n",
      "2514 [D loss: 0.226526, acc.: 91.41%] [G loss: 3.135427]\n",
      "2515 [D loss: 0.181221, acc.: 92.19%] [G loss: 2.959568]\n",
      "2516 [D loss: 0.214607, acc.: 91.02%] [G loss: 3.341811]\n",
      "2517 [D loss: 0.197799, acc.: 91.41%] [G loss: 3.078279]\n",
      "2518 [D loss: 0.193608, acc.: 91.80%] [G loss: 2.842383]\n",
      "2519 [D loss: 0.205171, acc.: 91.80%] [G loss: 3.208854]\n",
      "2520 [D loss: 0.221699, acc.: 91.80%] [G loss: 3.372734]\n",
      "2521 [D loss: 0.194709, acc.: 92.19%] [G loss: 3.141017]\n",
      "2522 [D loss: 0.212497, acc.: 91.41%] [G loss: 3.264583]\n",
      "2523 [D loss: 0.207597, acc.: 91.80%] [G loss: 3.368585]\n",
      "2524 [D loss: 0.205669, acc.: 91.80%] [G loss: 3.165182]\n",
      "2525 [D loss: 0.219241, acc.: 92.58%] [G loss: 3.022879]\n",
      "2526 [D loss: 0.186338, acc.: 92.19%] [G loss: 2.853246]\n",
      "2527 [D loss: 0.213264, acc.: 91.80%] [G loss: 3.151305]\n",
      "2528 [D loss: 0.194830, acc.: 91.80%] [G loss: 3.265090]\n",
      "2529 [D loss: 0.202548, acc.: 91.80%] [G loss: 2.985740]\n",
      "2530 [D loss: 0.218817, acc.: 92.58%] [G loss: 3.204508]\n",
      "2531 [D loss: 0.200509, acc.: 92.19%] [G loss: 3.026919]\n",
      "2532 [D loss: 0.231959, acc.: 91.02%] [G loss: 2.871179]\n",
      "2533 [D loss: 0.199940, acc.: 91.41%] [G loss: 2.926511]\n",
      "2534 [D loss: 0.204981, acc.: 91.41%] [G loss: 3.087913]\n",
      "2535 [D loss: 0.188684, acc.: 92.19%] [G loss: 3.295497]\n",
      "2536 [D loss: 0.207222, acc.: 91.80%] [G loss: 3.105910]\n",
      "2537 [D loss: 0.208839, acc.: 90.62%] [G loss: 3.069114]\n",
      "2538 [D loss: 0.207562, acc.: 92.19%] [G loss: 2.884336]\n",
      "2539 [D loss: 0.182059, acc.: 92.97%] [G loss: 3.071754]\n",
      "2540 [D loss: 0.205309, acc.: 92.19%] [G loss: 3.131810]\n",
      "2541 [D loss: 0.197499, acc.: 91.41%] [G loss: 3.003513]\n",
      "2542 [D loss: 0.187949, acc.: 91.80%] [G loss: 3.433621]\n",
      "2543 [D loss: 0.190741, acc.: 91.80%] [G loss: 3.268802]\n",
      "2544 [D loss: 0.195161, acc.: 91.02%] [G loss: 3.100844]\n",
      "2545 [D loss: 0.200841, acc.: 92.19%] [G loss: 2.954223]\n",
      "2546 [D loss: 0.191776, acc.: 92.58%] [G loss: 3.404402]\n",
      "2547 [D loss: 0.194645, acc.: 91.80%] [G loss: 3.318985]\n",
      "2548 [D loss: 0.207356, acc.: 91.41%] [G loss: 3.159480]\n",
      "2549 [D loss: 0.223476, acc.: 89.84%] [G loss: 3.112684]\n",
      "2550 [D loss: 0.170969, acc.: 92.97%] [G loss: 3.276582]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2551 [D loss: 0.209426, acc.: 91.02%] [G loss: 3.497607]\n",
      "2552 [D loss: 0.202199, acc.: 91.41%] [G loss: 3.245438]\n",
      "2553 [D loss: 0.183383, acc.: 92.19%] [G loss: 3.081015]\n",
      "2554 [D loss: 0.203924, acc.: 92.19%] [G loss: 3.341625]\n",
      "2555 [D loss: 0.189574, acc.: 91.80%] [G loss: 3.270366]\n",
      "2556 [D loss: 0.196326, acc.: 91.80%] [G loss: 2.953084]\n",
      "2557 [D loss: 0.211559, acc.: 90.62%] [G loss: 3.103699]\n",
      "2558 [D loss: 0.210227, acc.: 91.41%] [G loss: 3.192655]\n",
      "2559 [D loss: 0.219575, acc.: 91.02%] [G loss: 3.104138]\n",
      "2560 [D loss: 0.203822, acc.: 91.02%] [G loss: 2.964474]\n",
      "2561 [D loss: 0.220300, acc.: 89.84%] [G loss: 3.252833]\n",
      "2562 [D loss: 0.165937, acc.: 92.58%] [G loss: 3.426154]\n",
      "2563 [D loss: 0.196299, acc.: 91.80%] [G loss: 3.048300]\n",
      "2564 [D loss: 0.191634, acc.: 91.80%] [G loss: 2.957625]\n",
      "2565 [D loss: 0.207995, acc.: 92.19%] [G loss: 3.043509]\n",
      "2566 [D loss: 0.194969, acc.: 91.02%] [G loss: 3.352868]\n",
      "2567 [D loss: 0.198833, acc.: 91.80%] [G loss: 3.118186]\n",
      "2568 [D loss: 0.200975, acc.: 91.41%] [G loss: 3.100512]\n",
      "2569 [D loss: 0.210598, acc.: 90.62%] [G loss: 2.904318]\n",
      "2570 [D loss: 0.189274, acc.: 92.19%] [G loss: 3.190434]\n",
      "2571 [D loss: 0.189419, acc.: 92.19%] [G loss: 2.943989]\n",
      "2572 [D loss: 0.195377, acc.: 92.97%] [G loss: 2.950149]\n",
      "2573 [D loss: 0.203535, acc.: 92.19%] [G loss: 3.055371]\n",
      "2574 [D loss: 0.189285, acc.: 92.19%] [G loss: 3.313822]\n",
      "2575 [D loss: 0.212294, acc.: 92.19%] [G loss: 3.232594]\n",
      "2576 [D loss: 0.200377, acc.: 92.58%] [G loss: 3.011918]\n",
      "2577 [D loss: 0.219834, acc.: 91.41%] [G loss: 3.413633]\n",
      "2578 [D loss: 0.203613, acc.: 91.02%] [G loss: 3.295485]\n",
      "2579 [D loss: 0.208572, acc.: 91.41%] [G loss: 3.191541]\n",
      "2580 [D loss: 0.197091, acc.: 92.58%] [G loss: 3.080598]\n",
      "2581 [D loss: 0.203302, acc.: 91.02%] [G loss: 3.272604]\n",
      "2582 [D loss: 0.211565, acc.: 90.23%] [G loss: 3.333205]\n",
      "2583 [D loss: 0.225321, acc.: 90.62%] [G loss: 3.147617]\n",
      "2584 [D loss: 0.206010, acc.: 91.80%] [G loss: 3.050905]\n",
      "2585 [D loss: 0.202527, acc.: 91.41%] [G loss: 3.092957]\n",
      "2586 [D loss: 0.206561, acc.: 91.80%] [G loss: 3.086343]\n",
      "2587 [D loss: 0.198062, acc.: 91.41%] [G loss: 3.476783]\n",
      "2588 [D loss: 0.198938, acc.: 90.62%] [G loss: 3.179781]\n",
      "2589 [D loss: 0.224243, acc.: 91.02%] [G loss: 3.343810]\n",
      "2590 [D loss: 0.207702, acc.: 91.02%] [G loss: 2.955513]\n",
      "2591 [D loss: 0.212323, acc.: 92.19%] [G loss: 3.161821]\n",
      "2592 [D loss: 0.201009, acc.: 91.80%] [G loss: 3.012963]\n",
      "2593 [D loss: 0.207639, acc.: 89.84%] [G loss: 3.321585]\n",
      "2594 [D loss: 0.205818, acc.: 91.80%] [G loss: 3.297851]\n",
      "2595 [D loss: 0.193496, acc.: 92.58%] [G loss: 3.316800]\n",
      "2596 [D loss: 0.202204, acc.: 90.62%] [G loss: 2.988139]\n",
      "2597 [D loss: 0.206041, acc.: 91.41%] [G loss: 3.346031]\n",
      "2598 [D loss: 0.198685, acc.: 91.80%] [G loss: 3.288394]\n",
      "2599 [D loss: 0.212392, acc.: 91.41%] [G loss: 3.098505]\n",
      "2600 [D loss: 0.218427, acc.: 91.80%] [G loss: 2.908962]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "2601 [D loss: 0.177472, acc.: 91.80%] [G loss: 2.841449]\n",
      "2602 [D loss: 0.192885, acc.: 91.80%] [G loss: 3.016100]\n",
      "2603 [D loss: 0.200563, acc.: 92.58%] [G loss: 3.214754]\n",
      "2604 [D loss: 0.201795, acc.: 91.80%] [G loss: 3.049151]\n",
      "2605 [D loss: 0.220765, acc.: 90.62%] [G loss: 3.088634]\n",
      "2606 [D loss: 0.189095, acc.: 92.19%] [G loss: 2.640987]\n",
      "2607 [D loss: 0.199975, acc.: 91.41%] [G loss: 2.817945]\n",
      "2608 [D loss: 0.176435, acc.: 92.58%] [G loss: 2.997911]\n",
      "2609 [D loss: 0.200982, acc.: 92.19%] [G loss: 2.703688]\n",
      "2610 [D loss: 0.195076, acc.: 91.80%] [G loss: 3.151488]\n",
      "2611 [D loss: 0.211887, acc.: 92.19%] [G loss: 3.092947]\n",
      "2612 [D loss: 0.220258, acc.: 91.80%] [G loss: 2.979391]\n",
      "2613 [D loss: 0.195437, acc.: 92.19%] [G loss: 3.017945]\n",
      "2614 [D loss: 0.191696, acc.: 91.80%] [G loss: 3.358551]\n",
      "2615 [D loss: 0.210478, acc.: 91.41%] [G loss: 2.963512]\n",
      "2616 [D loss: 0.230302, acc.: 90.23%] [G loss: 3.048975]\n",
      "2617 [D loss: 0.205338, acc.: 92.19%] [G loss: 2.882071]\n",
      "2618 [D loss: 0.196395, acc.: 92.58%] [G loss: 3.329717]\n",
      "2619 [D loss: 0.192069, acc.: 91.41%] [G loss: 3.133219]\n",
      "2620 [D loss: 0.189700, acc.: 92.19%] [G loss: 2.996074]\n",
      "2621 [D loss: 0.196750, acc.: 91.80%] [G loss: 3.099055]\n",
      "2622 [D loss: 0.213875, acc.: 90.62%] [G loss: 3.159779]\n",
      "2623 [D loss: 0.200150, acc.: 91.41%] [G loss: 3.314397]\n",
      "2624 [D loss: 0.195421, acc.: 92.19%] [G loss: 3.144093]\n",
      "2625 [D loss: 0.191678, acc.: 92.97%] [G loss: 2.948560]\n",
      "2626 [D loss: 0.178541, acc.: 92.97%] [G loss: 3.298876]\n",
      "2627 [D loss: 0.209370, acc.: 91.41%] [G loss: 3.241537]\n",
      "2628 [D loss: 0.179757, acc.: 92.19%] [G loss: 3.089025]\n",
      "2629 [D loss: 0.206570, acc.: 92.19%] [G loss: 3.347917]\n",
      "2630 [D loss: 0.214797, acc.: 91.02%] [G loss: 3.291735]\n",
      "2631 [D loss: 0.214569, acc.: 91.02%] [G loss: 3.012210]\n",
      "2632 [D loss: 0.174052, acc.: 92.58%] [G loss: 3.340268]\n",
      "2633 [D loss: 0.205494, acc.: 91.80%] [G loss: 3.414973]\n",
      "2634 [D loss: 0.211676, acc.: 91.02%] [G loss: 3.103648]\n",
      "2635 [D loss: 0.202232, acc.: 92.19%] [G loss: 2.968362]\n",
      "2636 [D loss: 0.192364, acc.: 91.80%] [G loss: 2.978784]\n",
      "2637 [D loss: 0.211144, acc.: 90.23%] [G loss: 2.989342]\n",
      "2638 [D loss: 0.191093, acc.: 91.80%] [G loss: 3.186161]\n",
      "2639 [D loss: 0.194231, acc.: 91.80%] [G loss: 3.007586]\n",
      "2640 [D loss: 0.188827, acc.: 92.58%] [G loss: 3.146921]\n",
      "2641 [D loss: 0.201976, acc.: 91.41%] [G loss: 2.900915]\n",
      "2642 [D loss: 0.206730, acc.: 91.41%] [G loss: 3.024253]\n",
      "2643 [D loss: 0.196524, acc.: 92.19%] [G loss: 3.212299]\n",
      "2644 [D loss: 0.191774, acc.: 92.19%] [G loss: 3.217610]\n",
      "2645 [D loss: 0.186232, acc.: 91.80%] [G loss: 3.119679]\n",
      "2646 [D loss: 0.183715, acc.: 91.41%] [G loss: 3.091710]\n",
      "2647 [D loss: 0.215747, acc.: 91.80%] [G loss: 3.439066]\n",
      "2648 [D loss: 0.187291, acc.: 92.19%] [G loss: 3.580698]\n",
      "2649 [D loss: 0.200811, acc.: 92.19%] [G loss: 2.985866]\n",
      "2650 [D loss: 0.185851, acc.: 92.19%] [G loss: 2.990995]\n",
      "2651 [D loss: 0.187805, acc.: 92.58%] [G loss: 2.952895]\n",
      "2652 [D loss: 0.214485, acc.: 92.19%] [G loss: 3.223082]\n",
      "2653 [D loss: 0.200457, acc.: 92.97%] [G loss: 3.486277]\n",
      "2654 [D loss: 0.194540, acc.: 91.80%] [G loss: 3.322071]\n",
      "2655 [D loss: 0.189160, acc.: 92.19%] [G loss: 3.224674]\n",
      "2656 [D loss: 0.222528, acc.: 91.41%] [G loss: 2.890807]\n",
      "2657 [D loss: 0.187768, acc.: 91.80%] [G loss: 3.081144]\n",
      "2658 [D loss: 0.218861, acc.: 90.62%] [G loss: 3.179339]\n",
      "2659 [D loss: 0.213209, acc.: 90.62%] [G loss: 3.077206]\n",
      "2660 [D loss: 0.188967, acc.: 92.19%] [G loss: 2.883076]\n",
      "2661 [D loss: 0.193557, acc.: 91.41%] [G loss: 2.918912]\n",
      "2662 [D loss: 0.217695, acc.: 91.02%] [G loss: 3.267662]\n",
      "2663 [D loss: 0.206696, acc.: 91.80%] [G loss: 3.180214]\n",
      "2664 [D loss: 0.184583, acc.: 93.36%] [G loss: 3.120657]\n",
      "2665 [D loss: 0.211436, acc.: 89.84%] [G loss: 3.272183]\n",
      "2666 [D loss: 0.201543, acc.: 91.80%] [G loss: 2.980271]\n",
      "2667 [D loss: 0.219708, acc.: 92.19%] [G loss: 3.112053]\n",
      "2668 [D loss: 0.203437, acc.: 91.41%] [G loss: 3.141561]\n",
      "2669 [D loss: 0.175011, acc.: 92.97%] [G loss: 3.018214]\n",
      "2670 [D loss: 0.189822, acc.: 91.80%] [G loss: 3.114131]\n",
      "2671 [D loss: 0.190480, acc.: 91.41%] [G loss: 3.019609]\n",
      "2672 [D loss: 0.205035, acc.: 90.62%] [G loss: 3.353447]\n",
      "2673 [D loss: 0.203759, acc.: 92.19%] [G loss: 2.840081]\n",
      "2674 [D loss: 0.194641, acc.: 92.58%] [G loss: 3.156424]\n",
      "2675 [D loss: 0.195145, acc.: 92.58%] [G loss: 3.177274]\n",
      "2676 [D loss: 0.197129, acc.: 90.62%] [G loss: 3.160689]\n",
      "2677 [D loss: 0.202612, acc.: 91.41%] [G loss: 3.524959]\n",
      "2678 [D loss: 0.197529, acc.: 91.80%] [G loss: 3.537153]\n",
      "2679 [D loss: 0.191213, acc.: 92.19%] [G loss: 3.078196]\n",
      "2680 [D loss: 0.199064, acc.: 91.41%] [G loss: 3.421585]\n",
      "2681 [D loss: 0.205582, acc.: 91.80%] [G loss: 2.743984]\n",
      "2682 [D loss: 0.191836, acc.: 92.19%] [G loss: 3.225466]\n",
      "2683 [D loss: 0.213757, acc.: 91.41%] [G loss: 3.422205]\n",
      "2684 [D loss: 0.199982, acc.: 91.41%] [G loss: 3.301776]\n",
      "2685 [D loss: 0.196631, acc.: 91.41%] [G loss: 3.420531]\n",
      "2686 [D loss: 0.202972, acc.: 92.19%] [G loss: 3.086803]\n",
      "2687 [D loss: 0.195138, acc.: 92.19%] [G loss: 3.140709]\n",
      "2688 [D loss: 0.189280, acc.: 91.80%] [G loss: 2.806367]\n",
      "2689 [D loss: 0.170784, acc.: 92.97%] [G loss: 3.126627]\n",
      "2690 [D loss: 0.176740, acc.: 93.36%] [G loss: 2.947164]\n",
      "2691 [D loss: 0.206325, acc.: 91.41%] [G loss: 3.123900]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2692 [D loss: 0.183564, acc.: 92.58%] [G loss: 3.339467]\n",
      "2693 [D loss: 0.196698, acc.: 91.41%] [G loss: 2.971672]\n",
      "2694 [D loss: 0.198949, acc.: 92.19%] [G loss: 3.106875]\n",
      "2695 [D loss: 0.206717, acc.: 92.19%] [G loss: 3.098854]\n",
      "2696 [D loss: 0.194169, acc.: 91.80%] [G loss: 3.122700]\n",
      "2697 [D loss: 0.200457, acc.: 92.19%] [G loss: 2.996216]\n",
      "2698 [D loss: 0.187584, acc.: 91.80%] [G loss: 3.212615]\n",
      "2699 [D loss: 0.187754, acc.: 91.02%] [G loss: 2.866210]\n",
      "2700 [D loss: 0.220381, acc.: 90.62%] [G loss: 3.291114]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "2701 [D loss: 0.187226, acc.: 92.58%] [G loss: 3.471763]\n",
      "2702 [D loss: 0.183343, acc.: 91.80%] [G loss: 3.237209]\n",
      "2703 [D loss: 0.204024, acc.: 91.80%] [G loss: 2.978658]\n",
      "2704 [D loss: 0.193808, acc.: 91.80%] [G loss: 3.136049]\n",
      "2705 [D loss: 0.205059, acc.: 91.80%] [G loss: 2.980321]\n",
      "2706 [D loss: 0.246766, acc.: 91.02%] [G loss: 3.073398]\n",
      "2707 [D loss: 0.191343, acc.: 92.19%] [G loss: 2.946006]\n",
      "2708 [D loss: 0.227168, acc.: 89.84%] [G loss: 3.108296]\n",
      "2709 [D loss: 0.196212, acc.: 92.58%] [G loss: 3.121192]\n",
      "2710 [D loss: 0.210036, acc.: 92.58%] [G loss: 3.011711]\n",
      "2711 [D loss: 0.174515, acc.: 92.19%] [G loss: 3.199713]\n",
      "2712 [D loss: 0.226628, acc.: 91.80%] [G loss: 3.049682]\n",
      "2713 [D loss: 0.197123, acc.: 92.19%] [G loss: 3.166389]\n",
      "2714 [D loss: 0.202045, acc.: 91.41%] [G loss: 2.980513]\n",
      "2715 [D loss: 0.180804, acc.: 92.19%] [G loss: 2.843190]\n",
      "2716 [D loss: 0.188395, acc.: 93.36%] [G loss: 3.115120]\n",
      "2717 [D loss: 0.195843, acc.: 92.58%] [G loss: 2.984077]\n",
      "2718 [D loss: 0.178968, acc.: 92.97%] [G loss: 3.231728]\n",
      "2719 [D loss: 0.176687, acc.: 92.58%] [G loss: 2.877667]\n",
      "2720 [D loss: 0.208727, acc.: 91.80%] [G loss: 3.290338]\n",
      "2721 [D loss: 0.202079, acc.: 90.23%] [G loss: 3.462632]\n",
      "2722 [D loss: 0.186731, acc.: 92.97%] [G loss: 3.348814]\n",
      "2723 [D loss: 0.194157, acc.: 91.41%] [G loss: 3.126560]\n",
      "2724 [D loss: 0.176821, acc.: 93.36%] [G loss: 3.154726]\n",
      "2725 [D loss: 0.179870, acc.: 92.58%] [G loss: 3.431195]\n",
      "2726 [D loss: 0.200204, acc.: 92.19%] [G loss: 2.991511]\n",
      "2727 [D loss: 0.216964, acc.: 91.02%] [G loss: 2.998330]\n",
      "2728 [D loss: 0.198830, acc.: 91.41%] [G loss: 3.230288]\n",
      "2729 [D loss: 0.190698, acc.: 92.19%] [G loss: 3.181978]\n",
      "2730 [D loss: 0.175635, acc.: 92.97%] [G loss: 3.263385]\n",
      "2731 [D loss: 0.175237, acc.: 91.80%] [G loss: 3.205322]\n",
      "2732 [D loss: 0.192339, acc.: 92.58%] [G loss: 3.320219]\n",
      "2733 [D loss: 0.182178, acc.: 92.58%] [G loss: 3.107122]\n",
      "2734 [D loss: 0.180718, acc.: 92.97%] [G loss: 3.099254]\n",
      "2735 [D loss: 0.182831, acc.: 91.41%] [G loss: 2.984404]\n",
      "2736 [D loss: 0.193840, acc.: 92.19%] [G loss: 3.058979]\n",
      "2737 [D loss: 0.192941, acc.: 92.19%] [G loss: 3.276351]\n",
      "2738 [D loss: 0.198334, acc.: 92.58%] [G loss: 3.333691]\n",
      "2739 [D loss: 0.175929, acc.: 92.97%] [G loss: 3.136252]\n",
      "2740 [D loss: 0.176337, acc.: 91.80%] [G loss: 2.724141]\n",
      "2741 [D loss: 0.206197, acc.: 92.19%] [G loss: 3.174665]\n",
      "2742 [D loss: 0.231935, acc.: 90.62%] [G loss: 3.282462]\n",
      "2743 [D loss: 0.212748, acc.: 91.41%] [G loss: 3.281341]\n",
      "2744 [D loss: 0.191979, acc.: 92.58%] [G loss: 3.390211]\n",
      "2745 [D loss: 0.211532, acc.: 91.80%] [G loss: 2.819180]\n",
      "2746 [D loss: 0.175177, acc.: 91.80%] [G loss: 2.854132]\n",
      "2747 [D loss: 0.187069, acc.: 93.36%] [G loss: 2.759767]\n",
      "2748 [D loss: 0.219895, acc.: 90.23%] [G loss: 3.249165]\n",
      "2749 [D loss: 0.210271, acc.: 92.19%] [G loss: 2.997665]\n",
      "2750 [D loss: 0.197764, acc.: 91.80%] [G loss: 3.154076]\n",
      "2751 [D loss: 0.200073, acc.: 92.97%] [G loss: 3.203661]\n",
      "2752 [D loss: 0.202192, acc.: 92.58%] [G loss: 3.155603]\n",
      "2753 [D loss: 0.197776, acc.: 91.80%] [G loss: 2.993976]\n",
      "2754 [D loss: 0.220925, acc.: 91.02%] [G loss: 3.188241]\n",
      "2755 [D loss: 0.206014, acc.: 91.41%] [G loss: 3.343068]\n",
      "2756 [D loss: 0.203562, acc.: 91.41%] [G loss: 2.882113]\n",
      "2757 [D loss: 0.186479, acc.: 92.58%] [G loss: 3.158441]\n",
      "2758 [D loss: 0.209591, acc.: 92.58%] [G loss: 3.163777]\n",
      "2759 [D loss: 0.194470, acc.: 91.02%] [G loss: 3.077303]\n",
      "2760 [D loss: 0.178434, acc.: 92.19%] [G loss: 3.055920]\n",
      "2761 [D loss: 0.193492, acc.: 91.80%] [G loss: 3.218102]\n",
      "2762 [D loss: 0.212732, acc.: 92.58%] [G loss: 3.308686]\n",
      "2763 [D loss: 0.212678, acc.: 92.19%] [G loss: 3.195003]\n",
      "2764 [D loss: 0.186426, acc.: 92.19%] [G loss: 2.942188]\n",
      "2765 [D loss: 0.200196, acc.: 91.80%] [G loss: 3.253526]\n",
      "2766 [D loss: 0.186742, acc.: 92.58%] [G loss: 2.856579]\n",
      "2767 [D loss: 0.230058, acc.: 89.84%] [G loss: 3.000819]\n",
      "2768 [D loss: 0.177129, acc.: 92.58%] [G loss: 3.012698]\n",
      "2769 [D loss: 0.207228, acc.: 92.19%] [G loss: 2.998471]\n",
      "2770 [D loss: 0.193075, acc.: 91.41%] [G loss: 3.082929]\n",
      "2771 [D loss: 0.196762, acc.: 92.19%] [G loss: 3.213380]\n",
      "2772 [D loss: 0.229962, acc.: 91.41%] [G loss: 3.170138]\n",
      "2773 [D loss: 0.192702, acc.: 92.58%] [G loss: 3.112704]\n",
      "2774 [D loss: 0.210269, acc.: 92.19%] [G loss: 3.028889]\n",
      "2775 [D loss: 0.190809, acc.: 92.19%] [G loss: 3.168455]\n",
      "2776 [D loss: 0.221986, acc.: 91.02%] [G loss: 3.037665]\n",
      "2777 [D loss: 0.230902, acc.: 91.02%] [G loss: 3.143889]\n",
      "2778 [D loss: 0.200582, acc.: 92.58%] [G loss: 3.039050]\n",
      "2779 [D loss: 0.205012, acc.: 92.19%] [G loss: 3.158627]\n",
      "2780 [D loss: 0.215997, acc.: 91.02%] [G loss: 2.891519]\n",
      "2781 [D loss: 0.206225, acc.: 91.80%] [G loss: 3.171981]\n",
      "2782 [D loss: 0.207862, acc.: 91.41%] [G loss: 3.039727]\n",
      "2783 [D loss: 0.203000, acc.: 92.97%] [G loss: 3.210835]\n",
      "2784 [D loss: 0.220896, acc.: 91.41%] [G loss: 3.180539]\n",
      "2785 [D loss: 0.205646, acc.: 91.41%] [G loss: 3.110186]\n",
      "2786 [D loss: 0.205998, acc.: 92.19%] [G loss: 3.096049]\n",
      "2787 [D loss: 0.199072, acc.: 91.41%] [G loss: 2.992483]\n",
      "2788 [D loss: 0.229595, acc.: 89.84%] [G loss: 3.177026]\n",
      "2789 [D loss: 0.218794, acc.: 92.19%] [G loss: 3.473903]\n",
      "2790 [D loss: 0.199745, acc.: 91.80%] [G loss: 3.242838]\n",
      "2791 [D loss: 0.186468, acc.: 91.41%] [G loss: 3.366506]\n",
      "2792 [D loss: 0.197616, acc.: 91.80%] [G loss: 3.286633]\n",
      "2793 [D loss: 0.170243, acc.: 92.97%] [G loss: 2.983084]\n",
      "2794 [D loss: 0.191964, acc.: 92.58%] [G loss: 3.145942]\n",
      "2795 [D loss: 0.209386, acc.: 91.02%] [G loss: 3.268071]\n",
      "2796 [D loss: 0.206803, acc.: 91.80%] [G loss: 3.320187]\n",
      "2797 [D loss: 0.185958, acc.: 92.97%] [G loss: 3.203889]\n",
      "2798 [D loss: 0.185359, acc.: 92.97%] [G loss: 3.135279]\n",
      "2799 [D loss: 0.184595, acc.: 93.36%] [G loss: 2.913111]\n",
      "2800 [D loss: 0.215484, acc.: 91.80%] [G loss: 3.188277]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "2801 [D loss: 0.225299, acc.: 91.02%] [G loss: 3.347689]\n",
      "2802 [D loss: 0.197258, acc.: 92.19%] [G loss: 3.226983]\n",
      "2803 [D loss: 0.217073, acc.: 92.58%] [G loss: 2.950405]\n",
      "2804 [D loss: 0.200737, acc.: 91.02%] [G loss: 2.767192]\n",
      "2805 [D loss: 0.207914, acc.: 91.80%] [G loss: 2.972013]\n",
      "2806 [D loss: 0.197954, acc.: 91.41%] [G loss: 3.446453]\n",
      "2807 [D loss: 0.195067, acc.: 91.80%] [G loss: 3.281496]\n",
      "2808 [D loss: 0.217828, acc.: 91.41%] [G loss: 2.938753]\n",
      "2809 [D loss: 0.209904, acc.: 91.80%] [G loss: 2.918459]\n",
      "2810 [D loss: 0.197597, acc.: 91.80%] [G loss: 3.475049]\n",
      "2811 [D loss: 0.178751, acc.: 92.58%] [G loss: 3.312705]\n",
      "2812 [D loss: 0.184064, acc.: 91.80%] [G loss: 2.984209]\n",
      "2813 [D loss: 0.199841, acc.: 91.80%] [G loss: 3.351465]\n",
      "2814 [D loss: 0.197159, acc.: 92.19%] [G loss: 3.084340]\n",
      "2815 [D loss: 0.199861, acc.: 91.80%] [G loss: 2.834820]\n",
      "2816 [D loss: 0.192887, acc.: 91.80%] [G loss: 2.992489]\n",
      "2817 [D loss: 0.223862, acc.: 91.02%] [G loss: 3.139903]\n",
      "2818 [D loss: 0.207532, acc.: 91.80%] [G loss: 3.435384]\n",
      "2819 [D loss: 0.197871, acc.: 92.58%] [G loss: 2.943244]\n",
      "2820 [D loss: 0.185899, acc.: 91.80%] [G loss: 3.114993]\n",
      "2821 [D loss: 0.192702, acc.: 92.97%] [G loss: 3.144472]\n",
      "2822 [D loss: 0.214552, acc.: 91.80%] [G loss: 3.250826]\n",
      "2823 [D loss: 0.215230, acc.: 91.02%] [G loss: 3.347907]\n",
      "2824 [D loss: 0.207634, acc.: 91.02%] [G loss: 2.914845]\n",
      "2825 [D loss: 0.201885, acc.: 91.41%] [G loss: 3.014530]\n",
      "2826 [D loss: 0.232939, acc.: 90.62%] [G loss: 2.838857]\n",
      "2827 [D loss: 0.214682, acc.: 89.45%] [G loss: 3.038782]\n",
      "2828 [D loss: 0.201959, acc.: 91.80%] [G loss: 3.207647]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2829 [D loss: 0.197692, acc.: 92.58%] [G loss: 3.076239]\n",
      "2830 [D loss: 0.192673, acc.: 91.80%] [G loss: 2.757534]\n",
      "2831 [D loss: 0.199900, acc.: 92.58%] [G loss: 3.088606]\n",
      "2832 [D loss: 0.186691, acc.: 92.97%] [G loss: 3.122964]\n",
      "2833 [D loss: 0.207945, acc.: 92.97%] [G loss: 2.999573]\n",
      "2834 [D loss: 0.201760, acc.: 92.19%] [G loss: 2.897846]\n",
      "2835 [D loss: 0.210443, acc.: 90.62%] [G loss: 3.312086]\n",
      "2836 [D loss: 0.238401, acc.: 90.62%] [G loss: 3.116419]\n",
      "2837 [D loss: 0.219426, acc.: 91.02%] [G loss: 2.923131]\n",
      "2838 [D loss: 0.193112, acc.: 91.80%] [G loss: 2.840324]\n",
      "2839 [D loss: 0.202032, acc.: 91.80%] [G loss: 2.999614]\n",
      "2840 [D loss: 0.196579, acc.: 91.80%] [G loss: 3.092089]\n",
      "2841 [D loss: 0.219599, acc.: 91.41%] [G loss: 3.111595]\n",
      "2842 [D loss: 0.194792, acc.: 92.19%] [G loss: 3.028077]\n",
      "2843 [D loss: 0.184925, acc.: 92.19%] [G loss: 2.849447]\n",
      "2844 [D loss: 0.185514, acc.: 91.41%] [G loss: 3.010524]\n",
      "2845 [D loss: 0.201364, acc.: 92.19%] [G loss: 3.191613]\n",
      "2846 [D loss: 0.237917, acc.: 90.23%] [G loss: 3.353608]\n",
      "2847 [D loss: 0.229709, acc.: 91.02%] [G loss: 3.151127]\n",
      "2848 [D loss: 0.214341, acc.: 91.41%] [G loss: 3.105049]\n",
      "2849 [D loss: 0.202437, acc.: 91.80%] [G loss: 3.148440]\n",
      "2850 [D loss: 0.188721, acc.: 91.80%] [G loss: 2.871965]\n",
      "2851 [D loss: 0.205248, acc.: 91.41%] [G loss: 3.170796]\n",
      "2852 [D loss: 0.202919, acc.: 91.02%] [G loss: 2.928279]\n",
      "2853 [D loss: 0.196324, acc.: 92.58%] [G loss: 2.813753]\n",
      "2854 [D loss: 0.212852, acc.: 91.80%] [G loss: 3.182245]\n",
      "2855 [D loss: 0.196425, acc.: 92.19%] [G loss: 3.193062]\n",
      "2856 [D loss: 0.211778, acc.: 91.80%] [G loss: 2.896976]\n",
      "2857 [D loss: 0.197017, acc.: 91.41%] [G loss: 3.006513]\n",
      "2858 [D loss: 0.198799, acc.: 92.19%] [G loss: 2.855256]\n",
      "2859 [D loss: 0.203843, acc.: 91.41%] [G loss: 2.992972]\n",
      "2860 [D loss: 0.226950, acc.: 91.41%] [G loss: 2.761863]\n",
      "2861 [D loss: 0.229024, acc.: 91.02%] [G loss: 3.201246]\n",
      "2862 [D loss: 0.216858, acc.: 91.41%] [G loss: 3.122306]\n",
      "2863 [D loss: 0.214591, acc.: 90.62%] [G loss: 3.177016]\n",
      "2864 [D loss: 0.195374, acc.: 92.58%] [G loss: 3.108004]\n",
      "2865 [D loss: 0.194376, acc.: 91.41%] [G loss: 3.036119]\n",
      "2866 [D loss: 0.213015, acc.: 90.62%] [G loss: 3.145523]\n",
      "2867 [D loss: 0.226027, acc.: 90.62%] [G loss: 3.175715]\n",
      "2868 [D loss: 0.247905, acc.: 91.02%] [G loss: 2.916896]\n",
      "2869 [D loss: 0.195765, acc.: 92.19%] [G loss: 2.847701]\n",
      "2870 [D loss: 0.200345, acc.: 91.80%] [G loss: 2.988583]\n",
      "2871 [D loss: 0.201592, acc.: 92.58%] [G loss: 2.881521]\n",
      "2872 [D loss: 0.194184, acc.: 92.58%] [G loss: 3.069877]\n",
      "2873 [D loss: 0.217909, acc.: 92.19%] [G loss: 2.784443]\n",
      "2874 [D loss: 0.210713, acc.: 91.41%] [G loss: 2.898124]\n",
      "2875 [D loss: 0.189831, acc.: 91.80%] [G loss: 2.872412]\n",
      "2876 [D loss: 0.220402, acc.: 91.80%] [G loss: 3.273710]\n",
      "2877 [D loss: 0.198552, acc.: 92.58%] [G loss: 3.089790]\n",
      "2878 [D loss: 0.221874, acc.: 91.02%] [G loss: 3.180856]\n",
      "2879 [D loss: 0.195878, acc.: 91.41%] [G loss: 3.286569]\n",
      "2880 [D loss: 0.236157, acc.: 89.84%] [G loss: 3.340330]\n",
      "2881 [D loss: 0.194081, acc.: 92.58%] [G loss: 2.867018]\n",
      "2882 [D loss: 0.198263, acc.: 92.58%] [G loss: 3.170169]\n",
      "2883 [D loss: 0.191914, acc.: 90.23%] [G loss: 3.107487]\n",
      "2884 [D loss: 0.195419, acc.: 92.97%] [G loss: 2.989681]\n",
      "2885 [D loss: 0.195231, acc.: 91.41%] [G loss: 3.095371]\n",
      "2886 [D loss: 0.223156, acc.: 91.80%] [G loss: 2.921057]\n",
      "2887 [D loss: 0.187397, acc.: 91.80%] [G loss: 3.091996]\n",
      "2888 [D loss: 0.211290, acc.: 91.80%] [G loss: 3.023170]\n",
      "2889 [D loss: 0.208369, acc.: 92.19%] [G loss: 3.014465]\n",
      "2890 [D loss: 0.180503, acc.: 92.97%] [G loss: 3.011676]\n",
      "2891 [D loss: 0.218336, acc.: 89.45%] [G loss: 3.088789]\n",
      "2892 [D loss: 0.220746, acc.: 89.84%] [G loss: 3.056472]\n",
      "2893 [D loss: 0.202760, acc.: 91.41%] [G loss: 2.950046]\n",
      "2894 [D loss: 0.188250, acc.: 92.19%] [G loss: 3.049236]\n",
      "2895 [D loss: 0.222048, acc.: 90.62%] [G loss: 3.038969]\n",
      "2896 [D loss: 0.196498, acc.: 92.19%] [G loss: 2.752496]\n",
      "2897 [D loss: 0.183001, acc.: 92.97%] [G loss: 2.881371]\n",
      "2898 [D loss: 0.187936, acc.: 91.80%] [G loss: 3.012705]\n",
      "2899 [D loss: 0.234562, acc.: 91.41%] [G loss: 2.955270]\n",
      "2900 [D loss: 0.212824, acc.: 91.80%] [G loss: 2.981419]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "2901 [D loss: 0.193826, acc.: 92.19%] [G loss: 3.049769]\n",
      "2902 [D loss: 0.202428, acc.: 92.58%] [G loss: 3.114769]\n",
      "2903 [D loss: 0.204875, acc.: 91.41%] [G loss: 2.966331]\n",
      "2904 [D loss: 0.195272, acc.: 92.19%] [G loss: 2.865334]\n",
      "2905 [D loss: 0.210300, acc.: 91.41%] [G loss: 2.892242]\n",
      "2906 [D loss: 0.185237, acc.: 92.97%] [G loss: 3.013826]\n",
      "2907 [D loss: 0.244549, acc.: 90.23%] [G loss: 3.146888]\n",
      "2908 [D loss: 0.227493, acc.: 91.02%] [G loss: 2.826329]\n",
      "2909 [D loss: 0.231662, acc.: 90.23%] [G loss: 3.196001]\n",
      "2910 [D loss: 0.216022, acc.: 92.19%] [G loss: 2.940460]\n",
      "2911 [D loss: 0.237446, acc.: 87.89%] [G loss: 3.211831]\n",
      "2912 [D loss: 0.201222, acc.: 92.19%] [G loss: 3.084432]\n",
      "2913 [D loss: 0.208929, acc.: 91.80%] [G loss: 2.797223]\n",
      "2914 [D loss: 0.213460, acc.: 91.02%] [G loss: 2.889083]\n",
      "2915 [D loss: 0.197486, acc.: 92.19%] [G loss: 2.980533]\n",
      "2916 [D loss: 0.189114, acc.: 91.02%] [G loss: 3.283514]\n",
      "2917 [D loss: 0.193486, acc.: 92.19%] [G loss: 3.276775]\n",
      "2918 [D loss: 0.183310, acc.: 92.58%] [G loss: 3.111853]\n",
      "2919 [D loss: 0.188797, acc.: 92.97%] [G loss: 3.122875]\n",
      "2920 [D loss: 0.196459, acc.: 92.19%] [G loss: 3.081626]\n",
      "2921 [D loss: 0.208129, acc.: 91.80%] [G loss: 2.851459]\n",
      "2922 [D loss: 0.212289, acc.: 92.58%] [G loss: 3.020831]\n",
      "2923 [D loss: 0.211705, acc.: 91.80%] [G loss: 3.006248]\n",
      "2924 [D loss: 0.231312, acc.: 91.02%] [G loss: 2.777347]\n",
      "2925 [D loss: 0.196498, acc.: 91.80%] [G loss: 3.103058]\n",
      "2926 [D loss: 0.226846, acc.: 91.41%] [G loss: 3.176237]\n",
      "2927 [D loss: 0.213058, acc.: 91.41%] [G loss: 2.981483]\n",
      "2928 [D loss: 0.203492, acc.: 91.41%] [G loss: 3.142831]\n",
      "2929 [D loss: 0.200633, acc.: 92.19%] [G loss: 2.995716]\n",
      "2930 [D loss: 0.219098, acc.: 90.62%] [G loss: 2.911825]\n",
      "2931 [D loss: 0.215368, acc.: 90.23%] [G loss: 2.974401]\n",
      "2932 [D loss: 0.200404, acc.: 92.58%] [G loss: 3.278839]\n",
      "2933 [D loss: 0.224111, acc.: 91.41%] [G loss: 2.744743]\n",
      "2934 [D loss: 0.193974, acc.: 92.97%] [G loss: 2.892764]\n",
      "2935 [D loss: 0.170252, acc.: 92.97%] [G loss: 2.854875]\n",
      "2936 [D loss: 0.196790, acc.: 91.80%] [G loss: 3.046536]\n",
      "2937 [D loss: 0.193655, acc.: 92.19%] [G loss: 2.813740]\n",
      "2938 [D loss: 0.212771, acc.: 91.41%] [G loss: 2.832853]\n",
      "2939 [D loss: 0.179517, acc.: 91.02%] [G loss: 3.042560]\n",
      "2940 [D loss: 0.189532, acc.: 92.97%] [G loss: 3.171314]\n",
      "2941 [D loss: 0.207870, acc.: 91.02%] [G loss: 2.865581]\n",
      "2942 [D loss: 0.223522, acc.: 91.02%] [G loss: 3.225549]\n",
      "2943 [D loss: 0.216427, acc.: 91.41%] [G loss: 2.941887]\n",
      "2944 [D loss: 0.201324, acc.: 92.58%] [G loss: 3.097327]\n",
      "2945 [D loss: 0.206203, acc.: 92.58%] [G loss: 2.986698]\n",
      "2946 [D loss: 0.195997, acc.: 92.19%] [G loss: 3.173590]\n",
      "2947 [D loss: 0.177438, acc.: 92.58%] [G loss: 3.172335]\n",
      "2948 [D loss: 0.201803, acc.: 91.80%] [G loss: 3.030846]\n",
      "2949 [D loss: 0.205440, acc.: 91.41%] [G loss: 3.122514]\n",
      "2950 [D loss: 0.214300, acc.: 91.80%] [G loss: 3.010181]\n",
      "2951 [D loss: 0.216185, acc.: 91.80%] [G loss: 2.793322]\n",
      "2952 [D loss: 0.204665, acc.: 91.80%] [G loss: 2.971758]\n",
      "2953 [D loss: 0.207302, acc.: 92.58%] [G loss: 3.364384]\n",
      "2954 [D loss: 0.206104, acc.: 91.41%] [G loss: 3.372524]\n",
      "2955 [D loss: 0.189340, acc.: 93.36%] [G loss: 3.012065]\n",
      "2956 [D loss: 0.187504, acc.: 92.19%] [G loss: 3.016787]\n",
      "2957 [D loss: 0.208613, acc.: 91.41%] [G loss: 2.862689]\n",
      "2958 [D loss: 0.223521, acc.: 91.41%] [G loss: 2.953081]\n",
      "2959 [D loss: 0.203465, acc.: 91.80%] [G loss: 3.010175]\n",
      "2960 [D loss: 0.205827, acc.: 92.58%] [G loss: 3.155010]\n",
      "2961 [D loss: 0.227473, acc.: 91.80%] [G loss: 3.145937]\n",
      "2962 [D loss: 0.218455, acc.: 91.02%] [G loss: 3.046394]\n",
      "2963 [D loss: 0.205563, acc.: 92.19%] [G loss: 3.062053]\n",
      "2964 [D loss: 0.179986, acc.: 92.58%] [G loss: 2.863165]\n",
      "2965 [D loss: 0.190168, acc.: 92.97%] [G loss: 3.019655]\n",
      "2966 [D loss: 0.241111, acc.: 90.62%] [G loss: 3.066465]\n",
      "2967 [D loss: 0.205489, acc.: 91.41%] [G loss: 2.924895]\n",
      "2968 [D loss: 0.219718, acc.: 92.19%] [G loss: 3.028724]\n",
      "2969 [D loss: 0.211510, acc.: 91.80%] [G loss: 2.898454]\n",
      "2970 [D loss: 0.192607, acc.: 91.80%] [G loss: 2.847649]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2971 [D loss: 0.214050, acc.: 91.80%] [G loss: 2.770327]\n",
      "2972 [D loss: 0.178086, acc.: 92.97%] [G loss: 2.894073]\n",
      "2973 [D loss: 0.200797, acc.: 92.19%] [G loss: 3.055795]\n",
      "2974 [D loss: 0.206244, acc.: 92.19%] [G loss: 2.936987]\n",
      "2975 [D loss: 0.231601, acc.: 90.62%] [G loss: 3.034416]\n",
      "2976 [D loss: 0.192744, acc.: 91.80%] [G loss: 3.170062]\n",
      "2977 [D loss: 0.206179, acc.: 92.19%] [G loss: 2.953900]\n",
      "2978 [D loss: 0.197246, acc.: 92.97%] [G loss: 2.874907]\n",
      "2979 [D loss: 0.205613, acc.: 92.58%] [G loss: 2.898720]\n",
      "2980 [D loss: 0.196556, acc.: 91.80%] [G loss: 2.984944]\n",
      "2981 [D loss: 0.218893, acc.: 91.41%] [G loss: 2.855459]\n",
      "2982 [D loss: 0.193149, acc.: 92.58%] [G loss: 3.010463]\n",
      "2983 [D loss: 0.195399, acc.: 91.80%] [G loss: 2.911571]\n",
      "2984 [D loss: 0.200202, acc.: 91.02%] [G loss: 3.113387]\n",
      "2985 [D loss: 0.222246, acc.: 90.62%] [G loss: 2.958227]\n",
      "2986 [D loss: 0.195406, acc.: 91.80%] [G loss: 2.981807]\n",
      "2987 [D loss: 0.193383, acc.: 91.41%] [G loss: 3.274608]\n",
      "2988 [D loss: 0.197252, acc.: 92.19%] [G loss: 2.988590]\n",
      "2989 [D loss: 0.210300, acc.: 91.41%] [G loss: 3.032784]\n",
      "2990 [D loss: 0.205648, acc.: 91.80%] [G loss: 3.119188]\n",
      "2991 [D loss: 0.200953, acc.: 91.02%] [G loss: 3.099630]\n",
      "2992 [D loss: 0.198526, acc.: 91.41%] [G loss: 2.828629]\n",
      "2993 [D loss: 0.201197, acc.: 92.19%] [G loss: 3.006719]\n",
      "2994 [D loss: 0.200346, acc.: 91.41%] [G loss: 2.853083]\n",
      "2995 [D loss: 0.200001, acc.: 92.19%] [G loss: 3.083105]\n",
      "2996 [D loss: 0.203585, acc.: 90.62%] [G loss: 3.289016]\n",
      "2997 [D loss: 0.200092, acc.: 91.80%] [G loss: 2.789685]\n",
      "2998 [D loss: 0.178943, acc.: 92.58%] [G loss: 2.838706]\n",
      "2999 [D loss: 0.223409, acc.: 91.41%] [G loss: 3.129478]\n",
      "3000 [D loss: 0.199065, acc.: 91.80%] [G loss: 3.081719]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "3001 [D loss: 0.219666, acc.: 91.80%] [G loss: 3.006634]\n",
      "3002 [D loss: 0.200019, acc.: 92.19%] [G loss: 2.943297]\n",
      "3003 [D loss: 0.199681, acc.: 91.80%] [G loss: 3.182823]\n",
      "3004 [D loss: 0.199187, acc.: 91.41%] [G loss: 2.838384]\n",
      "3005 [D loss: 0.185750, acc.: 92.97%] [G loss: 3.079914]\n",
      "3006 [D loss: 0.199356, acc.: 91.80%] [G loss: 3.026311]\n",
      "3007 [D loss: 0.194951, acc.: 91.80%] [G loss: 2.854005]\n",
      "3008 [D loss: 0.199128, acc.: 92.58%] [G loss: 2.996125]\n",
      "3009 [D loss: 0.210253, acc.: 92.97%] [G loss: 3.147392]\n",
      "3010 [D loss: 0.189976, acc.: 92.58%] [G loss: 2.971353]\n",
      "3011 [D loss: 0.210704, acc.: 92.19%] [G loss: 3.062876]\n",
      "3012 [D loss: 0.191872, acc.: 91.41%] [G loss: 3.108843]\n",
      "3013 [D loss: 0.200748, acc.: 91.80%] [G loss: 2.947544]\n",
      "3014 [D loss: 0.204419, acc.: 92.19%] [G loss: 2.877896]\n",
      "3015 [D loss: 0.200986, acc.: 91.80%] [G loss: 3.217315]\n",
      "3016 [D loss: 0.205171, acc.: 91.80%] [G loss: 3.234984]\n",
      "3017 [D loss: 0.199229, acc.: 91.41%] [G loss: 2.958475]\n",
      "3018 [D loss: 0.205796, acc.: 91.80%] [G loss: 2.985736]\n",
      "3019 [D loss: 0.215650, acc.: 91.02%] [G loss: 3.191950]\n",
      "3020 [D loss: 0.209364, acc.: 91.80%] [G loss: 3.212656]\n",
      "3021 [D loss: 0.213591, acc.: 92.19%] [G loss: 3.078174]\n",
      "3022 [D loss: 0.208972, acc.: 92.19%] [G loss: 3.084961]\n",
      "3023 [D loss: 0.216367, acc.: 91.80%] [G loss: 3.204034]\n",
      "3024 [D loss: 0.224647, acc.: 91.80%] [G loss: 2.960362]\n",
      "3025 [D loss: 0.191102, acc.: 92.19%] [G loss: 2.744502]\n",
      "3026 [D loss: 0.205432, acc.: 92.19%] [G loss: 3.020979]\n",
      "3027 [D loss: 0.197539, acc.: 91.80%] [G loss: 3.106087]\n",
      "3028 [D loss: 0.185412, acc.: 92.19%] [G loss: 2.830086]\n",
      "3029 [D loss: 0.199922, acc.: 90.62%] [G loss: 2.960736]\n",
      "3030 [D loss: 0.219660, acc.: 91.41%] [G loss: 3.039757]\n",
      "3031 [D loss: 0.194897, acc.: 91.80%] [G loss: 3.011371]\n",
      "3032 [D loss: 0.196807, acc.: 92.97%] [G loss: 2.803847]\n",
      "3033 [D loss: 0.187273, acc.: 92.58%] [G loss: 3.234089]\n",
      "3034 [D loss: 0.182604, acc.: 92.58%] [G loss: 3.219326]\n",
      "3035 [D loss: 0.196268, acc.: 92.19%] [G loss: 3.154550]\n",
      "3036 [D loss: 0.214061, acc.: 91.41%] [G loss: 3.221049]\n",
      "3037 [D loss: 0.188641, acc.: 92.58%] [G loss: 2.817309]\n",
      "3038 [D loss: 0.172309, acc.: 92.19%] [G loss: 3.384314]\n",
      "3039 [D loss: 0.191212, acc.: 93.36%] [G loss: 3.356284]\n",
      "3040 [D loss: 0.200686, acc.: 92.58%] [G loss: 3.278433]\n",
      "3041 [D loss: 0.201454, acc.: 92.19%] [G loss: 3.073432]\n",
      "3042 [D loss: 0.171059, acc.: 92.58%] [G loss: 3.721217]\n",
      "3043 [D loss: 0.189571, acc.: 92.58%] [G loss: 2.881618]\n",
      "3044 [D loss: 0.190670, acc.: 92.97%] [G loss: 3.134784]\n",
      "3045 [D loss: 0.200479, acc.: 92.19%] [G loss: 3.015623]\n",
      "3046 [D loss: 0.178292, acc.: 92.58%] [G loss: 3.061401]\n",
      "3047 [D loss: 0.215251, acc.: 92.58%] [G loss: 3.096511]\n",
      "3048 [D loss: 0.183480, acc.: 92.58%] [G loss: 3.055336]\n",
      "3049 [D loss: 0.215432, acc.: 91.41%] [G loss: 3.073200]\n",
      "3050 [D loss: 0.196895, acc.: 92.19%] [G loss: 3.169657]\n",
      "3051 [D loss: 0.212395, acc.: 91.02%] [G loss: 2.967401]\n",
      "3052 [D loss: 0.186468, acc.: 91.80%] [G loss: 3.019184]\n",
      "3053 [D loss: 0.200479, acc.: 92.58%] [G loss: 2.939194]\n",
      "3054 [D loss: 0.172257, acc.: 93.36%] [G loss: 3.276007]\n",
      "3055 [D loss: 0.210069, acc.: 91.02%] [G loss: 2.952869]\n",
      "3056 [D loss: 0.201285, acc.: 92.19%] [G loss: 3.116741]\n",
      "3057 [D loss: 0.221877, acc.: 92.19%] [G loss: 3.199693]\n",
      "3058 [D loss: 0.198199, acc.: 92.58%] [G loss: 3.066138]\n",
      "3059 [D loss: 0.189305, acc.: 92.19%] [G loss: 2.873475]\n",
      "3060 [D loss: 0.209525, acc.: 91.41%] [G loss: 3.137379]\n",
      "3061 [D loss: 0.207048, acc.: 92.19%] [G loss: 3.021058]\n",
      "3062 [D loss: 0.209246, acc.: 91.80%] [G loss: 2.940194]\n",
      "3063 [D loss: 0.182165, acc.: 92.19%] [G loss: 3.154040]\n",
      "3064 [D loss: 0.184022, acc.: 92.19%] [G loss: 3.208188]\n",
      "3065 [D loss: 0.193317, acc.: 92.58%] [G loss: 2.989287]\n",
      "3066 [D loss: 0.224097, acc.: 91.41%] [G loss: 3.085964]\n",
      "3067 [D loss: 0.180273, acc.: 92.19%] [G loss: 3.303788]\n",
      "3068 [D loss: 0.194446, acc.: 92.19%] [G loss: 3.150804]\n",
      "3069 [D loss: 0.176365, acc.: 91.80%] [G loss: 3.253173]\n",
      "3070 [D loss: 0.187170, acc.: 92.19%] [G loss: 3.135784]\n",
      "3071 [D loss: 0.206938, acc.: 92.19%] [G loss: 3.265785]\n",
      "3072 [D loss: 0.192532, acc.: 92.97%] [G loss: 3.104024]\n",
      "3073 [D loss: 0.179494, acc.: 91.80%] [G loss: 2.924252]\n",
      "3074 [D loss: 0.214919, acc.: 91.80%] [G loss: 2.993079]\n",
      "3075 [D loss: 0.187779, acc.: 92.19%] [G loss: 3.059802]\n",
      "3076 [D loss: 0.192284, acc.: 91.41%] [G loss: 3.283151]\n",
      "3077 [D loss: 0.191672, acc.: 92.19%] [G loss: 2.907848]\n",
      "3078 [D loss: 0.183330, acc.: 92.97%] [G loss: 3.037058]\n",
      "3079 [D loss: 0.197145, acc.: 92.19%] [G loss: 3.152835]\n",
      "3080 [D loss: 0.217745, acc.: 90.62%] [G loss: 3.174050]\n",
      "3081 [D loss: 0.193275, acc.: 91.80%] [G loss: 2.962687]\n",
      "3082 [D loss: 0.204401, acc.: 92.19%] [G loss: 2.890679]\n",
      "3083 [D loss: 0.204799, acc.: 91.41%] [G loss: 2.943239]\n",
      "3084 [D loss: 0.214145, acc.: 92.97%] [G loss: 3.102204]\n",
      "3085 [D loss: 0.208235, acc.: 92.19%] [G loss: 3.070371]\n",
      "3086 [D loss: 0.196025, acc.: 92.58%] [G loss: 2.910307]\n",
      "3087 [D loss: 0.217722, acc.: 90.62%] [G loss: 3.042482]\n",
      "3088 [D loss: 0.198092, acc.: 91.80%] [G loss: 3.267539]\n",
      "3089 [D loss: 0.209764, acc.: 91.41%] [G loss: 3.235781]\n",
      "3090 [D loss: 0.204356, acc.: 92.19%] [G loss: 3.004853]\n",
      "3091 [D loss: 0.202953, acc.: 92.19%] [G loss: 3.241750]\n",
      "3092 [D loss: 0.212241, acc.: 91.02%] [G loss: 3.298831]\n",
      "3093 [D loss: 0.215336, acc.: 91.02%] [G loss: 2.898320]\n",
      "3094 [D loss: 0.212538, acc.: 91.80%] [G loss: 2.924710]\n",
      "3095 [D loss: 0.204247, acc.: 91.80%] [G loss: 3.072965]\n",
      "3096 [D loss: 0.216864, acc.: 92.58%] [G loss: 3.044406]\n",
      "3097 [D loss: 0.198870, acc.: 92.19%] [G loss: 2.938906]\n",
      "3098 [D loss: 0.200072, acc.: 91.02%] [G loss: 2.857064]\n",
      "3099 [D loss: 0.202978, acc.: 91.80%] [G loss: 3.005241]\n",
      "3100 [D loss: 0.214895, acc.: 92.19%] [G loss: 3.575653]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "3101 [D loss: 0.216600, acc.: 91.80%] [G loss: 3.128771]\n",
      "3102 [D loss: 0.199843, acc.: 91.80%] [G loss: 2.949063]\n",
      "3103 [D loss: 0.213812, acc.: 92.19%] [G loss: 2.996686]\n",
      "3104 [D loss: 0.194606, acc.: 91.80%] [G loss: 3.096678]\n",
      "3105 [D loss: 0.209299, acc.: 91.80%] [G loss: 3.086001]\n",
      "3106 [D loss: 0.190442, acc.: 92.58%] [G loss: 3.045586]\n",
      "3107 [D loss: 0.193933, acc.: 92.97%] [G loss: 3.351768]\n",
      "3108 [D loss: 0.211465, acc.: 89.84%] [G loss: 3.314331]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "3109 [D loss: 0.195120, acc.: 92.19%] [G loss: 2.970064]\n",
      "3110 [D loss: 0.213615, acc.: 92.19%] [G loss: 3.297168]\n",
      "3111 [D loss: 0.202367, acc.: 92.58%] [G loss: 3.143643]\n",
      "3112 [D loss: 0.196316, acc.: 91.41%] [G loss: 2.960117]\n",
      "3113 [D loss: 0.203782, acc.: 92.58%] [G loss: 3.115172]\n",
      "3114 [D loss: 0.204880, acc.: 92.19%] [G loss: 3.113722]\n",
      "3115 [D loss: 0.210732, acc.: 91.80%] [G loss: 2.890198]\n",
      "3116 [D loss: 0.200607, acc.: 92.19%] [G loss: 3.285150]\n",
      "3117 [D loss: 0.219412, acc.: 91.02%] [G loss: 2.816831]\n",
      "3118 [D loss: 0.192495, acc.: 92.19%] [G loss: 2.912499]\n",
      "3119 [D loss: 0.198913, acc.: 92.19%] [G loss: 3.013567]\n",
      "3120 [D loss: 0.213638, acc.: 91.80%] [G loss: 2.738363]\n",
      "3121 [D loss: 0.208423, acc.: 92.58%] [G loss: 2.990940]\n",
      "3122 [D loss: 0.206147, acc.: 92.19%] [G loss: 3.056832]\n",
      "3123 [D loss: 0.218498, acc.: 91.02%] [G loss: 3.109236]\n",
      "3124 [D loss: 0.216685, acc.: 92.19%] [G loss: 3.219441]\n",
      "3125 [D loss: 0.222133, acc.: 91.41%] [G loss: 2.784816]\n",
      "3126 [D loss: 0.207557, acc.: 92.19%] [G loss: 3.027308]\n",
      "3127 [D loss: 0.201685, acc.: 92.19%] [G loss: 2.783267]\n",
      "3128 [D loss: 0.216758, acc.: 92.58%] [G loss: 2.895839]\n",
      "3129 [D loss: 0.196764, acc.: 91.80%] [G loss: 2.928041]\n",
      "3130 [D loss: 0.216869, acc.: 91.02%] [G loss: 3.167836]\n",
      "3131 [D loss: 0.204478, acc.: 91.80%] [G loss: 3.110657]\n",
      "3132 [D loss: 0.205153, acc.: 92.19%] [G loss: 3.031885]\n",
      "3133 [D loss: 0.201410, acc.: 92.97%] [G loss: 3.130812]\n",
      "3134 [D loss: 0.195397, acc.: 91.80%] [G loss: 3.012995]\n",
      "3135 [D loss: 0.206874, acc.: 92.19%] [G loss: 3.180647]\n",
      "3136 [D loss: 0.214908, acc.: 91.41%] [G loss: 2.971770]\n",
      "3137 [D loss: 0.195727, acc.: 91.41%] [G loss: 3.163111]\n",
      "3138 [D loss: 0.208596, acc.: 91.80%] [G loss: 2.932161]\n",
      "3139 [D loss: 0.229407, acc.: 91.02%] [G loss: 3.242198]\n",
      "3140 [D loss: 0.196744, acc.: 92.19%] [G loss: 3.046766]\n",
      "3141 [D loss: 0.214245, acc.: 92.19%] [G loss: 3.134850]\n",
      "3142 [D loss: 0.214644, acc.: 91.80%] [G loss: 3.090023]\n",
      "3143 [D loss: 0.220574, acc.: 91.41%] [G loss: 2.932695]\n",
      "3144 [D loss: 0.223025, acc.: 91.41%] [G loss: 2.962850]\n",
      "3145 [D loss: 0.200569, acc.: 92.97%] [G loss: 3.177485]\n",
      "3146 [D loss: 0.202996, acc.: 92.58%] [G loss: 2.961736]\n",
      "3147 [D loss: 0.196427, acc.: 92.58%] [G loss: 3.045233]\n",
      "3148 [D loss: 0.197700, acc.: 92.19%] [G loss: 3.117314]\n",
      "3149 [D loss: 0.192857, acc.: 92.19%] [G loss: 3.191318]\n",
      "3150 [D loss: 0.177052, acc.: 92.58%] [G loss: 2.979466]\n",
      "3151 [D loss: 0.195785, acc.: 92.58%] [G loss: 3.200348]\n",
      "3152 [D loss: 0.213861, acc.: 90.62%] [G loss: 2.998312]\n",
      "3153 [D loss: 0.202295, acc.: 91.80%] [G loss: 3.086252]\n",
      "3154 [D loss: 0.238952, acc.: 89.06%] [G loss: 3.263472]\n",
      "3155 [D loss: 0.212186, acc.: 91.41%] [G loss: 2.966185]\n",
      "3156 [D loss: 0.203345, acc.: 91.80%] [G loss: 2.836736]\n",
      "3157 [D loss: 0.207605, acc.: 92.19%] [G loss: 2.984277]\n",
      "3158 [D loss: 0.207388, acc.: 91.41%] [G loss: 3.346805]\n",
      "3159 [D loss: 0.196644, acc.: 91.41%] [G loss: 3.193412]\n",
      "3160 [D loss: 0.199401, acc.: 92.19%] [G loss: 3.067464]\n",
      "3161 [D loss: 0.183191, acc.: 92.58%] [G loss: 3.050009]\n",
      "3162 [D loss: 0.171526, acc.: 92.58%] [G loss: 2.968272]\n",
      "3163 [D loss: 0.184232, acc.: 91.80%] [G loss: 3.126369]\n",
      "3164 [D loss: 0.176099, acc.: 92.97%] [G loss: 3.328753]\n",
      "3165 [D loss: 0.192031, acc.: 92.58%] [G loss: 3.285704]\n",
      "3166 [D loss: 0.190842, acc.: 92.19%] [G loss: 3.198995]\n",
      "3167 [D loss: 0.227108, acc.: 91.80%] [G loss: 3.069353]\n",
      "3168 [D loss: 0.192388, acc.: 92.19%] [G loss: 3.311901]\n",
      "3169 [D loss: 0.176670, acc.: 92.97%] [G loss: 3.097754]\n",
      "3170 [D loss: 0.191907, acc.: 92.19%] [G loss: 2.898893]\n",
      "3171 [D loss: 0.190328, acc.: 92.19%] [G loss: 2.797740]\n",
      "3172 [D loss: 0.210714, acc.: 92.19%] [G loss: 2.985563]\n",
      "3173 [D loss: 0.187819, acc.: 92.19%] [G loss: 3.243428]\n",
      "3174 [D loss: 0.207692, acc.: 91.41%] [G loss: 2.938481]\n",
      "3175 [D loss: 0.189222, acc.: 91.80%] [G loss: 3.060089]\n",
      "3176 [D loss: 0.191313, acc.: 92.58%] [G loss: 2.841995]\n",
      "3177 [D loss: 0.192910, acc.: 93.75%] [G loss: 3.124574]\n",
      "3178 [D loss: 0.205079, acc.: 91.80%] [G loss: 3.289408]\n",
      "3179 [D loss: 0.212791, acc.: 90.62%] [G loss: 3.239431]\n",
      "3180 [D loss: 0.210174, acc.: 91.41%] [G loss: 3.152783]\n",
      "3181 [D loss: 0.225330, acc.: 90.23%] [G loss: 3.086495]\n",
      "3182 [D loss: 0.213597, acc.: 91.80%] [G loss: 3.299721]\n",
      "3183 [D loss: 0.231513, acc.: 90.23%] [G loss: 3.158409]\n",
      "3184 [D loss: 0.185176, acc.: 92.19%] [G loss: 2.941358]\n",
      "3185 [D loss: 0.195652, acc.: 91.41%] [G loss: 2.849362]\n",
      "3186 [D loss: 0.208902, acc.: 92.19%] [G loss: 2.973412]\n",
      "3187 [D loss: 0.194985, acc.: 92.19%] [G loss: 2.945348]\n",
      "3188 [D loss: 0.202580, acc.: 92.58%] [G loss: 3.092545]\n",
      "3189 [D loss: 0.248228, acc.: 91.02%] [G loss: 3.107184]\n",
      "3190 [D loss: 0.216926, acc.: 91.80%] [G loss: 3.311963]\n",
      "3191 [D loss: 0.202643, acc.: 91.02%] [G loss: 3.049945]\n",
      "3192 [D loss: 0.198481, acc.: 92.58%] [G loss: 2.985137]\n",
      "3193 [D loss: 0.204856, acc.: 92.97%] [G loss: 2.729020]\n",
      "3194 [D loss: 0.214368, acc.: 92.19%] [G loss: 2.847614]\n",
      "3195 [D loss: 0.206787, acc.: 90.62%] [G loss: 3.169285]\n",
      "3196 [D loss: 0.223913, acc.: 91.41%] [G loss: 2.998331]\n",
      "3197 [D loss: 0.200892, acc.: 92.19%] [G loss: 3.173880]\n",
      "3198 [D loss: 0.199468, acc.: 92.58%] [G loss: 3.062809]\n",
      "3199 [D loss: 0.207491, acc.: 91.80%] [G loss: 3.219226]\n",
      "3200 [D loss: 0.199748, acc.: 92.97%] [G loss: 2.993294]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "3201 [D loss: 0.197006, acc.: 91.80%] [G loss: 2.970557]\n",
      "3202 [D loss: 0.195794, acc.: 92.58%] [G loss: 3.092570]\n",
      "3203 [D loss: 0.232295, acc.: 91.02%] [G loss: 3.021037]\n",
      "3204 [D loss: 0.200259, acc.: 92.19%] [G loss: 3.111698]\n",
      "3205 [D loss: 0.186448, acc.: 92.58%] [G loss: 3.053582]\n",
      "3206 [D loss: 0.182956, acc.: 92.97%] [G loss: 2.882925]\n",
      "3207 [D loss: 0.205590, acc.: 92.19%] [G loss: 3.107445]\n",
      "3208 [D loss: 0.202700, acc.: 91.80%] [G loss: 3.008328]\n",
      "3209 [D loss: 0.194587, acc.: 92.97%] [G loss: 3.344058]\n",
      "3210 [D loss: 0.221492, acc.: 92.19%] [G loss: 3.390203]\n",
      "3211 [D loss: 0.191658, acc.: 92.19%] [G loss: 3.104951]\n",
      "3212 [D loss: 0.190816, acc.: 91.02%] [G loss: 3.024812]\n",
      "3213 [D loss: 0.186954, acc.: 92.58%] [G loss: 3.308146]\n",
      "3214 [D loss: 0.216088, acc.: 91.41%] [G loss: 2.879424]\n",
      "3215 [D loss: 0.201448, acc.: 92.58%] [G loss: 2.871478]\n",
      "3216 [D loss: 0.209226, acc.: 91.80%] [G loss: 2.901002]\n",
      "3217 [D loss: 0.209423, acc.: 91.80%] [G loss: 3.303156]\n",
      "3218 [D loss: 0.238630, acc.: 91.41%] [G loss: 3.048984]\n",
      "3219 [D loss: 0.192655, acc.: 91.80%] [G loss: 2.951589]\n",
      "3220 [D loss: 0.212616, acc.: 92.19%] [G loss: 3.265260]\n",
      "3221 [D loss: 0.206983, acc.: 91.80%] [G loss: 2.868861]\n",
      "3222 [D loss: 0.211884, acc.: 91.80%] [G loss: 2.785369]\n",
      "3223 [D loss: 0.195574, acc.: 92.58%] [G loss: 2.824061]\n",
      "3224 [D loss: 0.230140, acc.: 90.23%] [G loss: 3.143992]\n",
      "3225 [D loss: 0.202299, acc.: 91.41%] [G loss: 2.952973]\n",
      "3226 [D loss: 0.191724, acc.: 92.19%] [G loss: 3.308264]\n",
      "3227 [D loss: 0.175773, acc.: 92.58%] [G loss: 2.985926]\n",
      "3228 [D loss: 0.211127, acc.: 91.80%] [G loss: 3.043481]\n",
      "3229 [D loss: 0.212823, acc.: 92.19%] [G loss: 3.031348]\n",
      "3230 [D loss: 0.199437, acc.: 91.41%] [G loss: 3.223351]\n",
      "3231 [D loss: 0.188650, acc.: 92.19%] [G loss: 3.093821]\n",
      "3232 [D loss: 0.188955, acc.: 92.19%] [G loss: 2.908400]\n",
      "3233 [D loss: 0.215690, acc.: 90.62%] [G loss: 2.984136]\n",
      "3234 [D loss: 0.199392, acc.: 92.19%] [G loss: 3.125771]\n",
      "3235 [D loss: 0.191639, acc.: 93.36%] [G loss: 2.773687]\n",
      "3236 [D loss: 0.215121, acc.: 91.41%] [G loss: 3.076550]\n",
      "3237 [D loss: 0.188552, acc.: 92.97%] [G loss: 2.990323]\n",
      "3238 [D loss: 0.193314, acc.: 91.80%] [G loss: 2.798944]\n",
      "3239 [D loss: 0.193320, acc.: 91.80%] [G loss: 3.193762]\n",
      "3240 [D loss: 0.201491, acc.: 93.36%] [G loss: 3.039083]\n",
      "3241 [D loss: 0.205942, acc.: 91.02%] [G loss: 3.093987]\n",
      "3242 [D loss: 0.202974, acc.: 91.80%] [G loss: 2.993859]\n",
      "3243 [D loss: 0.231893, acc.: 91.80%] [G loss: 2.938241]\n",
      "3244 [D loss: 0.198274, acc.: 91.80%] [G loss: 2.970427]\n",
      "3245 [D loss: 0.202536, acc.: 91.80%] [G loss: 2.555490]\n",
      "3246 [D loss: 0.187190, acc.: 92.58%] [G loss: 2.763856]\n",
      "3247 [D loss: 0.212231, acc.: 91.02%] [G loss: 2.840871]\n",
      "3248 [D loss: 0.189617, acc.: 91.41%] [G loss: 3.266661]\n",
      "3249 [D loss: 0.197546, acc.: 91.41%] [G loss: 2.740757]\n",
      "3250 [D loss: 0.208551, acc.: 92.58%] [G loss: 2.831024]\n",
      "3251 [D loss: 0.209786, acc.: 91.41%] [G loss: 3.225010]\n",
      "3252 [D loss: 0.213079, acc.: 91.02%] [G loss: 3.139199]\n",
      "3253 [D loss: 0.206193, acc.: 91.80%] [G loss: 3.171345]\n",
      "3254 [D loss: 0.230017, acc.: 90.62%] [G loss: 3.219282]\n",
      "3255 [D loss: 0.199457, acc.: 91.80%] [G loss: 3.201149]\n",
      "3256 [D loss: 0.177816, acc.: 92.58%] [G loss: 3.039556]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "3257 [D loss: 0.196629, acc.: 91.02%] [G loss: 2.929859]\n",
      "3258 [D loss: 0.191403, acc.: 92.97%] [G loss: 3.193298]\n",
      "3259 [D loss: 0.212646, acc.: 91.80%] [G loss: 3.034886]\n",
      "3260 [D loss: 0.204439, acc.: 92.58%] [G loss: 3.157506]\n",
      "3261 [D loss: 0.204672, acc.: 92.58%] [G loss: 2.899711]\n",
      "3262 [D loss: 0.201665, acc.: 91.80%] [G loss: 2.930047]\n",
      "3263 [D loss: 0.214831, acc.: 91.02%] [G loss: 2.975755]\n",
      "3264 [D loss: 0.220546, acc.: 91.02%] [G loss: 3.258767]\n",
      "3265 [D loss: 0.230459, acc.: 91.41%] [G loss: 3.309653]\n",
      "3266 [D loss: 0.196356, acc.: 91.02%] [G loss: 2.952168]\n",
      "3267 [D loss: 0.192743, acc.: 91.80%] [G loss: 3.112851]\n",
      "3268 [D loss: 0.211525, acc.: 92.58%] [G loss: 2.925320]\n",
      "3269 [D loss: 0.192142, acc.: 92.58%] [G loss: 3.099318]\n",
      "3270 [D loss: 0.219773, acc.: 91.80%] [G loss: 3.163571]\n",
      "3271 [D loss: 0.187245, acc.: 92.97%] [G loss: 3.123157]\n",
      "3272 [D loss: 0.210249, acc.: 91.80%] [G loss: 2.916937]\n",
      "3273 [D loss: 0.212932, acc.: 91.80%] [G loss: 2.938893]\n",
      "3274 [D loss: 0.208253, acc.: 91.41%] [G loss: 3.117337]\n",
      "3275 [D loss: 0.191135, acc.: 92.19%] [G loss: 3.244518]\n",
      "3276 [D loss: 0.193665, acc.: 92.97%] [G loss: 2.856127]\n",
      "3277 [D loss: 0.192238, acc.: 92.58%] [G loss: 2.941055]\n",
      "3278 [D loss: 0.198561, acc.: 91.41%] [G loss: 3.033134]\n",
      "3279 [D loss: 0.176053, acc.: 93.36%] [G loss: 3.339779]\n",
      "3280 [D loss: 0.188787, acc.: 92.58%] [G loss: 3.013340]\n",
      "3281 [D loss: 0.197173, acc.: 92.58%] [G loss: 3.378928]\n",
      "3282 [D loss: 0.195334, acc.: 91.80%] [G loss: 2.921981]\n",
      "3283 [D loss: 0.208108, acc.: 91.80%] [G loss: 2.955873]\n",
      "3284 [D loss: 0.201425, acc.: 92.19%] [G loss: 3.394857]\n",
      "3285 [D loss: 0.219162, acc.: 91.80%] [G loss: 2.866231]\n",
      "3286 [D loss: 0.211555, acc.: 91.80%] [G loss: 3.009336]\n",
      "3287 [D loss: 0.198155, acc.: 92.97%] [G loss: 3.087295]\n",
      "3288 [D loss: 0.206054, acc.: 92.19%] [G loss: 2.922121]\n",
      "3289 [D loss: 0.222243, acc.: 91.41%] [G loss: 3.024463]\n",
      "3290 [D loss: 0.208124, acc.: 92.58%] [G loss: 3.117616]\n",
      "3291 [D loss: 0.197584, acc.: 92.58%] [G loss: 3.057198]\n",
      "3292 [D loss: 0.198316, acc.: 92.58%] [G loss: 3.007372]\n",
      "3293 [D loss: 0.178526, acc.: 92.19%] [G loss: 2.927270]\n",
      "3294 [D loss: 0.194614, acc.: 91.80%] [G loss: 2.991742]\n",
      "3295 [D loss: 0.217579, acc.: 91.41%] [G loss: 3.072197]\n",
      "3296 [D loss: 0.192649, acc.: 92.97%] [G loss: 3.092685]\n",
      "3297 [D loss: 0.207990, acc.: 91.41%] [G loss: 2.879680]\n",
      "3298 [D loss: 0.198760, acc.: 92.97%] [G loss: 2.808066]\n",
      "3299 [D loss: 0.208373, acc.: 92.58%] [G loss: 3.047040]\n",
      "3300 [D loss: 0.215760, acc.: 91.02%] [G loss: 3.094613]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "3301 [D loss: 0.198417, acc.: 91.80%] [G loss: 3.122253]\n",
      "3302 [D loss: 0.202013, acc.: 91.41%] [G loss: 2.892803]\n",
      "3303 [D loss: 0.205470, acc.: 91.80%] [G loss: 2.975028]\n",
      "3304 [D loss: 0.182525, acc.: 92.58%] [G loss: 3.091636]\n",
      "3305 [D loss: 0.196126, acc.: 92.58%] [G loss: 3.104952]\n",
      "3306 [D loss: 0.206843, acc.: 92.58%] [G loss: 2.958069]\n",
      "3307 [D loss: 0.194579, acc.: 92.19%] [G loss: 3.008599]\n",
      "3308 [D loss: 0.197384, acc.: 92.58%] [G loss: 2.795636]\n",
      "3309 [D loss: 0.203866, acc.: 91.41%] [G loss: 3.011204]\n",
      "3310 [D loss: 0.210564, acc.: 91.80%] [G loss: 3.037354]\n",
      "3311 [D loss: 0.194008, acc.: 92.58%] [G loss: 3.006040]\n",
      "3312 [D loss: 0.188344, acc.: 92.58%] [G loss: 2.998630]\n",
      "3313 [D loss: 0.202010, acc.: 92.19%] [G loss: 2.973841]\n",
      "3314 [D loss: 0.191879, acc.: 92.58%] [G loss: 2.909631]\n",
      "3315 [D loss: 0.191473, acc.: 92.97%] [G loss: 2.947853]\n",
      "3316 [D loss: 0.197793, acc.: 91.41%] [G loss: 2.896430]\n",
      "3317 [D loss: 0.192831, acc.: 93.75%] [G loss: 3.130971]\n",
      "3318 [D loss: 0.209598, acc.: 91.41%] [G loss: 2.960120]\n",
      "3319 [D loss: 0.225475, acc.: 90.23%] [G loss: 3.129508]\n",
      "3320 [D loss: 0.204048, acc.: 92.19%] [G loss: 2.886268]\n",
      "3321 [D loss: 0.238957, acc.: 91.80%] [G loss: 2.795357]\n",
      "3322 [D loss: 0.203081, acc.: 92.19%] [G loss: 2.831785]\n",
      "3323 [D loss: 0.206011, acc.: 92.58%] [G loss: 2.981030]\n",
      "3324 [D loss: 0.190708, acc.: 92.19%] [G loss: 2.951387]\n",
      "3325 [D loss: 0.215153, acc.: 91.41%] [G loss: 2.979562]\n",
      "3326 [D loss: 0.213233, acc.: 91.41%] [G loss: 2.991414]\n",
      "3327 [D loss: 0.184875, acc.: 92.19%] [G loss: 3.121021]\n",
      "3328 [D loss: 0.213687, acc.: 91.02%] [G loss: 3.021797]\n",
      "3329 [D loss: 0.203709, acc.: 92.58%] [G loss: 3.160472]\n",
      "3330 [D loss: 0.194714, acc.: 92.19%] [G loss: 2.883749]\n",
      "3331 [D loss: 0.198562, acc.: 92.58%] [G loss: 3.010877]\n",
      "3332 [D loss: 0.229951, acc.: 90.62%] [G loss: 3.311559]\n",
      "3333 [D loss: 0.229789, acc.: 91.02%] [G loss: 3.080304]\n",
      "3334 [D loss: 0.200204, acc.: 92.19%] [G loss: 2.923597]\n",
      "3335 [D loss: 0.218662, acc.: 91.02%] [G loss: 2.807167]\n",
      "3336 [D loss: 0.203347, acc.: 91.80%] [G loss: 2.920905]\n",
      "3337 [D loss: 0.211022, acc.: 92.19%] [G loss: 3.039980]\n",
      "3338 [D loss: 0.192898, acc.: 91.80%] [G loss: 2.903780]\n",
      "3339 [D loss: 0.191114, acc.: 92.19%] [G loss: 3.041750]\n",
      "3340 [D loss: 0.204869, acc.: 91.41%] [G loss: 3.007298]\n",
      "3341 [D loss: 0.212222, acc.: 92.19%] [G loss: 2.966238]\n",
      "3342 [D loss: 0.185230, acc.: 92.58%] [G loss: 3.324728]\n",
      "3343 [D loss: 0.200449, acc.: 91.80%] [G loss: 2.981466]\n",
      "3344 [D loss: 0.195949, acc.: 91.80%] [G loss: 2.889493]\n",
      "3345 [D loss: 0.204766, acc.: 92.58%] [G loss: 3.070592]\n",
      "3346 [D loss: 0.183123, acc.: 92.58%] [G loss: 2.715237]\n",
      "3347 [D loss: 0.209370, acc.: 91.80%] [G loss: 3.019928]\n",
      "3348 [D loss: 0.210027, acc.: 92.97%] [G loss: 3.007371]\n",
      "3349 [D loss: 0.213426, acc.: 91.80%] [G loss: 3.056723]\n",
      "3350 [D loss: 0.208183, acc.: 92.19%] [G loss: 2.922110]\n",
      "3351 [D loss: 0.197991, acc.: 92.19%] [G loss: 3.000231]\n",
      "3352 [D loss: 0.191956, acc.: 92.19%] [G loss: 3.088244]\n",
      "3353 [D loss: 0.211232, acc.: 91.41%] [G loss: 3.574769]\n",
      "3354 [D loss: 0.182644, acc.: 92.58%] [G loss: 2.887468]\n",
      "3355 [D loss: 0.187565, acc.: 92.58%] [G loss: 3.112052]\n",
      "3356 [D loss: 0.219349, acc.: 91.41%] [G loss: 3.098372]\n",
      "3357 [D loss: 0.216494, acc.: 92.19%] [G loss: 2.954117]\n",
      "3358 [D loss: 0.196478, acc.: 92.19%] [G loss: 2.941448]\n",
      "3359 [D loss: 0.198134, acc.: 92.19%] [G loss: 3.160239]\n",
      "3360 [D loss: 0.186072, acc.: 92.19%] [G loss: 2.934322]\n",
      "3361 [D loss: 0.186456, acc.: 92.19%] [G loss: 2.920803]\n",
      "3362 [D loss: 0.210677, acc.: 91.80%] [G loss: 2.910124]\n",
      "3363 [D loss: 0.196529, acc.: 92.58%] [G loss: 2.989993]\n",
      "3364 [D loss: 0.206649, acc.: 91.80%] [G loss: 3.239728]\n",
      "3365 [D loss: 0.211246, acc.: 91.80%] [G loss: 3.164928]\n",
      "3366 [D loss: 0.197684, acc.: 91.80%] [G loss: 3.126195]\n",
      "3367 [D loss: 0.202975, acc.: 91.41%] [G loss: 3.120183]\n",
      "3368 [D loss: 0.198744, acc.: 92.58%] [G loss: 3.118767]\n",
      "3369 [D loss: 0.175694, acc.: 92.97%] [G loss: 2.982869]\n",
      "3370 [D loss: 0.191433, acc.: 92.19%] [G loss: 3.056800]\n",
      "3371 [D loss: 0.196829, acc.: 93.75%] [G loss: 2.963388]\n",
      "3372 [D loss: 0.198253, acc.: 92.58%] [G loss: 2.817738]\n",
      "3373 [D loss: 0.191871, acc.: 92.19%] [G loss: 2.905766]\n",
      "3374 [D loss: 0.208965, acc.: 91.02%] [G loss: 3.200142]\n",
      "3375 [D loss: 0.196958, acc.: 91.80%] [G loss: 2.928086]\n",
      "3376 [D loss: 0.200799, acc.: 91.80%] [G loss: 3.117276]\n",
      "3377 [D loss: 0.203775, acc.: 92.58%] [G loss: 3.123412]\n",
      "3378 [D loss: 0.190524, acc.: 92.58%] [G loss: 3.176934]\n",
      "3379 [D loss: 0.218167, acc.: 91.80%] [G loss: 3.242819]\n",
      "3380 [D loss: 0.205937, acc.: 92.19%] [G loss: 3.188685]\n",
      "3381 [D loss: 0.191768, acc.: 93.75%] [G loss: 2.806766]\n",
      "3382 [D loss: 0.214676, acc.: 91.80%] [G loss: 3.168149]\n",
      "3383 [D loss: 0.194671, acc.: 91.80%] [G loss: 3.301708]\n",
      "3384 [D loss: 0.200904, acc.: 92.58%] [G loss: 3.018378]\n",
      "3385 [D loss: 0.189953, acc.: 92.58%] [G loss: 3.060150]\n",
      "3386 [D loss: 0.187366, acc.: 92.97%] [G loss: 2.885740]\n",
      "3387 [D loss: 0.181489, acc.: 92.19%] [G loss: 3.152368]\n",
      "3388 [D loss: 0.181008, acc.: 92.58%] [G loss: 3.265190]\n",
      "3389 [D loss: 0.158228, acc.: 92.97%] [G loss: 3.186686]\n",
      "3390 [D loss: 0.173948, acc.: 92.97%] [G loss: 2.911196]\n",
      "3391 [D loss: 0.191568, acc.: 91.80%] [G loss: 3.301062]\n",
      "3392 [D loss: 0.192166, acc.: 92.58%] [G loss: 3.189749]\n",
      "3393 [D loss: 0.169196, acc.: 92.97%] [G loss: 3.139794]\n",
      "3394 [D loss: 0.197153, acc.: 91.41%] [G loss: 3.401431]\n",
      "3395 [D loss: 0.188881, acc.: 92.58%] [G loss: 2.893151]\n",
      "3396 [D loss: 0.199775, acc.: 92.19%] [G loss: 3.114676]\n",
      "3397 [D loss: 0.198977, acc.: 93.36%] [G loss: 3.212744]\n",
      "3398 [D loss: 0.201964, acc.: 91.41%] [G loss: 3.519244]\n",
      "3399 [D loss: 0.185723, acc.: 91.80%] [G loss: 3.168905]\n",
      "3400 [D loss: 0.198142, acc.: 91.02%] [G loss: 3.248284]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "3401 [D loss: 0.201262, acc.: 91.02%] [G loss: 3.260374]\n",
      "3402 [D loss: 0.212069, acc.: 91.80%] [G loss: 3.073696]\n",
      "3403 [D loss: 0.192697, acc.: 92.19%] [G loss: 3.132722]\n",
      "3404 [D loss: 0.212777, acc.: 91.41%] [G loss: 3.225975]\n",
      "3405 [D loss: 0.221844, acc.: 90.62%] [G loss: 3.243871]\n",
      "3406 [D loss: 0.215632, acc.: 91.80%] [G loss: 2.945268]\n",
      "3407 [D loss: 0.201389, acc.: 91.80%] [G loss: 2.915151]\n",
      "3408 [D loss: 0.219896, acc.: 91.02%] [G loss: 2.855424]\n",
      "3409 [D loss: 0.183922, acc.: 92.97%] [G loss: 2.957535]\n",
      "3410 [D loss: 0.209037, acc.: 92.19%] [G loss: 2.811974]\n",
      "3411 [D loss: 0.182956, acc.: 91.80%] [G loss: 3.134614]\n",
      "3412 [D loss: 0.198613, acc.: 92.58%] [G loss: 3.052625]\n",
      "3413 [D loss: 0.189322, acc.: 92.58%] [G loss: 2.924185]\n",
      "3414 [D loss: 0.195103, acc.: 91.80%] [G loss: 3.201572]\n",
      "3415 [D loss: 0.188995, acc.: 92.19%] [G loss: 3.179905]\n",
      "3416 [D loss: 0.196609, acc.: 92.97%] [G loss: 2.943358]\n",
      "3417 [D loss: 0.192694, acc.: 91.02%] [G loss: 3.246084]\n",
      "3418 [D loss: 0.189908, acc.: 91.80%] [G loss: 2.894509]\n",
      "3419 [D loss: 0.216379, acc.: 89.84%] [G loss: 3.058635]\n",
      "3420 [D loss: 0.209232, acc.: 92.97%] [G loss: 3.032042]\n",
      "3421 [D loss: 0.201252, acc.: 92.58%] [G loss: 2.723884]\n",
      "3422 [D loss: 0.219806, acc.: 92.58%] [G loss: 2.656429]\n",
      "3423 [D loss: 0.213319, acc.: 89.45%] [G loss: 2.964379]\n",
      "3424 [D loss: 0.199436, acc.: 92.97%] [G loss: 3.248388]\n",
      "3425 [D loss: 0.194067, acc.: 91.41%] [G loss: 3.049736]\n",
      "3426 [D loss: 0.207853, acc.: 92.58%] [G loss: 3.095451]\n",
      "3427 [D loss: 0.196231, acc.: 92.97%] [G loss: 2.964707]\n",
      "3428 [D loss: 0.184285, acc.: 92.58%] [G loss: 3.143070]\n",
      "3429 [D loss: 0.201119, acc.: 92.19%] [G loss: 2.826663]\n",
      "3430 [D loss: 0.184209, acc.: 92.97%] [G loss: 2.915344]\n",
      "3431 [D loss: 0.215613, acc.: 91.80%] [G loss: 3.026017]\n",
      "3432 [D loss: 0.196699, acc.: 91.80%] [G loss: 3.192910]\n",
      "3433 [D loss: 0.205260, acc.: 92.58%] [G loss: 3.272176]\n",
      "3434 [D loss: 0.201585, acc.: 92.19%] [G loss: 3.073621]\n",
      "3435 [D loss: 0.186632, acc.: 92.19%] [G loss: 2.975452]\n",
      "3436 [D loss: 0.190493, acc.: 92.19%] [G loss: 3.177698]\n",
      "3437 [D loss: 0.194775, acc.: 91.80%] [G loss: 3.340140]\n",
      "3438 [D loss: 0.200397, acc.: 91.80%] [G loss: 3.176379]\n",
      "3439 [D loss: 0.216291, acc.: 91.80%] [G loss: 3.072042]\n",
      "3440 [D loss: 0.188934, acc.: 92.19%] [G loss: 3.135561]\n",
      "3441 [D loss: 0.203054, acc.: 91.41%] [G loss: 3.070589]\n",
      "3442 [D loss: 0.173837, acc.: 92.19%] [G loss: 2.969627]\n",
      "3443 [D loss: 0.197278, acc.: 92.97%] [G loss: 2.906105]\n",
      "3444 [D loss: 0.192878, acc.: 92.19%] [G loss: 2.810921]\n",
      "3445 [D loss: 0.195297, acc.: 92.58%] [G loss: 3.119924]\n",
      "3446 [D loss: 0.216744, acc.: 91.80%] [G loss: 3.246197]\n",
      "3447 [D loss: 0.209488, acc.: 91.80%] [G loss: 3.291684]\n",
      "3448 [D loss: 0.201026, acc.: 92.19%] [G loss: 2.988166]\n",
      "3449 [D loss: 0.215317, acc.: 91.80%] [G loss: 2.748341]\n",
      "3450 [D loss: 0.205325, acc.: 91.41%] [G loss: 2.980293]\n",
      "3451 [D loss: 0.201711, acc.: 91.80%] [G loss: 3.143270]\n",
      "3452 [D loss: 0.203104, acc.: 92.19%] [G loss: 3.136564]\n",
      "3453 [D loss: 0.207551, acc.: 92.19%] [G loss: 3.370853]\n",
      "3454 [D loss: 0.215075, acc.: 92.19%] [G loss: 2.917487]\n",
      "3455 [D loss: 0.203996, acc.: 91.02%] [G loss: 2.855606]\n",
      "3456 [D loss: 0.213564, acc.: 92.19%] [G loss: 3.134712]\n",
      "3457 [D loss: 0.205671, acc.: 91.80%] [G loss: 3.112979]\n",
      "3458 [D loss: 0.199173, acc.: 92.19%] [G loss: 3.118927]\n",
      "3459 [D loss: 0.199190, acc.: 92.58%] [G loss: 2.944075]\n",
      "3460 [D loss: 0.233041, acc.: 91.80%] [G loss: 2.936666]\n",
      "3461 [D loss: 0.214963, acc.: 92.58%] [G loss: 3.203675]\n",
      "3462 [D loss: 0.200146, acc.: 92.19%] [G loss: 2.896147]\n",
      "3463 [D loss: 0.183461, acc.: 92.19%] [G loss: 3.030116]\n",
      "3464 [D loss: 0.200141, acc.: 91.80%] [G loss: 2.782035]\n",
      "3465 [D loss: 0.187592, acc.: 92.19%] [G loss: 3.224976]\n",
      "3466 [D loss: 0.203504, acc.: 91.41%] [G loss: 3.015342]\n",
      "3467 [D loss: 0.190965, acc.: 92.19%] [G loss: 3.171781]\n",
      "3468 [D loss: 0.185783, acc.: 92.58%] [G loss: 2.922322]\n",
      "3469 [D loss: 0.216228, acc.: 91.80%] [G loss: 3.244357]\n",
      "3470 [D loss: 0.209429, acc.: 91.41%] [G loss: 3.449107]\n",
      "3471 [D loss: 0.215674, acc.: 91.41%] [G loss: 2.861571]\n",
      "3472 [D loss: 0.189114, acc.: 92.19%] [G loss: 2.931036]\n",
      "3473 [D loss: 0.190721, acc.: 92.58%] [G loss: 2.904549]\n",
      "3474 [D loss: 0.189255, acc.: 92.19%] [G loss: 3.497961]\n",
      "3475 [D loss: 0.178364, acc.: 92.58%] [G loss: 3.281933]\n",
      "3476 [D loss: 0.194496, acc.: 91.80%] [G loss: 3.253917]\n",
      "3477 [D loss: 0.213321, acc.: 92.58%] [G loss: 2.992939]\n",
      "3478 [D loss: 0.191656, acc.: 92.58%] [G loss: 3.181036]\n",
      "3479 [D loss: 0.208112, acc.: 92.58%] [G loss: 3.191061]\n",
      "3480 [D loss: 0.188398, acc.: 92.19%] [G loss: 3.002871]\n",
      "3481 [D loss: 0.186065, acc.: 92.19%] [G loss: 3.120215]\n",
      "3482 [D loss: 0.193243, acc.: 92.58%] [G loss: 3.137767]\n",
      "3483 [D loss: 0.193493, acc.: 92.58%] [G loss: 3.074880]\n",
      "3484 [D loss: 0.185709, acc.: 92.97%] [G loss: 3.113209]\n",
      "3485 [D loss: 0.207841, acc.: 92.19%] [G loss: 3.151277]\n",
      "3486 [D loss: 0.214004, acc.: 91.02%] [G loss: 2.934738]\n",
      "3487 [D loss: 0.175937, acc.: 92.97%] [G loss: 3.309455]\n",
      "3488 [D loss: 0.186040, acc.: 92.58%] [G loss: 3.100796]\n",
      "3489 [D loss: 0.179610, acc.: 93.36%] [G loss: 3.097668]\n",
      "3490 [D loss: 0.208922, acc.: 92.19%] [G loss: 3.026612]\n",
      "3491 [D loss: 0.198651, acc.: 92.19%] [G loss: 2.900723]\n",
      "3492 [D loss: 0.189972, acc.: 92.58%] [G loss: 3.233380]\n",
      "3493 [D loss: 0.176737, acc.: 92.19%] [G loss: 3.117779]\n",
      "3494 [D loss: 0.200020, acc.: 91.41%] [G loss: 3.210142]\n",
      "3495 [D loss: 0.198426, acc.: 91.80%] [G loss: 2.994124]\n",
      "3496 [D loss: 0.208208, acc.: 91.80%] [G loss: 3.093256]\n",
      "3497 [D loss: 0.199167, acc.: 91.80%] [G loss: 3.224130]\n",
      "3498 [D loss: 0.217202, acc.: 92.19%] [G loss: 3.043006]\n",
      "3499 [D loss: 0.231499, acc.: 91.41%] [G loss: 2.797078]\n",
      "3500 [D loss: 0.204982, acc.: 92.58%] [G loss: 2.790545]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "3501 [D loss: 0.193468, acc.: 92.19%] [G loss: 2.831816]\n",
      "3502 [D loss: 0.189866, acc.: 92.19%] [G loss: 3.099766]\n",
      "3503 [D loss: 0.199397, acc.: 91.80%] [G loss: 3.093302]\n",
      "3504 [D loss: 0.206424, acc.: 91.41%] [G loss: 2.921210]\n",
      "3505 [D loss: 0.207614, acc.: 91.02%] [G loss: 3.231030]\n",
      "3506 [D loss: 0.194128, acc.: 92.97%] [G loss: 3.151249]\n",
      "3507 [D loss: 0.211475, acc.: 91.41%] [G loss: 2.981644]\n",
      "3508 [D loss: 0.178427, acc.: 92.58%] [G loss: 3.139133]\n",
      "3509 [D loss: 0.191464, acc.: 91.80%] [G loss: 2.939284]\n",
      "3510 [D loss: 0.195770, acc.: 92.19%] [G loss: 3.343750]\n",
      "3511 [D loss: 0.190109, acc.: 91.80%] [G loss: 3.077592]\n",
      "3512 [D loss: 0.189738, acc.: 92.19%] [G loss: 3.257996]\n",
      "3513 [D loss: 0.194617, acc.: 92.58%] [G loss: 2.852110]\n",
      "3514 [D loss: 0.208646, acc.: 90.62%] [G loss: 2.938725]\n",
      "3515 [D loss: 0.166840, acc.: 92.19%] [G loss: 2.987805]\n",
      "3516 [D loss: 0.197473, acc.: 92.58%] [G loss: 3.226385]\n",
      "3517 [D loss: 0.172964, acc.: 92.97%] [G loss: 3.608521]\n",
      "3518 [D loss: 0.202311, acc.: 91.80%] [G loss: 3.200174]\n",
      "3519 [D loss: 0.173519, acc.: 92.58%] [G loss: 3.089559]\n",
      "3520 [D loss: 0.220220, acc.: 91.02%] [G loss: 3.063233]\n",
      "3521 [D loss: 0.184184, acc.: 91.80%] [G loss: 3.490806]\n",
      "3522 [D loss: 0.181503, acc.: 91.41%] [G loss: 3.135525]\n",
      "3523 [D loss: 0.168191, acc.: 93.36%] [G loss: 3.047102]\n",
      "3524 [D loss: 0.208946, acc.: 91.41%] [G loss: 2.823761]\n",
      "3525 [D loss: 0.195878, acc.: 92.19%] [G loss: 3.119070]\n",
      "3526 [D loss: 0.205595, acc.: 92.19%] [G loss: 3.035775]\n",
      "3527 [D loss: 0.181146, acc.: 92.58%] [G loss: 3.162993]\n",
      "3528 [D loss: 0.188412, acc.: 92.19%] [G loss: 3.357752]\n",
      "3529 [D loss: 0.197625, acc.: 91.80%] [G loss: 2.857169]\n",
      "3530 [D loss: 0.204975, acc.: 91.41%] [G loss: 2.960871]\n",
      "3531 [D loss: 0.190548, acc.: 91.02%] [G loss: 2.971565]\n",
      "3532 [D loss: 0.196384, acc.: 92.97%] [G loss: 3.006073]\n",
      "3533 [D loss: 0.199201, acc.: 92.19%] [G loss: 3.018535]\n",
      "3534 [D loss: 0.229644, acc.: 91.80%] [G loss: 3.011403]\n",
      "3535 [D loss: 0.212654, acc.: 91.02%] [G loss: 2.851331]\n",
      "3536 [D loss: 0.216895, acc.: 91.41%] [G loss: 3.382450]\n",
      "3537 [D loss: 0.185733, acc.: 92.19%] [G loss: 3.156780]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "3538 [D loss: 0.170040, acc.: 92.58%] [G loss: 3.020536]\n",
      "3539 [D loss: 0.184862, acc.: 92.97%] [G loss: 3.050853]\n",
      "3540 [D loss: 0.192166, acc.: 92.97%] [G loss: 2.931714]\n",
      "3541 [D loss: 0.201441, acc.: 91.80%] [G loss: 3.205993]\n",
      "3542 [D loss: 0.179526, acc.: 92.97%] [G loss: 3.265146]\n",
      "3543 [D loss: 0.200583, acc.: 91.41%] [G loss: 2.995341]\n",
      "3544 [D loss: 0.200466, acc.: 92.19%] [G loss: 3.152292]\n",
      "3545 [D loss: 0.179411, acc.: 92.97%] [G loss: 2.990067]\n",
      "3546 [D loss: 0.188668, acc.: 92.19%] [G loss: 3.315190]\n",
      "3547 [D loss: 0.190114, acc.: 91.80%] [G loss: 2.835218]\n",
      "3548 [D loss: 0.194511, acc.: 92.58%] [G loss: 2.858169]\n",
      "3549 [D loss: 0.188538, acc.: 92.58%] [G loss: 3.146778]\n",
      "3550 [D loss: 0.219297, acc.: 91.80%] [G loss: 2.890516]\n",
      "3551 [D loss: 0.211955, acc.: 92.19%] [G loss: 3.161516]\n",
      "3552 [D loss: 0.203516, acc.: 92.58%] [G loss: 3.098177]\n",
      "3553 [D loss: 0.207982, acc.: 92.58%] [G loss: 2.902529]\n",
      "3554 [D loss: 0.190227, acc.: 92.58%] [G loss: 3.370212]\n",
      "3555 [D loss: 0.212077, acc.: 91.80%] [G loss: 3.057621]\n",
      "3556 [D loss: 0.221866, acc.: 90.23%] [G loss: 2.940908]\n",
      "3557 [D loss: 0.226580, acc.: 91.80%] [G loss: 3.181070]\n",
      "3558 [D loss: 0.191227, acc.: 92.19%] [G loss: 3.144895]\n",
      "3559 [D loss: 0.201032, acc.: 91.80%] [G loss: 3.067522]\n",
      "3560 [D loss: 0.195326, acc.: 91.80%] [G loss: 3.283276]\n",
      "3561 [D loss: 0.192560, acc.: 92.19%] [G loss: 3.184628]\n",
      "3562 [D loss: 0.213800, acc.: 92.58%] [G loss: 3.027644]\n",
      "3563 [D loss: 0.191364, acc.: 92.97%] [G loss: 3.108925]\n",
      "3564 [D loss: 0.203169, acc.: 91.41%] [G loss: 2.970960]\n",
      "3565 [D loss: 0.202785, acc.: 90.23%] [G loss: 3.044479]\n",
      "3566 [D loss: 0.179868, acc.: 92.58%] [G loss: 2.950520]\n",
      "3567 [D loss: 0.198287, acc.: 92.19%] [G loss: 2.895355]\n",
      "3568 [D loss: 0.197883, acc.: 91.80%] [G loss: 3.162043]\n",
      "3569 [D loss: 0.211097, acc.: 92.58%] [G loss: 3.031158]\n",
      "3570 [D loss: 0.201919, acc.: 92.97%] [G loss: 3.019697]\n",
      "3571 [D loss: 0.183096, acc.: 92.58%] [G loss: 2.889834]\n",
      "3572 [D loss: 0.202660, acc.: 92.19%] [G loss: 3.032612]\n",
      "3573 [D loss: 0.195323, acc.: 92.58%] [G loss: 3.157451]\n",
      "3574 [D loss: 0.205938, acc.: 92.19%] [G loss: 3.271686]\n",
      "3575 [D loss: 0.195672, acc.: 91.80%] [G loss: 3.188068]\n",
      "3576 [D loss: 0.207431, acc.: 92.19%] [G loss: 3.043745]\n",
      "3577 [D loss: 0.187808, acc.: 92.97%] [G loss: 3.089377]\n",
      "3578 [D loss: 0.210870, acc.: 91.02%] [G loss: 3.168142]\n",
      "3579 [D loss: 0.196204, acc.: 92.19%] [G loss: 2.790591]\n",
      "3580 [D loss: 0.197457, acc.: 92.19%] [G loss: 3.004412]\n",
      "3581 [D loss: 0.200913, acc.: 92.19%] [G loss: 3.373469]\n",
      "3582 [D loss: 0.192370, acc.: 91.41%] [G loss: 3.120114]\n",
      "3583 [D loss: 0.167875, acc.: 91.80%] [G loss: 3.260299]\n",
      "3584 [D loss: 0.192715, acc.: 91.41%] [G loss: 3.216457]\n",
      "3585 [D loss: 0.181382, acc.: 92.19%] [G loss: 3.610395]\n",
      "3586 [D loss: 0.209240, acc.: 92.19%] [G loss: 3.049890]\n",
      "3587 [D loss: 0.208084, acc.: 91.02%] [G loss: 3.503679]\n",
      "3588 [D loss: 0.208209, acc.: 92.19%] [G loss: 3.048788]\n",
      "3589 [D loss: 0.180880, acc.: 92.19%] [G loss: 3.453132]\n",
      "3590 [D loss: 0.186477, acc.: 92.19%] [G loss: 2.940517]\n",
      "3591 [D loss: 0.180086, acc.: 92.58%] [G loss: 3.202589]\n",
      "3592 [D loss: 0.176902, acc.: 93.36%] [G loss: 2.902822]\n",
      "3593 [D loss: 0.189241, acc.: 91.80%] [G loss: 3.336101]\n",
      "3594 [D loss: 0.173298, acc.: 92.97%] [G loss: 3.127156]\n",
      "3595 [D loss: 0.189489, acc.: 92.19%] [G loss: 3.224763]\n",
      "3596 [D loss: 0.189803, acc.: 92.19%] [G loss: 3.174723]\n",
      "3597 [D loss: 0.237188, acc.: 91.02%] [G loss: 3.598672]\n",
      "3598 [D loss: 0.201080, acc.: 91.41%] [G loss: 3.479025]\n",
      "3599 [D loss: 0.167024, acc.: 92.97%] [G loss: 3.226456]\n",
      "3600 [D loss: 0.197466, acc.: 91.41%] [G loss: 2.818436]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "3601 [D loss: 0.184381, acc.: 91.02%] [G loss: 3.464813]\n",
      "3602 [D loss: 0.199448, acc.: 91.80%] [G loss: 3.347008]\n",
      "3603 [D loss: 0.200095, acc.: 91.80%] [G loss: 3.194414]\n",
      "3604 [D loss: 0.196933, acc.: 92.19%] [G loss: 3.076357]\n",
      "3605 [D loss: 0.199779, acc.: 92.58%] [G loss: 3.141545]\n",
      "3606 [D loss: 0.196465, acc.: 91.80%] [G loss: 2.888970]\n",
      "3607 [D loss: 0.205609, acc.: 91.41%] [G loss: 3.288280]\n",
      "3608 [D loss: 0.200631, acc.: 92.19%] [G loss: 3.182127]\n",
      "3609 [D loss: 0.221582, acc.: 91.02%] [G loss: 3.181357]\n",
      "3610 [D loss: 0.199070, acc.: 92.19%] [G loss: 3.224610]\n",
      "3611 [D loss: 0.228503, acc.: 91.02%] [G loss: 2.843901]\n",
      "3612 [D loss: 0.201339, acc.: 92.19%] [G loss: 3.121566]\n",
      "3613 [D loss: 0.214150, acc.: 91.80%] [G loss: 3.041756]\n",
      "3614 [D loss: 0.203053, acc.: 92.97%] [G loss: 2.961996]\n",
      "3615 [D loss: 0.233595, acc.: 91.41%] [G loss: 2.770485]\n",
      "3616 [D loss: 0.246698, acc.: 88.28%] [G loss: 3.432892]\n",
      "3617 [D loss: 0.211544, acc.: 91.80%] [G loss: 3.098028]\n",
      "3618 [D loss: 0.202198, acc.: 91.80%] [G loss: 2.932091]\n",
      "3619 [D loss: 0.205691, acc.: 91.80%] [G loss: 3.130148]\n",
      "3620 [D loss: 0.192564, acc.: 92.97%] [G loss: 2.876480]\n",
      "3621 [D loss: 0.204208, acc.: 92.58%] [G loss: 3.028322]\n",
      "3622 [D loss: 0.197115, acc.: 92.58%] [G loss: 3.130045]\n",
      "3623 [D loss: 0.195988, acc.: 91.41%] [G loss: 2.866082]\n",
      "3624 [D loss: 0.214194, acc.: 91.80%] [G loss: 2.962185]\n",
      "3625 [D loss: 0.219372, acc.: 92.19%] [G loss: 2.857924]\n",
      "3626 [D loss: 0.200387, acc.: 91.80%] [G loss: 3.357028]\n",
      "3627 [D loss: 0.218735, acc.: 92.58%] [G loss: 3.117993]\n",
      "3628 [D loss: 0.196517, acc.: 91.02%] [G loss: 3.374128]\n",
      "3629 [D loss: 0.201711, acc.: 91.80%] [G loss: 2.960549]\n",
      "3630 [D loss: 0.199131, acc.: 92.58%] [G loss: 2.824545]\n",
      "3631 [D loss: 0.216753, acc.: 91.41%] [G loss: 2.854293]\n",
      "3632 [D loss: 0.195236, acc.: 92.19%] [G loss: 3.029830]\n",
      "3633 [D loss: 0.223215, acc.: 91.80%] [G loss: 2.742242]\n",
      "3634 [D loss: 0.224547, acc.: 92.58%] [G loss: 2.981162]\n",
      "3635 [D loss: 0.194627, acc.: 92.58%] [G loss: 3.082248]\n",
      "3636 [D loss: 0.208285, acc.: 92.19%] [G loss: 3.087204]\n",
      "3637 [D loss: 0.185008, acc.: 92.58%] [G loss: 3.067237]\n",
      "3638 [D loss: 0.185299, acc.: 92.19%] [G loss: 2.847761]\n",
      "3639 [D loss: 0.202633, acc.: 91.80%] [G loss: 3.063589]\n",
      "3640 [D loss: 0.186175, acc.: 92.97%] [G loss: 2.959017]\n",
      "3641 [D loss: 0.172151, acc.: 93.36%] [G loss: 3.338388]\n",
      "3642 [D loss: 0.180356, acc.: 92.58%] [G loss: 3.159700]\n",
      "3643 [D loss: 0.175998, acc.: 92.97%] [G loss: 3.405688]\n",
      "3644 [D loss: 0.188621, acc.: 92.19%] [G loss: 3.128307]\n",
      "3645 [D loss: 0.180048, acc.: 92.97%] [G loss: 3.212283]\n",
      "3646 [D loss: 0.168712, acc.: 92.97%] [G loss: 2.926490]\n",
      "3647 [D loss: 0.221510, acc.: 91.02%] [G loss: 3.137908]\n",
      "3648 [D loss: 0.206900, acc.: 91.80%] [G loss: 3.188886]\n",
      "3649 [D loss: 0.213449, acc.: 91.80%] [G loss: 3.144286]\n",
      "3650 [D loss: 0.198446, acc.: 91.80%] [G loss: 3.050588]\n",
      "3651 [D loss: 0.219029, acc.: 91.80%] [G loss: 2.878979]\n",
      "3652 [D loss: 0.208629, acc.: 92.97%] [G loss: 2.924861]\n",
      "3653 [D loss: 0.198206, acc.: 92.19%] [G loss: 3.185179]\n",
      "3654 [D loss: 0.192738, acc.: 92.19%] [G loss: 2.999171]\n",
      "3655 [D loss: 0.212726, acc.: 91.80%] [G loss: 2.808913]\n",
      "3656 [D loss: 0.191772, acc.: 91.41%] [G loss: 3.098079]\n",
      "3657 [D loss: 0.195154, acc.: 92.58%] [G loss: 3.129415]\n",
      "3658 [D loss: 0.216726, acc.: 92.19%] [G loss: 2.969563]\n",
      "3659 [D loss: 0.188130, acc.: 92.97%] [G loss: 3.147138]\n",
      "3660 [D loss: 0.193386, acc.: 92.97%] [G loss: 3.106749]\n",
      "3661 [D loss: 0.208571, acc.: 91.80%] [G loss: 3.608382]\n",
      "3662 [D loss: 0.223038, acc.: 91.80%] [G loss: 2.880709]\n",
      "3663 [D loss: 0.216775, acc.: 92.19%] [G loss: 3.143271]\n",
      "3664 [D loss: 0.226937, acc.: 89.84%] [G loss: 3.175573]\n",
      "3665 [D loss: 0.194134, acc.: 92.97%] [G loss: 3.192200]\n",
      "3666 [D loss: 0.219120, acc.: 91.80%] [G loss: 3.104801]\n",
      "3667 [D loss: 0.204580, acc.: 92.19%] [G loss: 2.834350]\n",
      "3668 [D loss: 0.189812, acc.: 92.58%] [G loss: 3.299996]\n",
      "3669 [D loss: 0.181453, acc.: 92.58%] [G loss: 2.943660]\n",
      "3670 [D loss: 0.193002, acc.: 92.19%] [G loss: 2.994466]\n",
      "3671 [D loss: 0.204265, acc.: 92.58%] [G loss: 2.999755]\n",
      "3672 [D loss: 0.201065, acc.: 92.58%] [G loss: 2.888103]\n",
      "3673 [D loss: 0.176580, acc.: 92.58%] [G loss: 2.937600]\n",
      "3674 [D loss: 0.182486, acc.: 92.58%] [G loss: 2.991745]\n",
      "3675 [D loss: 0.177447, acc.: 92.58%] [G loss: 2.965238]\n",
      "3676 [D loss: 0.197541, acc.: 92.19%] [G loss: 3.294274]\n",
      "3677 [D loss: 0.201594, acc.: 91.41%] [G loss: 2.803769]\n",
      "3678 [D loss: 0.218109, acc.: 91.41%] [G loss: 3.131619]\n",
      "3679 [D loss: 0.201131, acc.: 91.80%] [G loss: 3.041453]\n",
      "3680 [D loss: 0.201858, acc.: 91.80%] [G loss: 3.040981]\n",
      "3681 [D loss: 0.202474, acc.: 92.19%] [G loss: 2.913606]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "3682 [D loss: 0.207455, acc.: 92.58%] [G loss: 2.997825]\n",
      "3683 [D loss: 0.170648, acc.: 92.97%] [G loss: 3.022218]\n",
      "3684 [D loss: 0.199654, acc.: 91.80%] [G loss: 2.883525]\n",
      "3685 [D loss: 0.182481, acc.: 92.97%] [G loss: 3.410897]\n",
      "3686 [D loss: 0.176130, acc.: 92.97%] [G loss: 3.047462]\n",
      "3687 [D loss: 0.148854, acc.: 93.36%] [G loss: 3.429088]\n",
      "3688 [D loss: 0.185254, acc.: 92.97%] [G loss: 3.179977]\n",
      "3689 [D loss: 0.165324, acc.: 92.58%] [G loss: 3.277752]\n",
      "3690 [D loss: 0.204794, acc.: 92.58%] [G loss: 2.895177]\n",
      "3691 [D loss: 0.181475, acc.: 92.97%] [G loss: 3.064578]\n",
      "3692 [D loss: 0.182028, acc.: 92.97%] [G loss: 3.336381]\n",
      "3693 [D loss: 0.191548, acc.: 91.80%] [G loss: 3.264523]\n",
      "3694 [D loss: 0.179496, acc.: 92.58%] [G loss: 3.199198]\n",
      "3695 [D loss: 0.202985, acc.: 91.80%] [G loss: 3.161003]\n",
      "3696 [D loss: 0.226864, acc.: 91.80%] [G loss: 3.167849]\n",
      "3697 [D loss: 0.207883, acc.: 91.80%] [G loss: 2.846569]\n",
      "3698 [D loss: 0.215605, acc.: 91.80%] [G loss: 3.086550]\n",
      "3699 [D loss: 0.193452, acc.: 92.97%] [G loss: 3.169277]\n",
      "3700 [D loss: 0.196671, acc.: 92.58%] [G loss: 3.211568]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "3701 [D loss: 0.176000, acc.: 93.36%] [G loss: 3.132678]\n",
      "3702 [D loss: 0.209417, acc.: 91.41%] [G loss: 3.117644]\n",
      "3703 [D loss: 0.185730, acc.: 93.36%] [G loss: 3.207635]\n",
      "3704 [D loss: 0.187346, acc.: 93.36%] [G loss: 3.411937]\n",
      "3705 [D loss: 0.214001, acc.: 91.80%] [G loss: 3.244362]\n",
      "3706 [D loss: 0.163884, acc.: 93.36%] [G loss: 3.486406]\n",
      "3707 [D loss: 0.210872, acc.: 91.02%] [G loss: 3.329762]\n",
      "3708 [D loss: 0.208860, acc.: 92.58%] [G loss: 3.173405]\n",
      "3709 [D loss: 0.211480, acc.: 92.19%] [G loss: 3.040792]\n",
      "3710 [D loss: 0.174299, acc.: 92.58%] [G loss: 3.222790]\n",
      "3711 [D loss: 0.217081, acc.: 91.02%] [G loss: 3.495262]\n",
      "3712 [D loss: 0.208155, acc.: 91.80%] [G loss: 3.201802]\n",
      "3713 [D loss: 0.173702, acc.: 92.58%] [G loss: 3.278797]\n",
      "3714 [D loss: 0.198966, acc.: 91.80%] [G loss: 2.926030]\n",
      "3715 [D loss: 0.218438, acc.: 92.58%] [G loss: 3.144533]\n",
      "3716 [D loss: 0.197788, acc.: 92.58%] [G loss: 2.873520]\n",
      "3717 [D loss: 0.201076, acc.: 92.19%] [G loss: 3.109062]\n",
      "3718 [D loss: 0.200694, acc.: 92.58%] [G loss: 3.130278]\n",
      "3719 [D loss: 0.211823, acc.: 92.19%] [G loss: 2.988056]\n",
      "3720 [D loss: 0.224559, acc.: 91.41%] [G loss: 3.028440]\n",
      "3721 [D loss: 0.230134, acc.: 90.62%] [G loss: 3.243182]\n",
      "3722 [D loss: 0.201656, acc.: 92.19%] [G loss: 3.171897]\n",
      "3723 [D loss: 0.185002, acc.: 92.58%] [G loss: 3.090576]\n",
      "3724 [D loss: 0.220331, acc.: 91.02%] [G loss: 3.005200]\n",
      "3725 [D loss: 0.197998, acc.: 92.19%] [G loss: 2.874217]\n",
      "3726 [D loss: 0.202932, acc.: 92.58%] [G loss: 2.725077]\n",
      "3727 [D loss: 0.198227, acc.: 91.80%] [G loss: 3.056195]\n",
      "3728 [D loss: 0.206304, acc.: 92.19%] [G loss: 3.224373]\n",
      "3729 [D loss: 0.198439, acc.: 92.97%] [G loss: 3.171585]\n",
      "3730 [D loss: 0.186828, acc.: 93.36%] [G loss: 2.868804]\n",
      "3731 [D loss: 0.216229, acc.: 91.41%] [G loss: 2.722644]\n",
      "3732 [D loss: 0.200480, acc.: 91.80%] [G loss: 3.361163]\n",
      "3733 [D loss: 0.191811, acc.: 92.58%] [G loss: 3.009602]\n",
      "3734 [D loss: 0.197037, acc.: 92.58%] [G loss: 2.979587]\n",
      "3735 [D loss: 0.218751, acc.: 91.02%] [G loss: 3.204397]\n",
      "3736 [D loss: 0.221572, acc.: 92.58%] [G loss: 2.892134]\n",
      "3737 [D loss: 0.194243, acc.: 91.80%] [G loss: 3.188633]\n",
      "3738 [D loss: 0.218240, acc.: 91.80%] [G loss: 3.009182]\n",
      "3739 [D loss: 0.226766, acc.: 90.62%] [G loss: 3.147375]\n",
      "3740 [D loss: 0.210314, acc.: 92.58%] [G loss: 2.865475]\n",
      "3741 [D loss: 0.212439, acc.: 91.80%] [G loss: 3.139174]\n",
      "3742 [D loss: 0.191737, acc.: 92.58%] [G loss: 2.981070]\n",
      "3743 [D loss: 0.202187, acc.: 92.97%] [G loss: 2.922992]\n",
      "3744 [D loss: 0.189967, acc.: 92.58%] [G loss: 3.288160]\n",
      "3745 [D loss: 0.189094, acc.: 92.19%] [G loss: 3.139467]\n",
      "3746 [D loss: 0.208533, acc.: 92.97%] [G loss: 2.964890]\n",
      "3747 [D loss: 0.228573, acc.: 92.19%] [G loss: 2.925436]\n",
      "3748 [D loss: 0.212286, acc.: 91.41%] [G loss: 3.131594]\n",
      "3749 [D loss: 0.224649, acc.: 92.19%] [G loss: 2.985811]\n",
      "3750 [D loss: 0.195998, acc.: 92.58%] [G loss: 3.364537]\n",
      "3751 [D loss: 0.184268, acc.: 92.97%] [G loss: 3.140989]\n",
      "3752 [D loss: 0.202616, acc.: 91.80%] [G loss: 2.933858]\n",
      "3753 [D loss: 0.211999, acc.: 91.41%] [G loss: 2.960230]\n",
      "3754 [D loss: 0.203733, acc.: 91.80%] [G loss: 2.718606]\n",
      "3755 [D loss: 0.190788, acc.: 92.97%] [G loss: 2.862357]\n",
      "3756 [D loss: 0.205206, acc.: 91.80%] [G loss: 3.113543]\n",
      "3757 [D loss: 0.230504, acc.: 91.41%] [G loss: 3.032717]\n",
      "3758 [D loss: 0.214816, acc.: 91.80%] [G loss: 2.838716]\n",
      "3759 [D loss: 0.209159, acc.: 91.80%] [G loss: 3.172047]\n",
      "3760 [D loss: 0.192047, acc.: 92.19%] [G loss: 3.291682]\n",
      "3761 [D loss: 0.182706, acc.: 92.97%] [G loss: 2.964313]\n",
      "3762 [D loss: 0.197171, acc.: 91.80%] [G loss: 3.161849]\n",
      "3763 [D loss: 0.195766, acc.: 91.80%] [G loss: 3.129707]\n",
      "3764 [D loss: 0.199177, acc.: 92.19%] [G loss: 3.155677]\n",
      "3765 [D loss: 0.200871, acc.: 91.80%] [G loss: 3.083213]\n",
      "3766 [D loss: 0.191606, acc.: 92.19%] [G loss: 3.162550]\n",
      "3767 [D loss: 0.217258, acc.: 92.58%] [G loss: 3.220122]\n",
      "3768 [D loss: 0.209447, acc.: 91.80%] [G loss: 2.812316]\n",
      "3769 [D loss: 0.228061, acc.: 91.02%] [G loss: 3.059445]\n",
      "3770 [D loss: 0.204300, acc.: 92.19%] [G loss: 3.289805]\n",
      "3771 [D loss: 0.211179, acc.: 91.80%] [G loss: 3.122304]\n",
      "3772 [D loss: 0.216453, acc.: 92.58%] [G loss: 2.909987]\n",
      "3773 [D loss: 0.215493, acc.: 91.41%] [G loss: 3.252431]\n",
      "3774 [D loss: 0.214300, acc.: 92.19%] [G loss: 3.108426]\n",
      "3775 [D loss: 0.187009, acc.: 92.19%] [G loss: 3.132596]\n",
      "3776 [D loss: 0.175874, acc.: 92.97%] [G loss: 3.046012]\n",
      "3777 [D loss: 0.216988, acc.: 91.80%] [G loss: 2.907640]\n",
      "3778 [D loss: 0.201461, acc.: 92.97%] [G loss: 3.028229]\n",
      "3779 [D loss: 0.203103, acc.: 91.80%] [G loss: 3.150835]\n",
      "3780 [D loss: 0.210327, acc.: 92.19%] [G loss: 3.188780]\n",
      "3781 [D loss: 0.214307, acc.: 90.62%] [G loss: 2.971553]\n",
      "3782 [D loss: 0.192439, acc.: 92.97%] [G loss: 2.867107]\n",
      "3783 [D loss: 0.208754, acc.: 92.97%] [G loss: 2.913414]\n",
      "3784 [D loss: 0.204278, acc.: 92.19%] [G loss: 2.910732]\n",
      "3785 [D loss: 0.208362, acc.: 92.58%] [G loss: 3.061158]\n",
      "3786 [D loss: 0.203867, acc.: 92.19%] [G loss: 3.502940]\n",
      "3787 [D loss: 0.185804, acc.: 92.58%] [G loss: 3.416266]\n",
      "3788 [D loss: 0.190052, acc.: 92.58%] [G loss: 3.084379]\n",
      "3789 [D loss: 0.203228, acc.: 92.19%] [G loss: 3.159897]\n",
      "3790 [D loss: 0.184703, acc.: 92.58%] [G loss: 3.420935]\n",
      "3791 [D loss: 0.185572, acc.: 92.58%] [G loss: 3.505531]\n",
      "3792 [D loss: 0.190922, acc.: 92.97%] [G loss: 3.033915]\n",
      "3793 [D loss: 0.186340, acc.: 92.58%] [G loss: 2.869820]\n",
      "3794 [D loss: 0.194678, acc.: 91.80%] [G loss: 2.964530]\n",
      "3795 [D loss: 0.206905, acc.: 91.41%] [G loss: 3.099505]\n",
      "3796 [D loss: 0.229546, acc.: 91.80%] [G loss: 2.928830]\n",
      "3797 [D loss: 0.203906, acc.: 91.80%] [G loss: 2.878129]\n",
      "3798 [D loss: 0.221671, acc.: 90.23%] [G loss: 2.998045]\n",
      "3799 [D loss: 0.190501, acc.: 92.58%] [G loss: 2.969875]\n",
      "3800 [D loss: 0.202795, acc.: 92.97%] [G loss: 2.826899]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "3801 [D loss: 0.209187, acc.: 92.58%] [G loss: 2.892584]\n",
      "3802 [D loss: 0.210702, acc.: 91.80%] [G loss: 2.993412]\n",
      "3803 [D loss: 0.188366, acc.: 91.80%] [G loss: 3.028769]\n",
      "3804 [D loss: 0.199032, acc.: 92.58%] [G loss: 2.745855]\n",
      "3805 [D loss: 0.192920, acc.: 92.58%] [G loss: 2.983581]\n",
      "3806 [D loss: 0.202922, acc.: 92.58%] [G loss: 3.163087]\n",
      "3807 [D loss: 0.209146, acc.: 92.19%] [G loss: 3.235934]\n",
      "3808 [D loss: 0.186869, acc.: 92.19%] [G loss: 3.222871]\n",
      "3809 [D loss: 0.151787, acc.: 92.97%] [G loss: 3.015775]\n",
      "3810 [D loss: 0.190784, acc.: 92.58%] [G loss: 3.075949]\n",
      "3811 [D loss: 0.184178, acc.: 92.19%] [G loss: 3.404990]\n",
      "3812 [D loss: 0.201760, acc.: 92.19%] [G loss: 3.302249]\n",
      "3813 [D loss: 0.189697, acc.: 92.58%] [G loss: 3.524054]\n",
      "3814 [D loss: 0.210181, acc.: 91.80%] [G loss: 3.267697]\n",
      "3815 [D loss: 0.189275, acc.: 92.58%] [G loss: 3.085493]\n",
      "3816 [D loss: 0.187035, acc.: 92.97%] [G loss: 3.085376]\n",
      "3817 [D loss: 0.196184, acc.: 91.80%] [G loss: 2.802196]\n",
      "3818 [D loss: 0.187897, acc.: 92.58%] [G loss: 3.179304]\n",
      "3819 [D loss: 0.209078, acc.: 91.80%] [G loss: 3.156062]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "3820 [D loss: 0.206381, acc.: 91.41%] [G loss: 2.954880]\n",
      "3821 [D loss: 0.218240, acc.: 91.02%] [G loss: 2.834054]\n",
      "3822 [D loss: 0.203456, acc.: 92.58%] [G loss: 2.981791]\n",
      "3823 [D loss: 0.202388, acc.: 92.58%] [G loss: 2.892620]\n",
      "3824 [D loss: 0.196385, acc.: 92.19%] [G loss: 2.992304]\n",
      "3825 [D loss: 0.196972, acc.: 92.58%] [G loss: 2.879248]\n",
      "3826 [D loss: 0.209791, acc.: 92.19%] [G loss: 3.089916]\n",
      "3827 [D loss: 0.218615, acc.: 90.62%] [G loss: 3.204661]\n",
      "3828 [D loss: 0.190321, acc.: 91.80%] [G loss: 2.832343]\n",
      "3829 [D loss: 0.196380, acc.: 92.58%] [G loss: 3.200726]\n",
      "3830 [D loss: 0.209910, acc.: 91.02%] [G loss: 3.263830]\n",
      "3831 [D loss: 0.170868, acc.: 92.58%] [G loss: 2.973640]\n",
      "3832 [D loss: 0.190952, acc.: 91.80%] [G loss: 3.210544]\n",
      "3833 [D loss: 0.178194, acc.: 92.19%] [G loss: 3.149668]\n",
      "3834 [D loss: 0.183622, acc.: 92.58%] [G loss: 3.319989]\n",
      "3835 [D loss: 0.214726, acc.: 93.36%] [G loss: 2.994025]\n",
      "3836 [D loss: 0.199525, acc.: 92.19%] [G loss: 3.585258]\n",
      "3837 [D loss: 0.188455, acc.: 92.19%] [G loss: 3.346419]\n",
      "3838 [D loss: 0.210621, acc.: 90.62%] [G loss: 3.065876]\n",
      "3839 [D loss: 0.197970, acc.: 91.41%] [G loss: 3.112364]\n",
      "3840 [D loss: 0.190546, acc.: 91.41%] [G loss: 3.182018]\n",
      "3841 [D loss: 0.176833, acc.: 92.97%] [G loss: 2.931477]\n",
      "3842 [D loss: 0.176627, acc.: 93.75%] [G loss: 3.118501]\n",
      "3843 [D loss: 0.178167, acc.: 92.58%] [G loss: 3.182515]\n",
      "3844 [D loss: 0.209635, acc.: 91.41%] [G loss: 2.977721]\n",
      "3845 [D loss: 0.225171, acc.: 91.02%] [G loss: 2.917326]\n",
      "3846 [D loss: 0.203218, acc.: 92.58%] [G loss: 3.100787]\n",
      "3847 [D loss: 0.203589, acc.: 91.41%] [G loss: 2.811282]\n",
      "3848 [D loss: 0.199744, acc.: 92.97%] [G loss: 3.271172]\n",
      "3849 [D loss: 0.207732, acc.: 92.58%] [G loss: 3.140297]\n",
      "3850 [D loss: 0.232162, acc.: 90.62%] [G loss: 3.277598]\n",
      "3851 [D loss: 0.216546, acc.: 91.80%] [G loss: 3.149964]\n",
      "3852 [D loss: 0.233596, acc.: 91.41%] [G loss: 2.992723]\n",
      "3853 [D loss: 0.221091, acc.: 92.19%] [G loss: 2.931662]\n",
      "3854 [D loss: 0.196102, acc.: 92.19%] [G loss: 3.163919]\n",
      "3855 [D loss: 0.214004, acc.: 91.02%] [G loss: 3.268119]\n",
      "3856 [D loss: 0.186148, acc.: 92.58%] [G loss: 2.933702]\n",
      "3857 [D loss: 0.194863, acc.: 91.41%] [G loss: 3.070693]\n",
      "3858 [D loss: 0.194335, acc.: 92.19%] [G loss: 3.074613]\n",
      "3859 [D loss: 0.218533, acc.: 91.02%] [G loss: 3.333298]\n",
      "3860 [D loss: 0.197277, acc.: 92.19%] [G loss: 2.910943]\n",
      "3861 [D loss: 0.190078, acc.: 92.58%] [G loss: 3.175760]\n",
      "3862 [D loss: 0.202121, acc.: 91.02%] [G loss: 2.741236]\n",
      "3863 [D loss: 0.195449, acc.: 92.19%] [G loss: 2.823966]\n",
      "3864 [D loss: 0.208884, acc.: 91.41%] [G loss: 3.157737]\n",
      "3865 [D loss: 0.188714, acc.: 92.58%] [G loss: 2.684041]\n",
      "3866 [D loss: 0.198988, acc.: 91.41%] [G loss: 2.634239]\n",
      "3867 [D loss: 0.195103, acc.: 91.41%] [G loss: 2.946793]\n",
      "3868 [D loss: 0.182757, acc.: 92.19%] [G loss: 3.160954]\n",
      "3869 [D loss: 0.213586, acc.: 91.02%] [G loss: 3.072021]\n",
      "3870 [D loss: 0.207676, acc.: 92.19%] [G loss: 2.987915]\n",
      "3871 [D loss: 0.197143, acc.: 92.19%] [G loss: 2.993285]\n",
      "3872 [D loss: 0.193009, acc.: 93.36%] [G loss: 3.031843]\n",
      "3873 [D loss: 0.194339, acc.: 91.80%] [G loss: 3.215116]\n",
      "3874 [D loss: 0.198265, acc.: 92.58%] [G loss: 3.279027]\n",
      "3875 [D loss: 0.217950, acc.: 92.58%] [G loss: 3.020577]\n",
      "3876 [D loss: 0.217651, acc.: 91.80%] [G loss: 3.114226]\n",
      "3877 [D loss: 0.200198, acc.: 92.97%] [G loss: 2.818483]\n",
      "3878 [D loss: 0.220719, acc.: 91.80%] [G loss: 2.947054]\n",
      "3879 [D loss: 0.218945, acc.: 91.41%] [G loss: 2.915385]\n",
      "3880 [D loss: 0.193176, acc.: 92.19%] [G loss: 2.952456]\n",
      "3881 [D loss: 0.192141, acc.: 92.58%] [G loss: 3.175699]\n",
      "3882 [D loss: 0.221451, acc.: 91.80%] [G loss: 2.933840]\n",
      "3883 [D loss: 0.215198, acc.: 92.97%] [G loss: 2.938713]\n",
      "3884 [D loss: 0.213801, acc.: 92.19%] [G loss: 3.009188]\n",
      "3885 [D loss: 0.188482, acc.: 91.80%] [G loss: 3.077448]\n",
      "3886 [D loss: 0.201398, acc.: 92.19%] [G loss: 2.873626]\n",
      "3887 [D loss: 0.218010, acc.: 91.80%] [G loss: 2.804606]\n",
      "3888 [D loss: 0.209039, acc.: 92.19%] [G loss: 3.072891]\n",
      "3889 [D loss: 0.202021, acc.: 91.80%] [G loss: 3.035492]\n",
      "3890 [D loss: 0.198753, acc.: 92.19%] [G loss: 3.280131]\n",
      "3891 [D loss: 0.205707, acc.: 92.58%] [G loss: 2.920716]\n",
      "3892 [D loss: 0.213625, acc.: 92.19%] [G loss: 3.369143]\n",
      "3893 [D loss: 0.204429, acc.: 92.19%] [G loss: 3.394515]\n",
      "3894 [D loss: 0.206020, acc.: 92.58%] [G loss: 3.048956]\n",
      "3895 [D loss: 0.206174, acc.: 91.80%] [G loss: 3.090219]\n",
      "3896 [D loss: 0.206551, acc.: 92.58%] [G loss: 3.270589]\n",
      "3897 [D loss: 0.190813, acc.: 92.19%] [G loss: 3.025358]\n",
      "3898 [D loss: 0.190147, acc.: 91.80%] [G loss: 3.219754]\n",
      "3899 [D loss: 0.226513, acc.: 91.80%] [G loss: 2.824354]\n",
      "3900 [D loss: 0.215571, acc.: 91.41%] [G loss: 3.083100]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "3901 [D loss: 0.198321, acc.: 92.97%] [G loss: 3.267655]\n",
      "3902 [D loss: 0.181092, acc.: 92.19%] [G loss: 3.072275]\n",
      "3903 [D loss: 0.172160, acc.: 92.97%] [G loss: 3.115409]\n",
      "3904 [D loss: 0.185730, acc.: 91.80%] [G loss: 3.131022]\n",
      "3905 [D loss: 0.184324, acc.: 91.80%] [G loss: 3.250176]\n",
      "3906 [D loss: 0.194827, acc.: 91.80%] [G loss: 3.028834]\n",
      "3907 [D loss: 0.204207, acc.: 92.97%] [G loss: 2.985857]\n",
      "3908 [D loss: 0.179433, acc.: 92.58%] [G loss: 3.507549]\n",
      "3909 [D loss: 0.193839, acc.: 92.19%] [G loss: 2.987792]\n",
      "3910 [D loss: 0.193824, acc.: 91.80%] [G loss: 3.126875]\n",
      "3911 [D loss: 0.188674, acc.: 92.97%] [G loss: 3.129379]\n",
      "3912 [D loss: 0.226266, acc.: 92.19%] [G loss: 3.024101]\n",
      "3913 [D loss: 0.206677, acc.: 91.41%] [G loss: 2.928489]\n",
      "3914 [D loss: 0.177386, acc.: 92.19%] [G loss: 3.206766]\n",
      "3915 [D loss: 0.197426, acc.: 92.19%] [G loss: 3.069869]\n",
      "3916 [D loss: 0.182432, acc.: 93.36%] [G loss: 3.118808]\n",
      "3917 [D loss: 0.208911, acc.: 92.19%] [G loss: 2.894215]\n",
      "3918 [D loss: 0.191054, acc.: 92.58%] [G loss: 3.173706]\n",
      "3919 [D loss: 0.204525, acc.: 92.19%] [G loss: 3.214449]\n",
      "3920 [D loss: 0.197187, acc.: 92.58%] [G loss: 3.223062]\n",
      "3921 [D loss: 0.183638, acc.: 92.97%] [G loss: 3.176685]\n",
      "3922 [D loss: 0.207047, acc.: 92.97%] [G loss: 2.985626]\n",
      "3923 [D loss: 0.216909, acc.: 91.41%] [G loss: 3.504957]\n",
      "3924 [D loss: 0.208192, acc.: 91.80%] [G loss: 3.218016]\n",
      "3925 [D loss: 0.204623, acc.: 92.19%] [G loss: 3.079390]\n",
      "3926 [D loss: 0.180139, acc.: 92.58%] [G loss: 3.030915]\n",
      "3927 [D loss: 0.217156, acc.: 92.19%] [G loss: 3.154418]\n",
      "3928 [D loss: 0.178273, acc.: 92.19%] [G loss: 3.603941]\n",
      "3929 [D loss: 0.204719, acc.: 92.97%] [G loss: 2.950986]\n",
      "3930 [D loss: 0.186961, acc.: 92.19%] [G loss: 3.051144]\n",
      "3931 [D loss: 0.201313, acc.: 92.58%] [G loss: 2.840870]\n",
      "3932 [D loss: 0.203265, acc.: 92.19%] [G loss: 2.911186]\n",
      "3933 [D loss: 0.180457, acc.: 93.75%] [G loss: 3.078427]\n",
      "3934 [D loss: 0.198986, acc.: 91.80%] [G loss: 3.492285]\n",
      "3935 [D loss: 0.184593, acc.: 92.19%] [G loss: 3.361907]\n",
      "3936 [D loss: 0.189772, acc.: 91.80%] [G loss: 3.252965]\n",
      "3937 [D loss: 0.167100, acc.: 92.58%] [G loss: 3.062991]\n",
      "3938 [D loss: 0.198168, acc.: 92.19%] [G loss: 3.157092]\n",
      "3939 [D loss: 0.183117, acc.: 91.80%] [G loss: 3.335765]\n",
      "3940 [D loss: 0.184045, acc.: 92.97%] [G loss: 2.941726]\n",
      "3941 [D loss: 0.178374, acc.: 92.97%] [G loss: 3.282166]\n",
      "3942 [D loss: 0.181209, acc.: 92.19%] [G loss: 3.265025]\n",
      "3943 [D loss: 0.194449, acc.: 91.80%] [G loss: 3.340051]\n",
      "3944 [D loss: 0.225625, acc.: 90.23%] [G loss: 3.239011]\n",
      "3945 [D loss: 0.198423, acc.: 91.80%] [G loss: 2.938665]\n",
      "3946 [D loss: 0.170271, acc.: 92.97%] [G loss: 3.011499]\n",
      "3947 [D loss: 0.228807, acc.: 91.02%] [G loss: 3.187994]\n",
      "3948 [D loss: 0.203806, acc.: 92.19%] [G loss: 3.236928]\n",
      "3949 [D loss: 0.207248, acc.: 92.19%] [G loss: 2.997916]\n",
      "3950 [D loss: 0.241087, acc.: 90.23%] [G loss: 3.233428]\n",
      "3951 [D loss: 0.180031, acc.: 92.58%] [G loss: 3.199474]\n",
      "3952 [D loss: 0.188144, acc.: 92.97%] [G loss: 3.067209]\n",
      "3953 [D loss: 0.206575, acc.: 92.58%] [G loss: 2.965577]\n",
      "3954 [D loss: 0.204731, acc.: 92.58%] [G loss: 3.015754]\n",
      "3955 [D loss: 0.210025, acc.: 92.19%] [G loss: 3.017068]\n",
      "3956 [D loss: 0.212341, acc.: 91.80%] [G loss: 3.281111]\n",
      "3957 [D loss: 0.200977, acc.: 92.58%] [G loss: 2.875195]\n",
      "3958 [D loss: 0.209043, acc.: 91.80%] [G loss: 2.903165]\n",
      "3959 [D loss: 0.200595, acc.: 92.97%] [G loss: 2.880657]\n",
      "3960 [D loss: 0.194342, acc.: 92.58%] [G loss: 3.147547]\n",
      "3961 [D loss: 0.187635, acc.: 92.97%] [G loss: 3.180968]\n",
      "3962 [D loss: 0.203356, acc.: 92.58%] [G loss: 2.911120]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "3963 [D loss: 0.189333, acc.: 92.58%] [G loss: 3.088719]\n",
      "3964 [D loss: 0.203820, acc.: 92.97%] [G loss: 3.202411]\n",
      "3965 [D loss: 0.192850, acc.: 92.58%] [G loss: 3.093203]\n",
      "3966 [D loss: 0.191060, acc.: 93.75%] [G loss: 2.905670]\n",
      "3967 [D loss: 0.202724, acc.: 91.41%] [G loss: 3.638504]\n",
      "3968 [D loss: 0.182543, acc.: 92.97%] [G loss: 2.781820]\n",
      "3969 [D loss: 0.175486, acc.: 92.58%] [G loss: 3.407020]\n",
      "3970 [D loss: 0.202193, acc.: 91.80%] [G loss: 3.212248]\n",
      "3971 [D loss: 0.181111, acc.: 93.36%] [G loss: 3.294103]\n",
      "3972 [D loss: 0.189521, acc.: 91.41%] [G loss: 3.277445]\n",
      "3973 [D loss: 0.152415, acc.: 92.97%] [G loss: 3.317199]\n",
      "3974 [D loss: 0.189502, acc.: 92.97%] [G loss: 3.069149]\n",
      "3975 [D loss: 0.174103, acc.: 92.19%] [G loss: 3.712795]\n",
      "3976 [D loss: 0.180120, acc.: 93.36%] [G loss: 2.885736]\n",
      "3977 [D loss: 0.185643, acc.: 93.75%] [G loss: 2.775621]\n",
      "3978 [D loss: 0.185032, acc.: 91.02%] [G loss: 3.295838]\n",
      "3979 [D loss: 0.193162, acc.: 93.36%] [G loss: 2.921827]\n",
      "3980 [D loss: 0.186593, acc.: 93.36%] [G loss: 2.987166]\n",
      "3981 [D loss: 0.213258, acc.: 92.58%] [G loss: 3.417989]\n",
      "3982 [D loss: 0.220718, acc.: 92.58%] [G loss: 2.925678]\n",
      "3983 [D loss: 0.207084, acc.: 91.80%] [G loss: 3.199112]\n",
      "3984 [D loss: 0.206530, acc.: 92.19%] [G loss: 2.992620]\n",
      "3985 [D loss: 0.182965, acc.: 91.80%] [G loss: 3.229919]\n",
      "3986 [D loss: 0.183540, acc.: 92.19%] [G loss: 3.330452]\n",
      "3987 [D loss: 0.181001, acc.: 92.97%] [G loss: 3.225904]\n",
      "3988 [D loss: 0.197077, acc.: 91.80%] [G loss: 2.801318]\n",
      "3989 [D loss: 0.204586, acc.: 92.19%] [G loss: 3.247055]\n",
      "3990 [D loss: 0.207043, acc.: 92.58%] [G loss: 3.580383]\n",
      "3991 [D loss: 0.196341, acc.: 92.58%] [G loss: 3.393288]\n",
      "3992 [D loss: 0.188894, acc.: 91.41%] [G loss: 3.182176]\n",
      "3993 [D loss: 0.183889, acc.: 93.36%] [G loss: 3.058458]\n",
      "3994 [D loss: 0.212854, acc.: 91.41%] [G loss: 2.942017]\n",
      "3995 [D loss: 0.193753, acc.: 91.80%] [G loss: 3.138115]\n",
      "3996 [D loss: 0.193492, acc.: 91.80%] [G loss: 3.241534]\n",
      "3997 [D loss: 0.199579, acc.: 92.97%] [G loss: 3.059325]\n",
      "3998 [D loss: 0.188161, acc.: 92.97%] [G loss: 2.965535]\n",
      "3999 [D loss: 0.202740, acc.: 92.19%] [G loss: 2.863700]\n",
      "4000 [D loss: 0.223050, acc.: 91.80%] [G loss: 3.150146]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "4001 [D loss: 0.200326, acc.: 92.19%] [G loss: 3.175664]\n",
      "4002 [D loss: 0.185587, acc.: 92.58%] [G loss: 2.974633]\n",
      "4003 [D loss: 0.193928, acc.: 92.19%] [G loss: 2.784836]\n",
      "4004 [D loss: 0.217752, acc.: 92.19%] [G loss: 3.174392]\n",
      "4005 [D loss: 0.211677, acc.: 91.41%] [G loss: 3.362828]\n",
      "4006 [D loss: 0.185991, acc.: 92.97%] [G loss: 2.985369]\n",
      "4007 [D loss: 0.162697, acc.: 93.36%] [G loss: 3.071950]\n",
      "4008 [D loss: 0.200043, acc.: 92.19%] [G loss: 2.910640]\n",
      "4009 [D loss: 0.171561, acc.: 93.36%] [G loss: 3.066525]\n",
      "4010 [D loss: 0.185587, acc.: 92.97%] [G loss: 3.156638]\n",
      "4011 [D loss: 0.181936, acc.: 92.97%] [G loss: 2.842078]\n",
      "4012 [D loss: 0.194058, acc.: 92.58%] [G loss: 3.045517]\n",
      "4013 [D loss: 0.202308, acc.: 92.58%] [G loss: 2.866463]\n",
      "4014 [D loss: 0.209543, acc.: 92.19%] [G loss: 2.959630]\n",
      "4015 [D loss: 0.228236, acc.: 92.58%] [G loss: 3.204287]\n",
      "4016 [D loss: 0.215345, acc.: 92.97%] [G loss: 3.116647]\n",
      "4017 [D loss: 0.196558, acc.: 92.58%] [G loss: 2.844611]\n",
      "4018 [D loss: 0.218689, acc.: 92.58%] [G loss: 3.201682]\n",
      "4019 [D loss: 0.201703, acc.: 92.58%] [G loss: 3.020008]\n",
      "4020 [D loss: 0.205265, acc.: 92.19%] [G loss: 3.228427]\n",
      "4021 [D loss: 0.209522, acc.: 91.80%] [G loss: 2.910709]\n",
      "4022 [D loss: 0.272435, acc.: 91.80%] [G loss: 2.862425]\n",
      "4023 [D loss: 0.213475, acc.: 91.80%] [G loss: 2.936601]\n",
      "4024 [D loss: 0.208512, acc.: 92.19%] [G loss: 3.132732]\n",
      "4025 [D loss: 0.195944, acc.: 91.41%] [G loss: 2.941430]\n",
      "4026 [D loss: 0.215627, acc.: 91.80%] [G loss: 3.354383]\n",
      "4027 [D loss: 0.191412, acc.: 92.58%] [G loss: 2.985885]\n",
      "4028 [D loss: 0.207003, acc.: 92.97%] [G loss: 3.065719]\n",
      "4029 [D loss: 0.186352, acc.: 92.58%] [G loss: 3.283269]\n",
      "4030 [D loss: 0.187796, acc.: 93.36%] [G loss: 3.124714]\n",
      "4031 [D loss: 0.174643, acc.: 92.97%] [G loss: 3.282925]\n",
      "4032 [D loss: 0.182297, acc.: 92.97%] [G loss: 3.070487]\n",
      "4033 [D loss: 0.221933, acc.: 91.80%] [G loss: 2.875311]\n",
      "4034 [D loss: 0.223485, acc.: 91.41%] [G loss: 3.116360]\n",
      "4035 [D loss: 0.187540, acc.: 92.58%] [G loss: 3.141563]\n",
      "4036 [D loss: 0.206638, acc.: 92.58%] [G loss: 2.822195]\n",
      "4037 [D loss: 0.214255, acc.: 92.58%] [G loss: 3.012521]\n",
      "4038 [D loss: 0.190468, acc.: 92.97%] [G loss: 2.981721]\n",
      "4039 [D loss: 0.223027, acc.: 91.02%] [G loss: 3.137422]\n",
      "4040 [D loss: 0.207171, acc.: 92.97%] [G loss: 3.022878]\n",
      "4041 [D loss: 0.206414, acc.: 91.80%] [G loss: 2.918487]\n",
      "4042 [D loss: 0.234400, acc.: 91.41%] [G loss: 2.961268]\n",
      "4043 [D loss: 0.225019, acc.: 91.02%] [G loss: 3.214777]\n",
      "4044 [D loss: 0.216239, acc.: 92.58%] [G loss: 3.021550]\n",
      "4045 [D loss: 0.189000, acc.: 92.19%] [G loss: 2.942384]\n",
      "4046 [D loss: 0.186484, acc.: 92.58%] [G loss: 3.038081]\n",
      "4047 [D loss: 0.205203, acc.: 91.02%] [G loss: 3.112178]\n",
      "4048 [D loss: 0.213089, acc.: 91.02%] [G loss: 3.058499]\n",
      "4049 [D loss: 0.204021, acc.: 92.19%] [G loss: 3.208240]\n",
      "4050 [D loss: 0.207512, acc.: 92.58%] [G loss: 3.000487]\n",
      "4051 [D loss: 0.196041, acc.: 92.58%] [G loss: 3.208518]\n",
      "4052 [D loss: 0.209808, acc.: 91.80%] [G loss: 2.968477]\n",
      "4053 [D loss: 0.194770, acc.: 92.97%] [G loss: 2.813528]\n",
      "4054 [D loss: 0.215859, acc.: 92.19%] [G loss: 3.197328]\n",
      "4055 [D loss: 0.209812, acc.: 91.80%] [G loss: 3.344457]\n",
      "4056 [D loss: 0.176209, acc.: 92.97%] [G loss: 3.035484]\n",
      "4057 [D loss: 0.173212, acc.: 92.97%] [G loss: 3.200143]\n",
      "4058 [D loss: 0.173171, acc.: 93.36%] [G loss: 2.867196]\n",
      "4059 [D loss: 0.190918, acc.: 92.97%] [G loss: 3.037031]\n",
      "4060 [D loss: 0.187371, acc.: 92.19%] [G loss: 3.078088]\n",
      "4061 [D loss: 0.184705, acc.: 92.58%] [G loss: 3.227611]\n",
      "4062 [D loss: 0.213704, acc.: 91.41%] [G loss: 3.117738]\n",
      "4063 [D loss: 0.192737, acc.: 92.58%] [G loss: 3.397316]\n",
      "4064 [D loss: 0.206025, acc.: 91.80%] [G loss: 3.267273]\n",
      "4065 [D loss: 0.204345, acc.: 92.19%] [G loss: 2.915919]\n",
      "4066 [D loss: 0.193115, acc.: 92.19%] [G loss: 3.289180]\n",
      "4067 [D loss: 0.177199, acc.: 92.97%] [G loss: 3.203001]\n",
      "4068 [D loss: 0.162102, acc.: 93.75%] [G loss: 3.083670]\n",
      "4069 [D loss: 0.175527, acc.: 92.58%] [G loss: 3.579108]\n",
      "4070 [D loss: 0.203566, acc.: 91.80%] [G loss: 2.973266]\n",
      "4071 [D loss: 0.207430, acc.: 91.80%] [G loss: 3.165231]\n",
      "4072 [D loss: 0.213991, acc.: 91.02%] [G loss: 3.269906]\n",
      "4073 [D loss: 0.206960, acc.: 91.41%] [G loss: 2.980195]\n",
      "4074 [D loss: 0.166853, acc.: 92.19%] [G loss: 3.050882]\n",
      "4075 [D loss: 0.214357, acc.: 91.02%] [G loss: 3.077281]\n",
      "4076 [D loss: 0.177493, acc.: 92.58%] [G loss: 2.902488]\n",
      "4077 [D loss: 0.182499, acc.: 91.80%] [G loss: 2.868271]\n",
      "4078 [D loss: 0.197948, acc.: 92.58%] [G loss: 2.957920]\n",
      "4079 [D loss: 0.224674, acc.: 91.41%] [G loss: 2.994762]\n",
      "4080 [D loss: 0.188083, acc.: 92.58%] [G loss: 3.345155]\n",
      "4081 [D loss: 0.204180, acc.: 91.80%] [G loss: 3.013965]\n",
      "4082 [D loss: 0.180587, acc.: 92.58%] [G loss: 3.117750]\n",
      "4083 [D loss: 0.203894, acc.: 92.58%] [G loss: 2.975892]\n",
      "4084 [D loss: 0.218277, acc.: 91.41%] [G loss: 3.324919]\n",
      "4085 [D loss: 0.195570, acc.: 92.58%] [G loss: 3.007669]\n",
      "4086 [D loss: 0.246299, acc.: 89.84%] [G loss: 2.978493]\n",
      "4087 [D loss: 0.207686, acc.: 91.80%] [G loss: 2.987159]\n",
      "4088 [D loss: 0.204462, acc.: 92.58%] [G loss: 3.134193]\n",
      "4089 [D loss: 0.198626, acc.: 92.19%] [G loss: 2.992300]\n",
      "4090 [D loss: 0.213032, acc.: 92.58%] [G loss: 3.362378]\n",
      "4091 [D loss: 0.199879, acc.: 92.58%] [G loss: 3.139000]\n",
      "4092 [D loss: 0.215497, acc.: 92.97%] [G loss: 2.975287]\n",
      "4093 [D loss: 0.187624, acc.: 92.97%] [G loss: 2.973321]\n",
      "4094 [D loss: 0.193178, acc.: 93.36%] [G loss: 3.107373]\n",
      "4095 [D loss: 0.207479, acc.: 92.97%] [G loss: 2.863039]\n",
      "4096 [D loss: 0.195669, acc.: 91.80%] [G loss: 2.876032]\n",
      "4097 [D loss: 0.211864, acc.: 91.80%] [G loss: 2.943495]\n",
      "4098 [D loss: 0.211311, acc.: 92.19%] [G loss: 2.945396]\n",
      "4099 [D loss: 0.207977, acc.: 91.80%] [G loss: 3.379316]\n",
      "4100 [D loss: 0.180197, acc.: 92.58%] [G loss: 3.526343]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "generated_data\n",
      "4101 [D loss: 0.192115, acc.: 92.97%] [G loss: 2.852526]\n",
      "4102 [D loss: 0.201140, acc.: 91.80%] [G loss: 2.963243]\n",
      "4103 [D loss: 0.206216, acc.: 92.19%] [G loss: 2.955365]\n",
      "4104 [D loss: 0.188744, acc.: 92.97%] [G loss: 2.946686]\n",
      "4105 [D loss: 0.173779, acc.: 92.58%] [G loss: 3.962328]\n",
      "4106 [D loss: 0.173669, acc.: 92.58%] [G loss: 3.383140]\n",
      "4107 [D loss: 0.153577, acc.: 92.97%] [G loss: 3.364258]\n",
      "4108 [D loss: 0.174204, acc.: 92.97%] [G loss: 2.947690]\n",
      "4109 [D loss: 0.167198, acc.: 93.36%] [G loss: 3.360899]\n",
      "4110 [D loss: 0.198703, acc.: 92.97%] [G loss: 3.188787]\n",
      "4111 [D loss: 0.185362, acc.: 92.97%] [G loss: 3.344616]\n",
      "4112 [D loss: 0.181990, acc.: 92.19%] [G loss: 3.256187]\n",
      "4113 [D loss: 0.210769, acc.: 92.58%] [G loss: 2.816457]\n",
      "4114 [D loss: 0.186765, acc.: 92.97%] [G loss: 3.072851]\n",
      "4115 [D loss: 0.196747, acc.: 92.19%] [G loss: 3.068596]\n",
      "4116 [D loss: 0.181473, acc.: 92.97%] [G loss: 3.547223]\n",
      "4117 [D loss: 0.168474, acc.: 93.36%] [G loss: 3.270884]\n",
      "4118 [D loss: 0.184653, acc.: 92.58%] [G loss: 3.358121]\n",
      "4119 [D loss: 0.179931, acc.: 92.58%] [G loss: 3.181221]\n",
      "4120 [D loss: 0.198257, acc.: 92.58%] [G loss: 3.498646]\n",
      "4121 [D loss: 0.187232, acc.: 92.97%] [G loss: 3.083313]\n",
      "4122 [D loss: 0.225015, acc.: 91.41%] [G loss: 2.893052]\n",
      "4123 [D loss: 0.193319, acc.: 91.41%] [G loss: 2.972024]\n",
      "4124 [D loss: 0.187773, acc.: 92.58%] [G loss: 3.186032]\n",
      "4125 [D loss: 0.218984, acc.: 91.41%] [G loss: 3.056097]\n",
      "4126 [D loss: 0.184336, acc.: 92.58%] [G loss: 3.280073]\n",
      "4127 [D loss: 0.180317, acc.: 92.58%] [G loss: 3.259058]\n",
      "4128 [D loss: 0.190270, acc.: 92.97%] [G loss: 2.805753]\n",
      "4129 [D loss: 0.200443, acc.: 92.19%] [G loss: 3.131657]\n",
      "4130 [D loss: 0.203727, acc.: 92.19%] [G loss: 3.388360]\n",
      "4131 [D loss: 0.209354, acc.: 91.02%] [G loss: 3.008777]\n",
      "4132 [D loss: 0.197746, acc.: 92.19%] [G loss: 3.224176]\n",
      "4133 [D loss: 0.214649, acc.: 91.41%] [G loss: 3.377252]\n",
      "4134 [D loss: 0.213505, acc.: 91.80%] [G loss: 3.139657]\n",
      "4135 [D loss: 0.181116, acc.: 92.58%] [G loss: 3.123468]\n",
      "4136 [D loss: 0.220228, acc.: 91.80%] [G loss: 3.320456]\n",
      "4137 [D loss: 0.218554, acc.: 91.02%] [G loss: 3.244867]\n",
      "4138 [D loss: 0.170679, acc.: 93.75%] [G loss: 3.279875]\n",
      "4139 [D loss: 0.199383, acc.: 92.97%] [G loss: 3.511436]\n",
      "4140 [D loss: 0.184077, acc.: 92.97%] [G loss: 3.135639]\n",
      "4141 [D loss: 0.189835, acc.: 91.41%] [G loss: 3.378829]\n",
      "4142 [D loss: 0.204348, acc.: 92.97%] [G loss: 3.005772]\n",
      "4143 [D loss: 0.201049, acc.: 92.58%] [G loss: 3.081350]\n",
      "4144 [D loss: 0.198945, acc.: 92.58%] [G loss: 3.336057]\n",
      "4145 [D loss: 0.201801, acc.: 92.19%] [G loss: 3.138670]\n",
      "4146 [D loss: 0.162579, acc.: 92.19%] [G loss: 3.332989]\n",
      "4147 [D loss: 0.213267, acc.: 92.19%] [G loss: 2.812469]\n",
      "4148 [D loss: 0.196015, acc.: 92.19%] [G loss: 3.222465]\n",
      "4149 [D loss: 0.203063, acc.: 91.80%] [G loss: 3.436346]\n",
      "4150 [D loss: 0.206092, acc.: 92.19%] [G loss: 2.996150]\n",
      "4151 [D loss: 0.206243, acc.: 92.58%] [G loss: 3.306136]\n",
      "4152 [D loss: 0.194192, acc.: 92.19%] [G loss: 3.327399]\n",
      "4153 [D loss: 0.185339, acc.: 92.58%] [G loss: 2.987736]\n",
      "4154 [D loss: 0.187015, acc.: 92.19%] [G loss: 3.020885]\n",
      "4155 [D loss: 0.200598, acc.: 91.02%] [G loss: 2.937513]\n",
      "4156 [D loss: 0.222156, acc.: 92.19%] [G loss: 3.090058]\n",
      "4157 [D loss: 0.209505, acc.: 92.19%] [G loss: 2.869480]\n",
      "4158 [D loss: 0.216952, acc.: 92.97%] [G loss: 3.253315]\n",
      "4159 [D loss: 0.209422, acc.: 91.41%] [G loss: 3.079951]\n",
      "4160 [D loss: 0.188119, acc.: 92.19%] [G loss: 2.980256]\n",
      "4161 [D loss: 0.156270, acc.: 92.97%] [G loss: 3.359270]\n",
      "4162 [D loss: 0.191507, acc.: 93.36%] [G loss: 3.164565]\n",
      "4163 [D loss: 0.179984, acc.: 93.36%] [G loss: 3.122998]\n",
      "4164 [D loss: 0.190868, acc.: 91.80%] [G loss: 3.280514]\n",
      "4165 [D loss: 0.200441, acc.: 93.36%] [G loss: 2.951428]\n",
      "4166 [D loss: 0.208582, acc.: 91.80%] [G loss: 3.112296]\n",
      "4167 [D loss: 0.200456, acc.: 91.80%] [G loss: 2.977453]\n",
      "4168 [D loss: 0.198901, acc.: 93.36%] [G loss: 3.144409]\n",
      "4169 [D loss: 0.203895, acc.: 92.19%] [G loss: 3.245892]\n",
      "4170 [D loss: 0.232108, acc.: 91.41%] [G loss: 3.300457]\n",
      "4171 [D loss: 0.187645, acc.: 91.41%] [G loss: 3.205839]\n",
      "4172 [D loss: 0.190737, acc.: 92.19%] [G loss: 3.345466]\n",
      "4173 [D loss: 0.186873, acc.: 92.58%] [G loss: 3.061011]\n",
      "4174 [D loss: 0.197251, acc.: 92.19%] [G loss: 2.950027]\n",
      "4175 [D loss: 0.199807, acc.: 92.58%] [G loss: 3.116437]\n",
      "4176 [D loss: 0.198471, acc.: 91.80%] [G loss: 2.861776]\n",
      "4177 [D loss: 0.212357, acc.: 91.80%] [G loss: 3.121153]\n",
      "4178 [D loss: 0.202348, acc.: 92.58%] [G loss: 3.385460]\n",
      "4179 [D loss: 0.203112, acc.: 91.80%] [G loss: 3.306415]\n",
      "4180 [D loss: 0.207033, acc.: 92.97%] [G loss: 3.307728]\n",
      "4181 [D loss: 0.198954, acc.: 91.80%] [G loss: 3.526786]\n",
      "4182 [D loss: 0.208222, acc.: 92.58%] [G loss: 2.982796]\n",
      "4183 [D loss: 0.190496, acc.: 92.19%] [G loss: 3.416240]\n",
      "4184 [D loss: 0.182225, acc.: 92.19%] [G loss: 3.483829]\n",
      "4185 [D loss: 0.170724, acc.: 92.97%] [G loss: 3.237971]\n",
      "4186 [D loss: 0.177501, acc.: 93.75%] [G loss: 3.185144]\n",
      "4187 [D loss: 0.209493, acc.: 91.41%] [G loss: 2.998573]\n",
      "4188 [D loss: 0.183945, acc.: 93.36%] [G loss: 2.913408]\n",
      "4189 [D loss: 0.217923, acc.: 91.02%] [G loss: 3.076507]\n",
      "4190 [D loss: 0.221327, acc.: 91.41%] [G loss: 3.131821]\n",
      "4191 [D loss: 0.221626, acc.: 91.02%] [G loss: 3.049803]\n",
      "4192 [D loss: 0.201543, acc.: 91.41%] [G loss: 3.385832]\n",
      "4193 [D loss: 0.195478, acc.: 92.58%] [G loss: 3.029253]\n",
      "4194 [D loss: 0.188942, acc.: 92.58%] [G loss: 2.991294]\n",
      "4195 [D loss: 0.195860, acc.: 92.58%] [G loss: 3.183389]\n",
      "4196 [D loss: 0.198196, acc.: 92.58%] [G loss: 3.356589]\n",
      "4197 [D loss: 0.208898, acc.: 92.19%] [G loss: 2.883214]\n",
      "4198 [D loss: 0.208569, acc.: 92.19%] [G loss: 3.021214]\n",
      "4199 [D loss: 0.196735, acc.: 92.58%] [G loss: 3.262672]\n",
      "4200 [D loss: 0.182762, acc.: 93.36%] [G loss: 2.959674]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "4201 [D loss: 0.170519, acc.: 92.58%] [G loss: 3.395733]\n",
      "4202 [D loss: 0.219579, acc.: 91.80%] [G loss: 3.374468]\n",
      "4203 [D loss: 0.207862, acc.: 91.80%] [G loss: 3.313382]\n",
      "4204 [D loss: 0.229746, acc.: 91.41%] [G loss: 2.735254]\n",
      "4205 [D loss: 0.197365, acc.: 92.19%] [G loss: 3.486320]\n",
      "4206 [D loss: 0.190611, acc.: 92.58%] [G loss: 2.918746]\n",
      "4207 [D loss: 0.186435, acc.: 92.19%] [G loss: 3.271560]\n",
      "4208 [D loss: 0.195018, acc.: 92.19%] [G loss: 3.033269]\n",
      "4209 [D loss: 0.193402, acc.: 92.97%] [G loss: 3.222628]\n",
      "4210 [D loss: 0.210023, acc.: 91.80%] [G loss: 2.868367]\n",
      "4211 [D loss: 0.193035, acc.: 92.58%] [G loss: 3.327019]\n",
      "4212 [D loss: 0.190407, acc.: 92.58%] [G loss: 3.199943]\n",
      "4213 [D loss: 0.155980, acc.: 93.75%] [G loss: 3.177529]\n",
      "4214 [D loss: 0.174822, acc.: 92.97%] [G loss: 3.157419]\n",
      "4215 [D loss: 0.206489, acc.: 91.80%] [G loss: 3.174623]\n",
      "4216 [D loss: 0.208701, acc.: 92.19%] [G loss: 3.283693]\n",
      "4217 [D loss: 0.199783, acc.: 91.80%] [G loss: 2.877737]\n",
      "4218 [D loss: 0.190385, acc.: 91.80%] [G loss: 3.164744]\n",
      "4219 [D loss: 0.189148, acc.: 93.36%] [G loss: 2.957374]\n",
      "4220 [D loss: 0.190648, acc.: 92.58%] [G loss: 3.297136]\n",
      "4221 [D loss: 0.197488, acc.: 91.80%] [G loss: 2.869322]\n",
      "4222 [D loss: 0.215735, acc.: 92.19%] [G loss: 3.275784]\n",
      "4223 [D loss: 0.217981, acc.: 91.80%] [G loss: 3.199087]\n",
      "4224 [D loss: 0.227176, acc.: 91.80%] [G loss: 2.709177]\n",
      "4225 [D loss: 0.203277, acc.: 92.58%] [G loss: 2.739205]\n",
      "4226 [D loss: 0.204326, acc.: 92.58%] [G loss: 3.047546]\n",
      "4227 [D loss: 0.202075, acc.: 92.58%] [G loss: 2.894351]\n",
      "4228 [D loss: 0.201691, acc.: 91.80%] [G loss: 2.757746]\n",
      "4229 [D loss: 0.211320, acc.: 91.80%] [G loss: 2.680896]\n",
      "4230 [D loss: 0.193356, acc.: 92.97%] [G loss: 2.897352]\n",
      "4231 [D loss: 0.199102, acc.: 92.19%] [G loss: 2.903979]\n",
      "4232 [D loss: 0.194157, acc.: 92.19%] [G loss: 2.821041]\n",
      "4233 [D loss: 0.212405, acc.: 92.19%] [G loss: 2.848895]\n",
      "4234 [D loss: 0.206061, acc.: 91.80%] [G loss: 3.048344]\n",
      "4235 [D loss: 0.225411, acc.: 91.80%] [G loss: 3.045593]\n",
      "4236 [D loss: 0.222157, acc.: 91.02%] [G loss: 2.938317]\n",
      "4237 [D loss: 0.219851, acc.: 91.41%] [G loss: 2.753357]\n",
      "4238 [D loss: 0.227438, acc.: 91.02%] [G loss: 3.121299]\n",
      "4239 [D loss: 0.186074, acc.: 92.19%] [G loss: 3.267681]\n",
      "4240 [D loss: 0.195163, acc.: 92.58%] [G loss: 2.869550]\n",
      "4241 [D loss: 0.204792, acc.: 89.45%] [G loss: 3.012401]\n",
      "4242 [D loss: 0.190769, acc.: 92.58%] [G loss: 2.935818]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "4243 [D loss: 0.209029, acc.: 92.19%] [G loss: 3.064068]\n",
      "4244 [D loss: 0.192138, acc.: 92.97%] [G loss: 2.940747]\n",
      "4245 [D loss: 0.216819, acc.: 91.80%] [G loss: 3.068375]\n",
      "4246 [D loss: 0.197788, acc.: 92.97%] [G loss: 3.527912]\n",
      "4247 [D loss: 0.188579, acc.: 92.58%] [G loss: 3.411461]\n",
      "4248 [D loss: 0.179544, acc.: 92.19%] [G loss: 3.400347]\n",
      "4249 [D loss: 0.215079, acc.: 92.19%] [G loss: 3.006893]\n",
      "4250 [D loss: 0.220788, acc.: 90.62%] [G loss: 3.011783]\n",
      "4251 [D loss: 0.235590, acc.: 92.19%] [G loss: 3.045141]\n",
      "4252 [D loss: 0.183937, acc.: 92.19%] [G loss: 3.168298]\n",
      "4253 [D loss: 0.214368, acc.: 91.80%] [G loss: 2.831774]\n",
      "4254 [D loss: 0.198881, acc.: 92.19%] [G loss: 3.071270]\n",
      "4255 [D loss: 0.190118, acc.: 92.97%] [G loss: 3.196101]\n",
      "4256 [D loss: 0.188767, acc.: 92.19%] [G loss: 2.903936]\n",
      "4257 [D loss: 0.184641, acc.: 92.58%] [G loss: 3.094198]\n",
      "4258 [D loss: 0.196710, acc.: 91.80%] [G loss: 3.057808]\n",
      "4259 [D loss: 0.201316, acc.: 91.80%] [G loss: 3.273284]\n",
      "4260 [D loss: 0.221767, acc.: 91.80%] [G loss: 2.889091]\n",
      "4261 [D loss: 0.248247, acc.: 90.62%] [G loss: 2.950573]\n",
      "4262 [D loss: 0.198847, acc.: 91.41%] [G loss: 3.289537]\n",
      "4263 [D loss: 0.196561, acc.: 91.80%] [G loss: 3.254692]\n",
      "4264 [D loss: 0.191519, acc.: 92.19%] [G loss: 3.155274]\n",
      "4265 [D loss: 0.213290, acc.: 91.02%] [G loss: 2.913553]\n",
      "4266 [D loss: 0.201963, acc.: 91.80%] [G loss: 2.888613]\n",
      "4267 [D loss: 0.204395, acc.: 92.58%] [G loss: 2.835026]\n",
      "4268 [D loss: 0.200196, acc.: 92.58%] [G loss: 3.012118]\n",
      "4269 [D loss: 0.201626, acc.: 91.80%] [G loss: 3.202207]\n",
      "4270 [D loss: 0.207707, acc.: 91.80%] [G loss: 2.866229]\n",
      "4271 [D loss: 0.227461, acc.: 91.02%] [G loss: 3.118212]\n",
      "4272 [D loss: 0.183629, acc.: 92.58%] [G loss: 3.192708]\n",
      "4273 [D loss: 0.202088, acc.: 91.02%] [G loss: 2.802318]\n",
      "4274 [D loss: 0.194296, acc.: 92.19%] [G loss: 3.293728]\n",
      "4275 [D loss: 0.192514, acc.: 92.97%] [G loss: 3.076763]\n",
      "4276 [D loss: 0.197629, acc.: 91.41%] [G loss: 3.305858]\n",
      "4277 [D loss: 0.214786, acc.: 90.23%] [G loss: 3.223891]\n",
      "4278 [D loss: 0.202650, acc.: 91.80%] [G loss: 3.315455]\n",
      "4279 [D loss: 0.191170, acc.: 92.19%] [G loss: 3.045555]\n",
      "4280 [D loss: 0.193592, acc.: 92.19%] [G loss: 2.812239]\n",
      "4281 [D loss: 0.202712, acc.: 92.58%] [G loss: 3.023047]\n",
      "4282 [D loss: 0.195941, acc.: 92.19%] [G loss: 3.259216]\n",
      "4283 [D loss: 0.217116, acc.: 91.80%] [G loss: 3.002145]\n",
      "4284 [D loss: 0.214514, acc.: 91.41%] [G loss: 2.938005]\n",
      "4285 [D loss: 0.210418, acc.: 91.41%] [G loss: 2.917337]\n",
      "4286 [D loss: 0.222994, acc.: 91.02%] [G loss: 2.818178]\n",
      "4287 [D loss: 0.210437, acc.: 92.19%] [G loss: 2.936955]\n",
      "4288 [D loss: 0.200975, acc.: 92.58%] [G loss: 2.967989]\n",
      "4289 [D loss: 0.217134, acc.: 91.02%] [G loss: 3.127654]\n",
      "4290 [D loss: 0.204497, acc.: 92.19%] [G loss: 3.117227]\n",
      "4291 [D loss: 0.237327, acc.: 90.62%] [G loss: 2.735236]\n",
      "4292 [D loss: 0.212865, acc.: 92.19%] [G loss: 2.969090]\n",
      "4293 [D loss: 0.214515, acc.: 91.80%] [G loss: 3.144572]\n",
      "4294 [D loss: 0.209168, acc.: 91.80%] [G loss: 3.603355]\n",
      "4295 [D loss: 0.235142, acc.: 91.41%] [G loss: 3.304185]\n",
      "4296 [D loss: 0.175537, acc.: 92.97%] [G loss: 3.072425]\n",
      "4297 [D loss: 0.183393, acc.: 91.80%] [G loss: 3.395346]\n",
      "4298 [D loss: 0.210098, acc.: 91.41%] [G loss: 3.255646]\n",
      "4299 [D loss: 0.224949, acc.: 91.02%] [G loss: 3.361381]\n",
      "4300 [D loss: 0.187979, acc.: 91.80%] [G loss: 3.329303]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "4301 [D loss: 0.180988, acc.: 92.97%] [G loss: 3.131581]\n",
      "4302 [D loss: 0.181801, acc.: 92.58%] [G loss: 2.928879]\n",
      "4303 [D loss: 0.182553, acc.: 92.97%] [G loss: 3.036320]\n",
      "4304 [D loss: 0.206863, acc.: 93.75%] [G loss: 3.150256]\n",
      "4305 [D loss: 0.216902, acc.: 91.41%] [G loss: 3.036005]\n",
      "4306 [D loss: 0.215178, acc.: 91.02%] [G loss: 3.074360]\n",
      "4307 [D loss: 0.180121, acc.: 92.19%] [G loss: 3.170782]\n",
      "4308 [D loss: 0.174226, acc.: 92.19%] [G loss: 3.120539]\n",
      "4309 [D loss: 0.170399, acc.: 92.58%] [G loss: 3.363923]\n",
      "4310 [D loss: 0.191449, acc.: 92.19%] [G loss: 3.282006]\n",
      "4311 [D loss: 0.239619, acc.: 91.41%] [G loss: 2.919932]\n",
      "4312 [D loss: 0.203257, acc.: 90.62%] [G loss: 3.312941]\n",
      "4313 [D loss: 0.183540, acc.: 92.58%] [G loss: 3.061786]\n",
      "4314 [D loss: 0.195181, acc.: 93.36%] [G loss: 3.187006]\n",
      "4315 [D loss: 0.197242, acc.: 91.02%] [G loss: 3.146463]\n",
      "4316 [D loss: 0.199873, acc.: 92.19%] [G loss: 3.063688]\n",
      "4317 [D loss: 0.203335, acc.: 90.62%] [G loss: 3.037825]\n",
      "4318 [D loss: 0.212626, acc.: 92.58%] [G loss: 2.972116]\n",
      "4319 [D loss: 0.193483, acc.: 91.80%] [G loss: 3.371066]\n",
      "4320 [D loss: 0.203023, acc.: 92.19%] [G loss: 2.829234]\n",
      "4321 [D loss: 0.187454, acc.: 93.36%] [G loss: 2.923124]\n",
      "4322 [D loss: 0.196435, acc.: 91.41%] [G loss: 2.857714]\n",
      "4323 [D loss: 0.207422, acc.: 91.02%] [G loss: 2.835810]\n",
      "4324 [D loss: 0.194794, acc.: 91.80%] [G loss: 3.120632]\n",
      "4325 [D loss: 0.214924, acc.: 91.80%] [G loss: 3.226985]\n",
      "4326 [D loss: 0.203852, acc.: 91.80%] [G loss: 3.188509]\n",
      "4327 [D loss: 0.216809, acc.: 91.02%] [G loss: 3.480646]\n",
      "4328 [D loss: 0.216819, acc.: 91.02%] [G loss: 3.413457]\n",
      "4329 [D loss: 0.187715, acc.: 92.19%] [G loss: 3.021706]\n",
      "4330 [D loss: 0.202947, acc.: 92.97%] [G loss: 3.054160]\n",
      "4331 [D loss: 0.197207, acc.: 92.19%] [G loss: 2.949638]\n",
      "4332 [D loss: 0.217108, acc.: 91.02%] [G loss: 3.129676]\n",
      "4333 [D loss: 0.219173, acc.: 91.80%] [G loss: 3.255413]\n",
      "4334 [D loss: 0.235125, acc.: 89.84%] [G loss: 3.183112]\n",
      "4335 [D loss: 0.189024, acc.: 92.19%] [G loss: 2.960764]\n",
      "4336 [D loss: 0.220797, acc.: 90.62%] [G loss: 2.909167]\n",
      "4337 [D loss: 0.201064, acc.: 92.19%] [G loss: 2.942679]\n",
      "4338 [D loss: 0.215848, acc.: 91.80%] [G loss: 3.263683]\n",
      "4339 [D loss: 0.207219, acc.: 92.19%] [G loss: 2.726567]\n",
      "4340 [D loss: 0.222073, acc.: 92.58%] [G loss: 3.021005]\n",
      "4341 [D loss: 0.205305, acc.: 91.02%] [G loss: 2.980454]\n",
      "4342 [D loss: 0.225886, acc.: 92.19%] [G loss: 2.705904]\n",
      "4343 [D loss: 0.200532, acc.: 92.19%] [G loss: 2.941047]\n",
      "4344 [D loss: 0.227784, acc.: 90.62%] [G loss: 3.025390]\n",
      "4345 [D loss: 0.219637, acc.: 91.41%] [G loss: 3.245595]\n",
      "4346 [D loss: 0.206602, acc.: 91.80%] [G loss: 2.728556]\n",
      "4347 [D loss: 0.210582, acc.: 91.41%] [G loss: 2.918402]\n",
      "4348 [D loss: 0.206897, acc.: 91.80%] [G loss: 2.942935]\n",
      "4349 [D loss: 0.222706, acc.: 91.41%] [G loss: 3.055027]\n",
      "4350 [D loss: 0.223789, acc.: 91.02%] [G loss: 2.925783]\n",
      "4351 [D loss: 0.229390, acc.: 90.62%] [G loss: 2.900154]\n",
      "4352 [D loss: 0.180374, acc.: 91.80%] [G loss: 2.948442]\n",
      "4353 [D loss: 0.212592, acc.: 91.41%] [G loss: 3.154151]\n",
      "4354 [D loss: 0.222237, acc.: 91.80%] [G loss: 2.981479]\n",
      "4355 [D loss: 0.200683, acc.: 92.19%] [G loss: 2.964360]\n",
      "4356 [D loss: 0.220719, acc.: 91.41%] [G loss: 2.878093]\n",
      "4357 [D loss: 0.185472, acc.: 91.41%] [G loss: 3.298997]\n",
      "4358 [D loss: 0.196047, acc.: 92.19%] [G loss: 3.152459]\n",
      "4359 [D loss: 0.198859, acc.: 92.19%] [G loss: 3.144934]\n",
      "4360 [D loss: 0.202374, acc.: 92.19%] [G loss: 3.042715]\n",
      "4361 [D loss: 0.203169, acc.: 91.41%] [G loss: 3.044618]\n",
      "4362 [D loss: 0.200815, acc.: 92.58%] [G loss: 3.053232]\n",
      "4363 [D loss: 0.209233, acc.: 90.23%] [G loss: 2.992533]\n",
      "4364 [D loss: 0.201432, acc.: 91.41%] [G loss: 2.946599]\n",
      "4365 [D loss: 0.207145, acc.: 92.58%] [G loss: 3.104873]\n",
      "4366 [D loss: 0.190885, acc.: 92.58%] [G loss: 2.930662]\n",
      "4367 [D loss: 0.216401, acc.: 92.19%] [G loss: 2.729223]\n",
      "4368 [D loss: 0.202782, acc.: 92.58%] [G loss: 2.968010]\n",
      "4369 [D loss: 0.223694, acc.: 91.80%] [G loss: 3.249598]\n",
      "4370 [D loss: 0.216870, acc.: 91.41%] [G loss: 3.108488]\n",
      "4371 [D loss: 0.198520, acc.: 92.19%] [G loss: 2.692003]\n",
      "4372 [D loss: 0.190789, acc.: 93.36%] [G loss: 2.907237]\n",
      "4373 [D loss: 0.196734, acc.: 92.58%] [G loss: 2.794405]\n",
      "4374 [D loss: 0.212966, acc.: 91.80%] [G loss: 3.679597]\n",
      "4375 [D loss: 0.226285, acc.: 91.41%] [G loss: 3.339269]\n",
      "4376 [D loss: 0.200244, acc.: 92.19%] [G loss: 3.120975]\n",
      "4377 [D loss: 0.200520, acc.: 92.19%] [G loss: 2.951489]\n",
      "4378 [D loss: 0.216689, acc.: 92.58%] [G loss: 2.985311]\n",
      "4379 [D loss: 0.191096, acc.: 92.58%] [G loss: 3.147842]\n",
      "4380 [D loss: 0.202608, acc.: 92.19%] [G loss: 3.126077]\n",
      "4381 [D loss: 0.213900, acc.: 91.80%] [G loss: 3.136102]\n",
      "4382 [D loss: 0.199992, acc.: 92.19%] [G loss: 3.190431]\n",
      "4383 [D loss: 0.241719, acc.: 91.80%] [G loss: 2.850523]\n",
      "4384 [D loss: 0.205809, acc.: 91.80%] [G loss: 3.192619]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "4385 [D loss: 0.179001, acc.: 92.97%] [G loss: 3.274740]\n",
      "4386 [D loss: 0.196041, acc.: 91.80%] [G loss: 2.900976]\n",
      "4387 [D loss: 0.179947, acc.: 92.58%] [G loss: 2.891507]\n",
      "4388 [D loss: 0.194013, acc.: 92.97%] [G loss: 2.960908]\n",
      "4389 [D loss: 0.208034, acc.: 92.19%] [G loss: 3.056040]\n",
      "4390 [D loss: 0.207386, acc.: 91.80%] [G loss: 2.935400]\n",
      "4391 [D loss: 0.193426, acc.: 92.19%] [G loss: 3.102427]\n",
      "4392 [D loss: 0.217201, acc.: 92.19%] [G loss: 2.656973]\n",
      "4393 [D loss: 0.185437, acc.: 92.19%] [G loss: 2.920249]\n",
      "4394 [D loss: 0.200158, acc.: 92.19%] [G loss: 3.107563]\n",
      "4395 [D loss: 0.203357, acc.: 92.19%] [G loss: 3.140180]\n",
      "4396 [D loss: 0.196385, acc.: 92.19%] [G loss: 3.028372]\n",
      "4397 [D loss: 0.197436, acc.: 92.97%] [G loss: 2.801290]\n",
      "4398 [D loss: 0.193099, acc.: 92.97%] [G loss: 2.753835]\n",
      "4399 [D loss: 0.179638, acc.: 92.58%] [G loss: 3.374862]\n",
      "4400 [D loss: 0.182313, acc.: 92.58%] [G loss: 3.322090]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "4401 [D loss: 0.177413, acc.: 92.58%] [G loss: 3.403276]\n",
      "4402 [D loss: 0.190344, acc.: 92.58%] [G loss: 2.925524]\n",
      "4403 [D loss: 0.205125, acc.: 91.80%] [G loss: 3.252320]\n",
      "4404 [D loss: 0.192766, acc.: 92.58%] [G loss: 3.108302]\n",
      "4405 [D loss: 0.174551, acc.: 92.58%] [G loss: 3.210616]\n",
      "4406 [D loss: 0.160064, acc.: 92.97%] [G loss: 3.815905]\n",
      "4407 [D loss: 0.180667, acc.: 91.80%] [G loss: 2.958741]\n",
      "4408 [D loss: 0.181852, acc.: 92.19%] [G loss: 3.209962]\n",
      "4409 [D loss: 0.227265, acc.: 91.41%] [G loss: 2.592774]\n",
      "4410 [D loss: 0.211818, acc.: 92.19%] [G loss: 2.981245]\n",
      "4411 [D loss: 0.194418, acc.: 92.19%] [G loss: 3.078917]\n",
      "4412 [D loss: 0.191365, acc.: 91.80%] [G loss: 2.778860]\n",
      "4413 [D loss: 0.209822, acc.: 91.02%] [G loss: 3.126005]\n",
      "4414 [D loss: 0.176839, acc.: 92.97%] [G loss: 3.331340]\n",
      "4415 [D loss: 0.192955, acc.: 92.97%] [G loss: 3.163205]\n",
      "4416 [D loss: 0.181251, acc.: 92.58%] [G loss: 3.447355]\n",
      "4417 [D loss: 0.198557, acc.: 91.80%] [G loss: 3.093875]\n",
      "4418 [D loss: 0.190978, acc.: 92.19%] [G loss: 2.981030]\n",
      "4419 [D loss: 0.217521, acc.: 91.41%] [G loss: 3.448945]\n",
      "4420 [D loss: 0.183639, acc.: 92.19%] [G loss: 3.594975]\n",
      "4421 [D loss: 0.212221, acc.: 90.62%] [G loss: 3.023731]\n",
      "4422 [D loss: 0.208194, acc.: 92.58%] [G loss: 3.115677]\n",
      "4423 [D loss: 0.206698, acc.: 92.58%] [G loss: 2.856912]\n",
      "4424 [D loss: 0.214225, acc.: 91.80%] [G loss: 3.071826]\n",
      "4425 [D loss: 0.208578, acc.: 92.19%] [G loss: 3.355791]\n",
      "4426 [D loss: 0.198255, acc.: 91.41%] [G loss: 3.576844]\n",
      "4427 [D loss: 0.181376, acc.: 92.58%] [G loss: 3.109163]\n",
      "4428 [D loss: 0.220142, acc.: 91.41%] [G loss: 2.830481]\n",
      "4429 [D loss: 0.189289, acc.: 92.58%] [G loss: 2.925852]\n",
      "4430 [D loss: 0.203941, acc.: 92.19%] [G loss: 3.024804]\n",
      "4431 [D loss: 0.197322, acc.: 92.58%] [G loss: 2.882548]\n",
      "4432 [D loss: 0.191668, acc.: 92.97%] [G loss: 3.287227]\n",
      "4433 [D loss: 0.176425, acc.: 93.36%] [G loss: 3.471929]\n",
      "4434 [D loss: 0.176759, acc.: 92.58%] [G loss: 3.516682]\n",
      "4435 [D loss: 0.166552, acc.: 92.58%] [G loss: 3.272627]\n",
      "4436 [D loss: 0.207651, acc.: 91.41%] [G loss: 2.883448]\n",
      "4437 [D loss: 0.200250, acc.: 92.58%] [G loss: 3.072420]\n",
      "4438 [D loss: 0.221798, acc.: 91.80%] [G loss: 3.394856]\n",
      "4439 [D loss: 0.227114, acc.: 91.02%] [G loss: 3.003059]\n",
      "4440 [D loss: 0.216754, acc.: 91.41%] [G loss: 2.982560]\n",
      "4441 [D loss: 0.198076, acc.: 92.19%] [G loss: 3.099194]\n",
      "4442 [D loss: 0.223399, acc.: 91.41%] [G loss: 2.954578]\n",
      "4443 [D loss: 0.208911, acc.: 92.58%] [G loss: 2.914590]\n",
      "4444 [D loss: 0.185096, acc.: 92.19%] [G loss: 3.126479]\n",
      "4445 [D loss: 0.172748, acc.: 91.80%] [G loss: 3.475712]\n",
      "4446 [D loss: 0.168876, acc.: 91.41%] [G loss: 3.023308]\n",
      "4447 [D loss: 0.176897, acc.: 93.36%] [G loss: 3.312823]\n",
      "4448 [D loss: 0.195902, acc.: 92.19%] [G loss: 2.951864]\n",
      "4449 [D loss: 0.229574, acc.: 90.62%] [G loss: 2.944408]\n",
      "4450 [D loss: 0.193099, acc.: 91.41%] [G loss: 3.128203]\n",
      "4451 [D loss: 0.226863, acc.: 89.45%] [G loss: 3.167475]\n",
      "4452 [D loss: 0.226208, acc.: 91.41%] [G loss: 2.977028]\n",
      "4453 [D loss: 0.212771, acc.: 91.02%] [G loss: 2.975441]\n",
      "4454 [D loss: 0.194878, acc.: 92.97%] [G loss: 2.723703]\n",
      "4455 [D loss: 0.236214, acc.: 91.02%] [G loss: 2.938394]\n",
      "4456 [D loss: 0.202317, acc.: 92.58%] [G loss: 2.930825]\n",
      "4457 [D loss: 0.214864, acc.: 91.41%] [G loss: 2.970619]\n",
      "4458 [D loss: 0.197283, acc.: 92.19%] [G loss: 2.902583]\n",
      "4459 [D loss: 0.233627, acc.: 91.02%] [G loss: 2.740237]\n",
      "4460 [D loss: 0.227766, acc.: 91.02%] [G loss: 2.836273]\n",
      "4461 [D loss: 0.201607, acc.: 92.58%] [G loss: 3.081787]\n",
      "4462 [D loss: 0.195692, acc.: 91.80%] [G loss: 3.111405]\n",
      "4463 [D loss: 0.179160, acc.: 92.19%] [G loss: 3.226704]\n",
      "4464 [D loss: 0.199851, acc.: 90.62%] [G loss: 3.277498]\n",
      "4465 [D loss: 0.199462, acc.: 92.19%] [G loss: 3.034979]\n",
      "4466 [D loss: 0.207258, acc.: 91.80%] [G loss: 2.802165]\n",
      "4467 [D loss: 0.198128, acc.: 92.97%] [G loss: 3.246414]\n",
      "4468 [D loss: 0.220805, acc.: 91.80%] [G loss: 3.251956]\n",
      "4469 [D loss: 0.189458, acc.: 92.19%] [G loss: 3.017253]\n",
      "4470 [D loss: 0.195875, acc.: 91.80%] [G loss: 2.897283]\n",
      "4471 [D loss: 0.200953, acc.: 93.36%] [G loss: 3.133070]\n",
      "4472 [D loss: 0.199936, acc.: 91.41%] [G loss: 3.205870]\n",
      "4473 [D loss: 0.200766, acc.: 92.19%] [G loss: 3.203981]\n",
      "4474 [D loss: 0.200599, acc.: 91.80%] [G loss: 3.435617]\n",
      "4475 [D loss: 0.176626, acc.: 92.58%] [G loss: 2.928616]\n",
      "4476 [D loss: 0.171713, acc.: 92.97%] [G loss: 3.107590]\n",
      "4477 [D loss: 0.174283, acc.: 93.75%] [G loss: 3.022652]\n",
      "4478 [D loss: 0.170731, acc.: 92.97%] [G loss: 2.962319]\n",
      "4479 [D loss: 0.174440, acc.: 93.36%] [G loss: 3.285978]\n",
      "4480 [D loss: 0.213579, acc.: 91.02%] [G loss: 3.114430]\n",
      "4481 [D loss: 0.234130, acc.: 91.41%] [G loss: 3.073117]\n",
      "4482 [D loss: 0.199740, acc.: 91.80%] [G loss: 3.523830]\n",
      "4483 [D loss: 0.205702, acc.: 92.58%] [G loss: 3.155610]\n",
      "4484 [D loss: 0.192670, acc.: 93.75%] [G loss: 3.014055]\n",
      "4485 [D loss: 0.212870, acc.: 91.41%] [G loss: 2.667033]\n",
      "4486 [D loss: 0.205256, acc.: 92.58%] [G loss: 3.042469]\n",
      "4487 [D loss: 0.185165, acc.: 93.36%] [G loss: 3.139912]\n",
      "4488 [D loss: 0.186660, acc.: 91.80%] [G loss: 3.465119]\n",
      "4489 [D loss: 0.196651, acc.: 91.80%] [G loss: 3.216293]\n",
      "4490 [D loss: 0.203963, acc.: 92.19%] [G loss: 3.100513]\n",
      "4491 [D loss: 0.205929, acc.: 92.58%] [G loss: 3.055126]\n",
      "4492 [D loss: 0.223787, acc.: 90.62%] [G loss: 2.961978]\n",
      "4493 [D loss: 0.205613, acc.: 92.19%] [G loss: 3.257154]\n",
      "4494 [D loss: 0.204865, acc.: 92.19%] [G loss: 3.285727]\n",
      "4495 [D loss: 0.230861, acc.: 91.41%] [G loss: 3.087400]\n",
      "4496 [D loss: 0.222658, acc.: 91.02%] [G loss: 3.105208]\n",
      "4497 [D loss: 0.200651, acc.: 92.58%] [G loss: 2.926249]\n",
      "4498 [D loss: 0.210182, acc.: 91.80%] [G loss: 2.875054]\n",
      "4499 [D loss: 0.191988, acc.: 92.19%] [G loss: 3.150140]\n",
      "4500 [D loss: 0.187827, acc.: 92.19%] [G loss: 2.773672]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "4501 [D loss: 0.173813, acc.: 92.97%] [G loss: 3.790483]\n",
      "4502 [D loss: 0.179728, acc.: 92.19%] [G loss: 3.463360]\n",
      "4503 [D loss: 0.169567, acc.: 93.36%] [G loss: 3.325499]\n",
      "4504 [D loss: 0.168844, acc.: 92.97%] [G loss: 2.874511]\n",
      "4505 [D loss: 0.209122, acc.: 91.80%] [G loss: 3.205102]\n",
      "4506 [D loss: 0.197800, acc.: 92.97%] [G loss: 3.150818]\n",
      "4507 [D loss: 0.210506, acc.: 92.19%] [G loss: 2.764475]\n",
      "4508 [D loss: 0.197785, acc.: 92.19%] [G loss: 3.163330]\n",
      "4509 [D loss: 0.195100, acc.: 91.02%] [G loss: 3.561210]\n",
      "4510 [D loss: 0.169957, acc.: 92.97%] [G loss: 3.238933]\n",
      "4511 [D loss: 0.175950, acc.: 91.80%] [G loss: 3.199401]\n",
      "4512 [D loss: 0.238541, acc.: 91.41%] [G loss: 3.018557]\n",
      "4513 [D loss: 0.220822, acc.: 91.02%] [G loss: 3.129874]\n",
      "4514 [D loss: 0.174581, acc.: 92.97%] [G loss: 3.414613]\n",
      "4515 [D loss: 0.175155, acc.: 92.97%] [G loss: 3.287553]\n",
      "4516 [D loss: 0.160048, acc.: 92.97%] [G loss: 3.323671]\n",
      "4517 [D loss: 0.186822, acc.: 92.58%] [G loss: 3.242485]\n",
      "4518 [D loss: 0.213802, acc.: 91.02%] [G loss: 2.776260]\n",
      "4519 [D loss: 0.185232, acc.: 93.36%] [G loss: 3.228868]\n",
      "4520 [D loss: 0.209302, acc.: 92.58%] [G loss: 3.156832]\n",
      "4521 [D loss: 0.196840, acc.: 92.19%] [G loss: 2.998120]\n",
      "4522 [D loss: 0.196086, acc.: 92.19%] [G loss: 3.033361]\n",
      "4523 [D loss: 0.221238, acc.: 91.02%] [G loss: 3.051171]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "4524 [D loss: 0.201195, acc.: 91.80%] [G loss: 3.194618]\n",
      "4525 [D loss: 0.213465, acc.: 90.62%] [G loss: 3.025758]\n",
      "4526 [D loss: 0.188671, acc.: 92.19%] [G loss: 2.923487]\n",
      "4527 [D loss: 0.224662, acc.: 91.41%] [G loss: 3.080880]\n",
      "4528 [D loss: 0.235961, acc.: 91.41%] [G loss: 2.932713]\n",
      "4529 [D loss: 0.200841, acc.: 91.02%] [G loss: 3.074266]\n",
      "4530 [D loss: 0.206174, acc.: 91.02%] [G loss: 2.746970]\n",
      "4531 [D loss: 0.188247, acc.: 92.97%] [G loss: 2.897145]\n",
      "4532 [D loss: 0.222615, acc.: 92.19%] [G loss: 2.922042]\n",
      "4533 [D loss: 0.190460, acc.: 92.97%] [G loss: 3.019580]\n",
      "4534 [D loss: 0.236490, acc.: 90.62%] [G loss: 3.208850]\n",
      "4535 [D loss: 0.213509, acc.: 92.19%] [G loss: 3.150811]\n",
      "4536 [D loss: 0.199963, acc.: 92.97%] [G loss: 2.876451]\n",
      "4537 [D loss: 0.196838, acc.: 91.80%] [G loss: 3.562014]\n",
      "4538 [D loss: 0.192273, acc.: 91.80%] [G loss: 3.433805]\n",
      "4539 [D loss: 0.195860, acc.: 92.19%] [G loss: 3.329834]\n",
      "4540 [D loss: 0.182260, acc.: 92.19%] [G loss: 3.571703]\n",
      "4541 [D loss: 0.192604, acc.: 92.58%] [G loss: 2.818211]\n",
      "4542 [D loss: 0.179215, acc.: 92.58%] [G loss: 3.148969]\n",
      "4543 [D loss: 0.218514, acc.: 91.80%] [G loss: 2.925289]\n",
      "4544 [D loss: 0.177472, acc.: 92.19%] [G loss: 3.524716]\n",
      "4545 [D loss: 0.169610, acc.: 92.58%] [G loss: 3.561602]\n",
      "4546 [D loss: 0.161082, acc.: 92.97%] [G loss: 3.385440]\n",
      "4547 [D loss: 0.157070, acc.: 94.14%] [G loss: 2.881804]\n",
      "4548 [D loss: 0.188849, acc.: 92.58%] [G loss: 3.078424]\n",
      "4549 [D loss: 0.164939, acc.: 93.75%] [G loss: 3.173320]\n",
      "4550 [D loss: 0.193707, acc.: 92.19%] [G loss: 3.024360]\n",
      "4551 [D loss: 0.173451, acc.: 92.58%] [G loss: 3.510531]\n",
      "4552 [D loss: 0.179897, acc.: 92.19%] [G loss: 3.029306]\n",
      "4553 [D loss: 0.181789, acc.: 91.41%] [G loss: 3.292468]\n",
      "4554 [D loss: 0.185084, acc.: 92.58%] [G loss: 3.139593]\n",
      "4555 [D loss: 0.215903, acc.: 92.58%] [G loss: 3.187216]\n",
      "4556 [D loss: 0.166296, acc.: 91.80%] [G loss: 3.511458]\n",
      "4557 [D loss: 0.171568, acc.: 93.75%] [G loss: 3.186839]\n",
      "4558 [D loss: 0.183791, acc.: 92.19%] [G loss: 3.462071]\n",
      "4559 [D loss: 0.183993, acc.: 92.58%] [G loss: 3.091571]\n",
      "4560 [D loss: 0.199781, acc.: 91.80%] [G loss: 3.244870]\n",
      "4561 [D loss: 0.183211, acc.: 92.58%] [G loss: 3.170640]\n",
      "4562 [D loss: 0.191180, acc.: 92.97%] [G loss: 3.167176]\n",
      "4563 [D loss: 0.200816, acc.: 92.58%] [G loss: 3.394656]\n",
      "4564 [D loss: 0.164686, acc.: 93.75%] [G loss: 3.238079]\n",
      "4565 [D loss: 0.159678, acc.: 92.97%] [G loss: 3.428116]\n",
      "4566 [D loss: 0.180513, acc.: 92.19%] [G loss: 3.253963]\n",
      "4567 [D loss: 0.195394, acc.: 92.19%] [G loss: 3.686703]\n",
      "4568 [D loss: 0.205802, acc.: 92.19%] [G loss: 3.108514]\n",
      "4569 [D loss: 0.203634, acc.: 92.58%] [G loss: 3.365185]\n",
      "4570 [D loss: 0.212553, acc.: 91.80%] [G loss: 3.801582]\n",
      "4571 [D loss: 0.182209, acc.: 92.19%] [G loss: 3.541099]\n",
      "4572 [D loss: 0.148305, acc.: 92.97%] [G loss: 3.511544]\n",
      "4573 [D loss: 0.180180, acc.: 92.58%] [G loss: 3.299706]\n",
      "4574 [D loss: 0.181895, acc.: 91.41%] [G loss: 3.724569]\n",
      "4575 [D loss: 0.203726, acc.: 91.80%] [G loss: 3.478243]\n",
      "4576 [D loss: 0.185538, acc.: 91.80%] [G loss: 3.298591]\n",
      "4577 [D loss: 0.176195, acc.: 92.97%] [G loss: 3.305803]\n",
      "4578 [D loss: 0.178451, acc.: 93.75%] [G loss: 3.048754]\n",
      "4579 [D loss: 0.199265, acc.: 91.02%] [G loss: 3.353461]\n",
      "4580 [D loss: 0.216115, acc.: 92.19%] [G loss: 3.156172]\n",
      "4581 [D loss: 0.185870, acc.: 92.58%] [G loss: 3.369566]\n",
      "4582 [D loss: 0.179178, acc.: 94.14%] [G loss: 3.154908]\n",
      "4583 [D loss: 0.203135, acc.: 91.80%] [G loss: 3.134830]\n",
      "4584 [D loss: 0.176103, acc.: 92.58%] [G loss: 2.831652]\n",
      "4585 [D loss: 0.187224, acc.: 92.58%] [G loss: 3.043805]\n",
      "4586 [D loss: 0.166875, acc.: 92.58%] [G loss: 3.544974]\n",
      "4587 [D loss: 0.195814, acc.: 91.02%] [G loss: 3.080008]\n",
      "4588 [D loss: 0.151184, acc.: 92.97%] [G loss: 3.447037]\n",
      "4589 [D loss: 0.183401, acc.: 91.80%] [G loss: 3.410020]\n",
      "4590 [D loss: 0.212973, acc.: 91.80%] [G loss: 3.158706]\n",
      "4591 [D loss: 0.207757, acc.: 92.19%] [G loss: 3.110028]\n",
      "4592 [D loss: 0.214742, acc.: 91.41%] [G loss: 3.751488]\n",
      "4593 [D loss: 0.189723, acc.: 92.19%] [G loss: 3.503003]\n",
      "4594 [D loss: 0.182228, acc.: 91.80%] [G loss: 2.948596]\n",
      "4595 [D loss: 0.166256, acc.: 92.97%] [G loss: 3.078978]\n",
      "4596 [D loss: 0.172720, acc.: 92.58%] [G loss: 3.050391]\n",
      "4597 [D loss: 0.193938, acc.: 92.97%] [G loss: 3.222238]\n",
      "4598 [D loss: 0.194913, acc.: 92.97%] [G loss: 3.442592]\n",
      "4599 [D loss: 0.185489, acc.: 92.58%] [G loss: 3.225616]\n",
      "4600 [D loss: 0.197967, acc.: 92.58%] [G loss: 3.008636]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "4601 [D loss: 0.193694, acc.: 91.02%] [G loss: 3.708429]\n",
      "4602 [D loss: 0.177158, acc.: 92.58%] [G loss: 3.454469]\n",
      "4603 [D loss: 0.170365, acc.: 92.19%] [G loss: 3.328159]\n",
      "4604 [D loss: 0.167393, acc.: 93.75%] [G loss: 3.269247]\n",
      "4605 [D loss: 0.217061, acc.: 90.23%] [G loss: 2.820766]\n",
      "4606 [D loss: 0.195056, acc.: 92.19%] [G loss: 3.264626]\n",
      "4607 [D loss: 0.188917, acc.: 91.80%] [G loss: 3.712143]\n",
      "4608 [D loss: 0.179643, acc.: 93.36%] [G loss: 3.253545]\n",
      "4609 [D loss: 0.214328, acc.: 90.23%] [G loss: 3.290500]\n",
      "4610 [D loss: 0.189997, acc.: 91.41%] [G loss: 3.565539]\n",
      "4611 [D loss: 0.215033, acc.: 91.80%] [G loss: 3.029068]\n",
      "4612 [D loss: 0.214956, acc.: 89.84%] [G loss: 3.566506]\n",
      "4613 [D loss: 0.224914, acc.: 91.41%] [G loss: 3.426140]\n",
      "4614 [D loss: 0.187424, acc.: 92.58%] [G loss: 3.304488]\n",
      "4615 [D loss: 0.211548, acc.: 92.19%] [G loss: 2.774498]\n",
      "4616 [D loss: 0.205571, acc.: 92.97%] [G loss: 3.040998]\n",
      "4617 [D loss: 0.201978, acc.: 92.19%] [G loss: 3.197233]\n",
      "4618 [D loss: 0.185397, acc.: 91.80%] [G loss: 3.609092]\n",
      "4619 [D loss: 0.202208, acc.: 91.02%] [G loss: 3.864307]\n",
      "4620 [D loss: 0.181546, acc.: 92.97%] [G loss: 3.444209]\n",
      "4621 [D loss: 0.194670, acc.: 92.97%] [G loss: 3.817736]\n",
      "4622 [D loss: 0.201921, acc.: 92.97%] [G loss: 3.060123]\n",
      "4623 [D loss: 0.182220, acc.: 92.97%] [G loss: 3.461352]\n",
      "4624 [D loss: 0.184345, acc.: 92.19%] [G loss: 3.467069]\n",
      "4625 [D loss: 0.181746, acc.: 92.19%] [G loss: 3.427363]\n",
      "4626 [D loss: 0.196974, acc.: 91.02%] [G loss: 3.575007]\n",
      "4627 [D loss: 0.197432, acc.: 91.80%] [G loss: 3.020008]\n",
      "4628 [D loss: 0.216817, acc.: 91.41%] [G loss: 3.183301]\n",
      "4629 [D loss: 0.200633, acc.: 91.80%] [G loss: 2.785845]\n",
      "4630 [D loss: 0.222261, acc.: 90.62%] [G loss: 3.056039]\n",
      "4631 [D loss: 0.223093, acc.: 90.23%] [G loss: 2.982134]\n",
      "4632 [D loss: 0.218965, acc.: 91.41%] [G loss: 3.598155]\n",
      "4633 [D loss: 0.167588, acc.: 92.19%] [G loss: 3.489047]\n",
      "4634 [D loss: 0.182242, acc.: 91.41%] [G loss: 3.350621]\n",
      "4635 [D loss: 0.189504, acc.: 92.97%] [G loss: 2.949352]\n",
      "4636 [D loss: 0.183245, acc.: 93.36%] [G loss: 3.496931]\n",
      "4637 [D loss: 0.172103, acc.: 92.97%] [G loss: 3.530334]\n",
      "4638 [D loss: 0.171559, acc.: 92.97%] [G loss: 3.388219]\n",
      "4639 [D loss: 0.178204, acc.: 92.19%] [G loss: 3.695481]\n",
      "4640 [D loss: 0.185926, acc.: 91.02%] [G loss: 2.919327]\n",
      "4641 [D loss: 0.211839, acc.: 91.80%] [G loss: 3.137565]\n",
      "4642 [D loss: 0.217479, acc.: 91.80%] [G loss: 3.348456]\n",
      "4643 [D loss: 0.180983, acc.: 92.58%] [G loss: 3.590945]\n",
      "4644 [D loss: 0.196866, acc.: 92.19%] [G loss: 3.141430]\n",
      "4645 [D loss: 0.197922, acc.: 91.41%] [G loss: 3.565434]\n",
      "4646 [D loss: 0.189234, acc.: 92.19%] [G loss: 2.934810]\n",
      "4647 [D loss: 0.195830, acc.: 92.58%] [G loss: 2.994665]\n",
      "4648 [D loss: 0.171330, acc.: 92.19%] [G loss: 3.405365]\n",
      "4649 [D loss: 0.159403, acc.: 93.36%] [G loss: 3.083236]\n",
      "4650 [D loss: 0.177527, acc.: 92.58%] [G loss: 3.221567]\n",
      "4651 [D loss: 0.180385, acc.: 92.19%] [G loss: 3.475311]\n",
      "4652 [D loss: 0.181337, acc.: 92.58%] [G loss: 3.050817]\n",
      "4653 [D loss: 0.197866, acc.: 91.02%] [G loss: 3.052279]\n",
      "4654 [D loss: 0.157950, acc.: 93.36%] [G loss: 3.710647]\n",
      "4655 [D loss: 0.172292, acc.: 92.19%] [G loss: 3.429267]\n",
      "4656 [D loss: 0.162962, acc.: 93.36%] [G loss: 3.828402]\n",
      "4657 [D loss: 0.185199, acc.: 92.97%] [G loss: 3.310889]\n",
      "4658 [D loss: 0.203421, acc.: 92.19%] [G loss: 3.063005]\n",
      "4659 [D loss: 0.169532, acc.: 93.36%] [G loss: 3.301067]\n",
      "4660 [D loss: 0.189036, acc.: 92.58%] [G loss: 3.449718]\n",
      "4661 [D loss: 0.193649, acc.: 92.58%] [G loss: 2.900034]\n",
      "4662 [D loss: 0.207805, acc.: 92.58%] [G loss: 2.972558]\n",
      "4663 [D loss: 0.209890, acc.: 92.19%] [G loss: 3.105795]\n",
      "4664 [D loss: 0.210920, acc.: 90.23%] [G loss: 3.253357]\n",
      "4665 [D loss: 0.222319, acc.: 91.41%] [G loss: 3.322861]\n",
      "4666 [D loss: 0.181683, acc.: 91.80%] [G loss: 3.120094]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "4667 [D loss: 0.170306, acc.: 92.97%] [G loss: 3.403082]\n",
      "4668 [D loss: 0.195036, acc.: 91.41%] [G loss: 3.034202]\n",
      "4669 [D loss: 0.194905, acc.: 92.19%] [G loss: 3.350117]\n",
      "4670 [D loss: 0.190501, acc.: 91.41%] [G loss: 3.770803]\n",
      "4671 [D loss: 0.189460, acc.: 93.36%] [G loss: 3.414499]\n",
      "4672 [D loss: 0.156008, acc.: 93.36%] [G loss: 3.212280]\n",
      "4673 [D loss: 0.176527, acc.: 92.58%] [G loss: 3.476604]\n",
      "4674 [D loss: 0.196384, acc.: 92.58%] [G loss: 3.042068]\n",
      "4675 [D loss: 0.225959, acc.: 91.80%] [G loss: 3.149989]\n",
      "4676 [D loss: 0.232254, acc.: 91.80%] [G loss: 2.732504]\n",
      "4677 [D loss: 0.212036, acc.: 90.62%] [G loss: 3.281743]\n",
      "4678 [D loss: 0.223022, acc.: 92.19%] [G loss: 3.164845]\n",
      "4679 [D loss: 0.204785, acc.: 92.58%] [G loss: 3.301559]\n",
      "4680 [D loss: 0.230926, acc.: 89.84%] [G loss: 3.044724]\n",
      "4681 [D loss: 0.190933, acc.: 91.80%] [G loss: 3.406066]\n",
      "4682 [D loss: 0.175338, acc.: 92.58%] [G loss: 3.206517]\n",
      "4683 [D loss: 0.191550, acc.: 91.41%] [G loss: 3.449527]\n",
      "4684 [D loss: 0.197745, acc.: 92.19%] [G loss: 2.874406]\n",
      "4685 [D loss: 0.219607, acc.: 92.19%] [G loss: 2.986013]\n",
      "4686 [D loss: 0.183179, acc.: 92.97%] [G loss: 3.547202]\n",
      "4687 [D loss: 0.167473, acc.: 93.36%] [G loss: 3.575558]\n",
      "4688 [D loss: 0.169130, acc.: 92.19%] [G loss: 3.305452]\n",
      "4689 [D loss: 0.172451, acc.: 93.75%] [G loss: 3.518380]\n",
      "4690 [D loss: 0.196492, acc.: 92.19%] [G loss: 3.038722]\n",
      "4691 [D loss: 0.197664, acc.: 92.19%] [G loss: 3.315564]\n",
      "4692 [D loss: 0.213384, acc.: 90.23%] [G loss: 3.433503]\n",
      "4693 [D loss: 0.173498, acc.: 92.58%] [G loss: 3.523402]\n",
      "4694 [D loss: 0.173177, acc.: 92.97%] [G loss: 3.252392]\n",
      "4695 [D loss: 0.177765, acc.: 91.80%] [G loss: 3.266572]\n",
      "4696 [D loss: 0.194996, acc.: 92.58%] [G loss: 3.215961]\n",
      "4697 [D loss: 0.228154, acc.: 92.19%] [G loss: 3.385487]\n",
      "4698 [D loss: 0.183688, acc.: 92.97%] [G loss: 3.399913]\n",
      "4699 [D loss: 0.195435, acc.: 92.97%] [G loss: 3.689318]\n",
      "4700 [D loss: 0.182196, acc.: 92.19%] [G loss: 3.099905]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "4701 [D loss: 0.197178, acc.: 91.80%] [G loss: 3.528590]\n",
      "4702 [D loss: 0.192925, acc.: 91.80%] [G loss: 3.425307]\n",
      "4703 [D loss: 0.194547, acc.: 92.58%] [G loss: 3.110762]\n",
      "4704 [D loss: 0.211905, acc.: 91.41%] [G loss: 3.261726]\n",
      "4705 [D loss: 0.159835, acc.: 92.58%] [G loss: 3.832514]\n",
      "4706 [D loss: 0.180510, acc.: 91.41%] [G loss: 3.398846]\n",
      "4707 [D loss: 0.202569, acc.: 92.19%] [G loss: 3.069304]\n",
      "4708 [D loss: 0.264284, acc.: 88.67%] [G loss: 3.395519]\n",
      "4709 [D loss: 0.211053, acc.: 91.41%] [G loss: 3.156904]\n",
      "4710 [D loss: 0.225675, acc.: 91.02%] [G loss: 3.108076]\n",
      "4711 [D loss: 0.231839, acc.: 91.41%] [G loss: 2.853776]\n",
      "4712 [D loss: 0.204563, acc.: 92.97%] [G loss: 2.797155]\n",
      "4713 [D loss: 0.232463, acc.: 90.23%] [G loss: 3.034196]\n",
      "4714 [D loss: 0.233430, acc.: 91.41%] [G loss: 2.871216]\n",
      "4715 [D loss: 0.234955, acc.: 92.19%] [G loss: 2.773380]\n",
      "4716 [D loss: 0.217872, acc.: 91.80%] [G loss: 2.709376]\n",
      "4717 [D loss: 0.178256, acc.: 93.36%] [G loss: 3.316528]\n",
      "4718 [D loss: 0.199678, acc.: 92.58%] [G loss: 2.688838]\n",
      "4719 [D loss: 0.215463, acc.: 91.41%] [G loss: 3.183248]\n",
      "4720 [D loss: 0.173081, acc.: 92.97%] [G loss: 3.212125]\n",
      "4721 [D loss: 0.173965, acc.: 92.97%] [G loss: 3.080055]\n",
      "4722 [D loss: 0.180766, acc.: 91.41%] [G loss: 4.024938]\n",
      "4723 [D loss: 0.194140, acc.: 91.02%] [G loss: 3.384447]\n",
      "4724 [D loss: 0.161148, acc.: 93.36%] [G loss: 3.489389]\n",
      "4725 [D loss: 0.200725, acc.: 91.41%] [G loss: 3.020428]\n",
      "4726 [D loss: 0.188166, acc.: 92.58%] [G loss: 3.409012]\n",
      "4727 [D loss: 0.181669, acc.: 91.80%] [G loss: 3.539166]\n",
      "4728 [D loss: 0.196183, acc.: 92.97%] [G loss: 2.985854]\n",
      "4729 [D loss: 0.182768, acc.: 91.41%] [G loss: 4.028769]\n",
      "4730 [D loss: 0.184967, acc.: 92.58%] [G loss: 2.842643]\n",
      "4731 [D loss: 0.184109, acc.: 92.19%] [G loss: 3.024426]\n",
      "4732 [D loss: 0.213182, acc.: 92.97%] [G loss: 3.184200]\n",
      "4733 [D loss: 0.194238, acc.: 92.19%] [G loss: 3.238824]\n",
      "4734 [D loss: 0.214126, acc.: 92.58%] [G loss: 3.057536]\n",
      "4735 [D loss: 0.216966, acc.: 91.41%] [G loss: 3.521271]\n",
      "4736 [D loss: 0.194463, acc.: 92.19%] [G loss: 3.210201]\n",
      "4737 [D loss: 0.211065, acc.: 90.62%] [G loss: 3.238661]\n",
      "4738 [D loss: 0.176723, acc.: 92.19%] [G loss: 3.401801]\n",
      "4739 [D loss: 0.202531, acc.: 92.97%] [G loss: 3.094591]\n",
      "4740 [D loss: 0.213278, acc.: 92.19%] [G loss: 2.984625]\n",
      "4741 [D loss: 0.214416, acc.: 92.58%] [G loss: 3.157912]\n",
      "4742 [D loss: 0.231666, acc.: 91.02%] [G loss: 2.923933]\n",
      "4743 [D loss: 0.210090, acc.: 92.58%] [G loss: 2.855818]\n",
      "4744 [D loss: 0.200758, acc.: 92.19%] [G loss: 2.912014]\n",
      "4745 [D loss: 0.191340, acc.: 92.97%] [G loss: 2.832499]\n",
      "4746 [D loss: 0.214028, acc.: 91.41%] [G loss: 2.995569]\n",
      "4747 [D loss: 0.195630, acc.: 91.80%] [G loss: 3.136301]\n",
      "4748 [D loss: 0.233641, acc.: 91.41%] [G loss: 3.218417]\n",
      "4749 [D loss: 0.183879, acc.: 92.58%] [G loss: 3.513463]\n",
      "4750 [D loss: 0.195339, acc.: 92.58%] [G loss: 3.399722]\n",
      "4751 [D loss: 0.214086, acc.: 91.80%] [G loss: 3.068678]\n",
      "4752 [D loss: 0.209052, acc.: 91.41%] [G loss: 3.530946]\n",
      "4753 [D loss: 0.183101, acc.: 92.19%] [G loss: 3.158167]\n",
      "4754 [D loss: 0.184535, acc.: 92.19%] [G loss: 2.949931]\n",
      "4755 [D loss: 0.204322, acc.: 90.62%] [G loss: 3.338459]\n",
      "4756 [D loss: 0.181440, acc.: 92.97%] [G loss: 3.299427]\n",
      "4757 [D loss: 0.181019, acc.: 91.41%] [G loss: 3.401180]\n",
      "4758 [D loss: 0.200216, acc.: 92.19%] [G loss: 3.721536]\n",
      "4759 [D loss: 0.205847, acc.: 91.80%] [G loss: 3.166137]\n",
      "4760 [D loss: 0.183958, acc.: 91.80%] [G loss: 3.206048]\n",
      "4761 [D loss: 0.191408, acc.: 92.19%] [G loss: 3.760419]\n",
      "4762 [D loss: 0.193378, acc.: 91.80%] [G loss: 3.286716]\n",
      "4763 [D loss: 0.170008, acc.: 92.58%] [G loss: 3.347702]\n",
      "4764 [D loss: 0.186991, acc.: 93.75%] [G loss: 3.259402]\n",
      "4765 [D loss: 0.189753, acc.: 93.36%] [G loss: 2.953622]\n",
      "4766 [D loss: 0.187811, acc.: 92.97%] [G loss: 3.004806]\n",
      "4767 [D loss: 0.216947, acc.: 92.58%] [G loss: 3.065716]\n",
      "4768 [D loss: 0.183844, acc.: 92.97%] [G loss: 3.401618]\n",
      "4769 [D loss: 0.204826, acc.: 91.80%] [G loss: 3.236203]\n",
      "4770 [D loss: 0.197314, acc.: 92.19%] [G loss: 3.325431]\n",
      "4771 [D loss: 0.195303, acc.: 92.19%] [G loss: 3.484486]\n",
      "4772 [D loss: 0.184435, acc.: 91.41%] [G loss: 3.162976]\n",
      "4773 [D loss: 0.202248, acc.: 92.58%] [G loss: 3.120761]\n",
      "4774 [D loss: 0.209581, acc.: 91.80%] [G loss: 3.047157]\n",
      "4775 [D loss: 0.211561, acc.: 91.41%] [G loss: 3.047153]\n",
      "4776 [D loss: 0.206776, acc.: 91.80%] [G loss: 3.578729]\n",
      "4777 [D loss: 0.166477, acc.: 92.97%] [G loss: 3.429586]\n",
      "4778 [D loss: 0.169904, acc.: 92.58%] [G loss: 3.331192]\n",
      "4779 [D loss: 0.188880, acc.: 92.19%] [G loss: 3.154934]\n",
      "4780 [D loss: 0.212702, acc.: 92.97%] [G loss: 2.992761]\n",
      "4781 [D loss: 0.205941, acc.: 91.41%] [G loss: 3.130678]\n",
      "4782 [D loss: 0.210262, acc.: 92.19%] [G loss: 3.051246]\n",
      "4783 [D loss: 0.219313, acc.: 91.41%] [G loss: 2.923354]\n",
      "4784 [D loss: 0.198074, acc.: 92.19%] [G loss: 3.189858]\n",
      "4785 [D loss: 0.223620, acc.: 91.41%] [G loss: 2.827816]\n",
      "4786 [D loss: 0.212568, acc.: 91.02%] [G loss: 2.772103]\n",
      "4787 [D loss: 0.173448, acc.: 92.58%] [G loss: 3.395874]\n",
      "4788 [D loss: 0.208126, acc.: 91.80%] [G loss: 3.078721]\n",
      "4789 [D loss: 0.184133, acc.: 91.80%] [G loss: 3.193880]\n",
      "4790 [D loss: 0.232672, acc.: 92.58%] [G loss: 3.124190]\n",
      "4791 [D loss: 0.196995, acc.: 91.80%] [G loss: 3.318553]\n",
      "4792 [D loss: 0.199708, acc.: 91.41%] [G loss: 3.326695]\n",
      "4793 [D loss: 0.208918, acc.: 92.19%] [G loss: 3.096545]\n",
      "4794 [D loss: 0.206056, acc.: 91.02%] [G loss: 3.291832]\n",
      "4795 [D loss: 0.228589, acc.: 91.80%] [G loss: 3.311868]\n",
      "4796 [D loss: 0.196353, acc.: 91.80%] [G loss: 3.070506]\n",
      "4797 [D loss: 0.200043, acc.: 91.02%] [G loss: 3.854702]\n",
      "4798 [D loss: 0.170097, acc.: 92.19%] [G loss: 3.177059]\n",
      "4799 [D loss: 0.174337, acc.: 92.19%] [G loss: 3.318883]\n",
      "4800 [D loss: 0.177600, acc.: 91.80%] [G loss: 3.053385]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "4801 [D loss: 0.195174, acc.: 92.19%] [G loss: 3.277930]\n",
      "4802 [D loss: 0.212576, acc.: 91.41%] [G loss: 3.232032]\n",
      "4803 [D loss: 0.184303, acc.: 92.19%] [G loss: 3.049583]\n",
      "4804 [D loss: 0.182234, acc.: 92.19%] [G loss: 3.844553]\n",
      "4805 [D loss: 0.188344, acc.: 91.80%] [G loss: 3.487195]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "4806 [D loss: 0.177662, acc.: 92.19%] [G loss: 3.233300]\n",
      "4807 [D loss: 0.197219, acc.: 92.58%] [G loss: 3.213650]\n",
      "4808 [D loss: 0.198385, acc.: 92.19%] [G loss: 2.936682]\n",
      "4809 [D loss: 0.202479, acc.: 91.41%] [G loss: 3.537251]\n",
      "4810 [D loss: 0.198395, acc.: 92.19%] [G loss: 3.659139]\n",
      "4811 [D loss: 0.169561, acc.: 92.58%] [G loss: 3.414418]\n",
      "4812 [D loss: 0.203183, acc.: 90.62%] [G loss: 3.352877]\n",
      "4813 [D loss: 0.178112, acc.: 92.58%] [G loss: 3.047688]\n",
      "4814 [D loss: 0.210304, acc.: 92.58%] [G loss: 2.819966]\n",
      "4815 [D loss: 0.232973, acc.: 91.02%] [G loss: 3.357545]\n",
      "4816 [D loss: 0.224897, acc.: 91.02%] [G loss: 3.212079]\n",
      "4817 [D loss: 0.194592, acc.: 92.58%] [G loss: 3.185918]\n",
      "4818 [D loss: 0.198223, acc.: 92.58%] [G loss: 3.054550]\n",
      "4819 [D loss: 0.196320, acc.: 91.80%] [G loss: 3.392921]\n",
      "4820 [D loss: 0.177004, acc.: 91.80%] [G loss: 3.870626]\n",
      "4821 [D loss: 0.129315, acc.: 93.36%] [G loss: 3.914555]\n",
      "4822 [D loss: 0.182397, acc.: 91.80%] [G loss: 3.238149]\n",
      "4823 [D loss: 0.192947, acc.: 91.41%] [G loss: 3.213581]\n",
      "4824 [D loss: 0.194108, acc.: 91.41%] [G loss: 3.273242]\n",
      "4825 [D loss: 0.197906, acc.: 92.19%] [G loss: 2.993193]\n",
      "4826 [D loss: 0.196573, acc.: 92.97%] [G loss: 3.309669]\n",
      "4827 [D loss: 0.212823, acc.: 92.19%] [G loss: 3.062415]\n",
      "4828 [D loss: 0.220845, acc.: 91.02%] [G loss: 3.029203]\n",
      "4829 [D loss: 0.232941, acc.: 90.62%] [G loss: 3.461193]\n",
      "4830 [D loss: 0.198992, acc.: 91.41%] [G loss: 3.215842]\n",
      "4831 [D loss: 0.204494, acc.: 91.02%] [G loss: 2.971321]\n",
      "4832 [D loss: 0.183414, acc.: 92.19%] [G loss: 3.967444]\n",
      "4833 [D loss: 0.164039, acc.: 92.19%] [G loss: 3.161183]\n",
      "4834 [D loss: 0.146201, acc.: 92.97%] [G loss: 3.403164]\n",
      "4835 [D loss: 0.201259, acc.: 91.02%] [G loss: 3.192573]\n",
      "4836 [D loss: 0.154995, acc.: 93.36%] [G loss: 3.583883]\n",
      "4837 [D loss: 0.175354, acc.: 92.58%] [G loss: 3.523468]\n",
      "4838 [D loss: 0.149282, acc.: 92.97%] [G loss: 3.646947]\n",
      "4839 [D loss: 0.195915, acc.: 92.58%] [G loss: 3.233394]\n",
      "4840 [D loss: 0.203419, acc.: 91.02%] [G loss: 3.480204]\n",
      "4841 [D loss: 0.207938, acc.: 92.19%] [G loss: 3.014236]\n",
      "4842 [D loss: 0.194154, acc.: 91.80%] [G loss: 3.595425]\n",
      "4843 [D loss: 0.178318, acc.: 92.19%] [G loss: 3.313327]\n",
      "4844 [D loss: 0.188661, acc.: 92.58%] [G loss: 3.610951]\n",
      "4845 [D loss: 0.185685, acc.: 91.41%] [G loss: 3.640505]\n",
      "4846 [D loss: 0.191572, acc.: 91.41%] [G loss: 3.346573]\n",
      "4847 [D loss: 0.185240, acc.: 92.19%] [G loss: 3.293881]\n",
      "4848 [D loss: 0.170860, acc.: 91.80%] [G loss: 3.650653]\n",
      "4849 [D loss: 0.184237, acc.: 92.19%] [G loss: 3.138267]\n",
      "4850 [D loss: 0.186562, acc.: 91.41%] [G loss: 3.294692]\n",
      "4851 [D loss: 0.204814, acc.: 91.80%] [G loss: 3.832692]\n",
      "4852 [D loss: 0.162378, acc.: 93.36%] [G loss: 3.194051]\n",
      "4853 [D loss: 0.185287, acc.: 92.19%] [G loss: 3.493716]\n",
      "4854 [D loss: 0.171436, acc.: 92.97%] [G loss: 3.835207]\n",
      "4855 [D loss: 0.199930, acc.: 92.19%] [G loss: 3.023410]\n",
      "4856 [D loss: 0.198067, acc.: 92.58%] [G loss: 3.262815]\n",
      "4857 [D loss: 0.209189, acc.: 91.41%] [G loss: 3.275991]\n",
      "4858 [D loss: 0.188438, acc.: 92.19%] [G loss: 3.109533]\n",
      "4859 [D loss: 0.204839, acc.: 93.36%] [G loss: 3.074644]\n",
      "4860 [D loss: 0.191752, acc.: 91.02%] [G loss: 3.836647]\n",
      "4861 [D loss: 0.193669, acc.: 91.41%] [G loss: 3.578134]\n",
      "4862 [D loss: 0.189075, acc.: 92.58%] [G loss: 3.459987]\n",
      "4863 [D loss: 0.161975, acc.: 92.58%] [G loss: 3.435487]\n",
      "4864 [D loss: 0.183054, acc.: 92.58%] [G loss: 3.056290]\n",
      "4865 [D loss: 0.208661, acc.: 92.19%] [G loss: 3.107413]\n",
      "4866 [D loss: 0.195038, acc.: 92.19%] [G loss: 3.317299]\n",
      "4867 [D loss: 0.185046, acc.: 92.19%] [G loss: 3.291106]\n",
      "4868 [D loss: 0.189661, acc.: 91.80%] [G loss: 3.422256]\n",
      "4869 [D loss: 0.190812, acc.: 92.58%] [G loss: 3.654304]\n",
      "4870 [D loss: 0.190388, acc.: 92.97%] [G loss: 3.085801]\n",
      "4871 [D loss: 0.207765, acc.: 91.41%] [G loss: 3.633599]\n",
      "4872 [D loss: 0.202313, acc.: 91.80%] [G loss: 2.862656]\n",
      "4873 [D loss: 0.181370, acc.: 92.58%] [G loss: 3.706299]\n",
      "4874 [D loss: 0.177663, acc.: 92.19%] [G loss: 3.410697]\n",
      "4875 [D loss: 0.173128, acc.: 92.97%] [G loss: 3.314230]\n",
      "4876 [D loss: 0.193538, acc.: 92.19%] [G loss: 3.596312]\n",
      "4877 [D loss: 0.188910, acc.: 91.41%] [G loss: 3.490354]\n",
      "4878 [D loss: 0.217901, acc.: 91.80%] [G loss: 3.575775]\n",
      "4879 [D loss: 0.185154, acc.: 91.80%] [G loss: 3.547216]\n",
      "4880 [D loss: 0.168309, acc.: 91.80%] [G loss: 3.613276]\n",
      "4881 [D loss: 0.187093, acc.: 94.14%] [G loss: 3.129933]\n",
      "4882 [D loss: 0.189929, acc.: 92.97%] [G loss: 3.322605]\n",
      "4883 [D loss: 0.180328, acc.: 92.19%] [G loss: 3.218355]\n",
      "4884 [D loss: 0.217637, acc.: 91.41%] [G loss: 3.285805]\n",
      "4885 [D loss: 0.164501, acc.: 92.58%] [G loss: 3.808876]\n",
      "4886 [D loss: 0.144948, acc.: 93.75%] [G loss: 3.642342]\n",
      "4887 [D loss: 0.197793, acc.: 91.41%] [G loss: 3.642589]\n",
      "4888 [D loss: 0.187758, acc.: 91.41%] [G loss: 3.099400]\n",
      "4889 [D loss: 0.239768, acc.: 91.80%] [G loss: 3.552783]\n",
      "4890 [D loss: 0.215382, acc.: 91.02%] [G loss: 3.068386]\n",
      "4891 [D loss: 0.210883, acc.: 91.41%] [G loss: 2.796195]\n",
      "4892 [D loss: 0.189113, acc.: 92.58%] [G loss: 3.056251]\n",
      "4893 [D loss: 0.194480, acc.: 92.19%] [G loss: 3.091129]\n",
      "4894 [D loss: 0.218582, acc.: 91.02%] [G loss: 3.384865]\n",
      "4895 [D loss: 0.190666, acc.: 92.58%] [G loss: 2.960757]\n",
      "4896 [D loss: 0.179788, acc.: 92.97%] [G loss: 3.280355]\n",
      "4897 [D loss: 0.179366, acc.: 92.97%] [G loss: 3.050017]\n",
      "4898 [D loss: 0.215619, acc.: 91.41%] [G loss: 3.100254]\n",
      "4899 [D loss: 0.206406, acc.: 91.41%] [G loss: 3.369336]\n",
      "4900 [D loss: 0.192977, acc.: 92.19%] [G loss: 3.049078]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n",
      "4901 [D loss: 0.201049, acc.: 92.58%] [G loss: 2.954826]\n",
      "4902 [D loss: 0.183723, acc.: 92.19%] [G loss: 2.873937]\n",
      "4903 [D loss: 0.178102, acc.: 93.75%] [G loss: 3.082512]\n",
      "4904 [D loss: 0.206234, acc.: 92.19%] [G loss: 3.226659]\n",
      "4905 [D loss: 0.181478, acc.: 92.58%] [G loss: 3.469468]\n",
      "4906 [D loss: 0.198600, acc.: 91.80%] [G loss: 3.196301]\n",
      "4907 [D loss: 0.203503, acc.: 91.80%] [G loss: 3.244956]\n",
      "4908 [D loss: 0.211252, acc.: 91.80%] [G loss: 3.232972]\n",
      "4909 [D loss: 0.169058, acc.: 92.19%] [G loss: 3.603422]\n",
      "4910 [D loss: 0.179516, acc.: 91.41%] [G loss: 3.512519]\n",
      "4911 [D loss: 0.166550, acc.: 93.75%] [G loss: 3.573966]\n",
      "4912 [D loss: 0.214820, acc.: 91.41%] [G loss: 3.091118]\n",
      "4913 [D loss: 0.208319, acc.: 92.19%] [G loss: 3.172322]\n",
      "4914 [D loss: 0.168917, acc.: 93.36%] [G loss: 3.412102]\n",
      "4915 [D loss: 0.171211, acc.: 92.58%] [G loss: 3.448361]\n",
      "4916 [D loss: 0.194731, acc.: 92.58%] [G loss: 3.017939]\n",
      "4917 [D loss: 0.168725, acc.: 92.58%] [G loss: 4.321125]\n",
      "4918 [D loss: 0.184743, acc.: 92.58%] [G loss: 3.405622]\n",
      "4919 [D loss: 0.184491, acc.: 90.62%] [G loss: 3.552594]\n",
      "4920 [D loss: 0.178965, acc.: 91.02%] [G loss: 3.804412]\n",
      "4921 [D loss: 0.172652, acc.: 92.97%] [G loss: 3.053011]\n",
      "4922 [D loss: 0.197975, acc.: 91.41%] [G loss: 3.682899]\n",
      "4923 [D loss: 0.193975, acc.: 92.97%] [G loss: 3.464048]\n",
      "4924 [D loss: 0.231297, acc.: 91.02%] [G loss: 3.357641]\n",
      "4925 [D loss: 0.205731, acc.: 92.19%] [G loss: 3.309650]\n",
      "4926 [D loss: 0.223268, acc.: 91.80%] [G loss: 2.947895]\n",
      "4927 [D loss: 0.188884, acc.: 91.02%] [G loss: 3.783088]\n",
      "4928 [D loss: 0.208171, acc.: 91.80%] [G loss: 3.033327]\n",
      "4929 [D loss: 0.194121, acc.: 92.19%] [G loss: 3.288360]\n",
      "4930 [D loss: 0.213294, acc.: 91.41%] [G loss: 3.005728]\n",
      "4931 [D loss: 0.189800, acc.: 92.19%] [G loss: 3.044521]\n",
      "4932 [D loss: 0.195351, acc.: 91.41%] [G loss: 3.091580]\n",
      "4933 [D loss: 0.186799, acc.: 92.97%] [G loss: 3.213550]\n",
      "4934 [D loss: 0.183007, acc.: 91.80%] [G loss: 3.460110]\n",
      "4935 [D loss: 0.177469, acc.: 91.80%] [G loss: 3.287180]\n",
      "4936 [D loss: 0.181359, acc.: 92.97%] [G loss: 3.383267]\n",
      "4937 [D loss: 0.201666, acc.: 91.41%] [G loss: 3.113325]\n",
      "4938 [D loss: 0.187772, acc.: 92.58%] [G loss: 3.327797]\n",
      "4939 [D loss: 0.202762, acc.: 92.19%] [G loss: 3.676506]\n",
      "4940 [D loss: 0.210257, acc.: 91.41%] [G loss: 3.479877]\n",
      "4941 [D loss: 0.193514, acc.: 91.41%] [G loss: 3.701828]\n",
      "4942 [D loss: 0.147530, acc.: 92.19%] [G loss: 3.679171]\n",
      "4943 [D loss: 0.185979, acc.: 92.58%] [G loss: 3.284509]\n",
      "4944 [D loss: 0.196766, acc.: 91.02%] [G loss: 3.332041]\n",
      "4945 [D loss: 0.220677, acc.: 90.23%] [G loss: 3.209652]\n",
      "4946 [D loss: 0.190466, acc.: 92.58%] [G loss: 3.362188]\n",
      "4947 [D loss: 0.166499, acc.: 92.97%] [G loss: 3.496767]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "4948 [D loss: 0.199241, acc.: 90.23%] [G loss: 3.476346]\n",
      "4949 [D loss: 0.177106, acc.: 91.41%] [G loss: 3.884883]\n",
      "4950 [D loss: 0.158888, acc.: 93.36%] [G loss: 3.435302]\n",
      "4951 [D loss: 0.160287, acc.: 92.97%] [G loss: 3.408411]\n",
      "4952 [D loss: 0.182017, acc.: 92.97%] [G loss: 3.533692]\n",
      "4953 [D loss: 0.201000, acc.: 92.58%] [G loss: 3.310393]\n",
      "4954 [D loss: 0.184420, acc.: 92.58%] [G loss: 3.052211]\n",
      "4955 [D loss: 0.199216, acc.: 91.02%] [G loss: 3.747588]\n",
      "4956 [D loss: 0.177926, acc.: 92.58%] [G loss: 3.589108]\n",
      "4957 [D loss: 0.170698, acc.: 92.19%] [G loss: 3.525677]\n",
      "4958 [D loss: 0.175774, acc.: 92.97%] [G loss: 3.295786]\n",
      "4959 [D loss: 0.193861, acc.: 92.58%] [G loss: 3.223839]\n",
      "4960 [D loss: 0.181004, acc.: 92.19%] [G loss: 3.119286]\n",
      "4961 [D loss: 0.183957, acc.: 92.58%] [G loss: 3.768171]\n",
      "4962 [D loss: 0.161617, acc.: 92.58%] [G loss: 3.570989]\n",
      "4963 [D loss: 0.156503, acc.: 93.36%] [G loss: 3.664942]\n",
      "4964 [D loss: 0.186538, acc.: 93.36%] [G loss: 3.305699]\n",
      "4965 [D loss: 0.182882, acc.: 94.53%] [G loss: 3.297854]\n",
      "4966 [D loss: 0.151827, acc.: 93.36%] [G loss: 3.596328]\n",
      "4967 [D loss: 0.177836, acc.: 91.41%] [G loss: 3.348640]\n",
      "4968 [D loss: 0.166834, acc.: 91.80%] [G loss: 3.942062]\n",
      "4969 [D loss: 0.196767, acc.: 91.41%] [G loss: 3.470487]\n",
      "4970 [D loss: 0.189149, acc.: 91.41%] [G loss: 3.556370]\n",
      "4971 [D loss: 0.194963, acc.: 91.41%] [G loss: 3.053249]\n",
      "4972 [D loss: 0.203847, acc.: 91.80%] [G loss: 2.970602]\n",
      "4973 [D loss: 0.183484, acc.: 92.58%] [G loss: 3.464745]\n",
      "4974 [D loss: 0.185470, acc.: 92.58%] [G loss: 3.546861]\n",
      "4975 [D loss: 0.250707, acc.: 91.02%] [G loss: 3.162463]\n",
      "4976 [D loss: 0.208497, acc.: 92.19%] [G loss: 2.882717]\n",
      "4977 [D loss: 0.186480, acc.: 92.97%] [G loss: 3.179212]\n",
      "4978 [D loss: 0.189650, acc.: 92.97%] [G loss: 3.663980]\n",
      "4979 [D loss: 0.191892, acc.: 91.80%] [G loss: 3.490867]\n",
      "4980 [D loss: 0.209940, acc.: 90.62%] [G loss: 3.417099]\n",
      "4981 [D loss: 0.218508, acc.: 90.23%] [G loss: 3.389675]\n",
      "4982 [D loss: 0.220388, acc.: 91.41%] [G loss: 3.165253]\n",
      "4983 [D loss: 0.187198, acc.: 93.36%] [G loss: 3.359154]\n",
      "4984 [D loss: 0.176351, acc.: 91.80%] [G loss: 3.800806]\n",
      "4985 [D loss: 0.185815, acc.: 92.58%] [G loss: 3.708206]\n",
      "4986 [D loss: 0.162524, acc.: 92.19%] [G loss: 3.430694]\n",
      "4987 [D loss: 0.208664, acc.: 91.02%] [G loss: 3.805154]\n",
      "4988 [D loss: 0.200100, acc.: 91.02%] [G loss: 3.405630]\n",
      "4989 [D loss: 0.214536, acc.: 92.19%] [G loss: 3.287748]\n",
      "4990 [D loss: 0.187142, acc.: 91.80%] [G loss: 3.281596]\n",
      "4991 [D loss: 0.234217, acc.: 90.23%] [G loss: 3.226961]\n",
      "4992 [D loss: 0.202022, acc.: 90.62%] [G loss: 3.834147]\n",
      "4993 [D loss: 0.192197, acc.: 92.97%] [G loss: 3.276432]\n",
      "4994 [D loss: 0.176481, acc.: 92.97%] [G loss: 3.529891]\n",
      "4995 [D loss: 0.231342, acc.: 90.23%] [G loss: 2.811238]\n",
      "4996 [D loss: 0.202853, acc.: 91.02%] [G loss: 3.081892]\n",
      "4997 [D loss: 0.236252, acc.: 90.23%] [G loss: 3.334029]\n",
      "4998 [D loss: 0.219386, acc.: 91.80%] [G loss: 3.159977]\n",
      "4999 [D loss: 0.247766, acc.: 90.62%] [G loss: 3.132468]\n",
      "5000 [D loss: 0.220320, acc.: 92.19%] [G loss: 2.859486]\n",
      "WARNING:tensorflow:Model was constructed with shape Tensor(\"input_7:0\", shape=(128, 32), dtype=float32) for input (128, 32), but it was re-called on a Tensor with incompatible shape (432, 32).\n",
      "generated_data\n"
     ]
    }
   ],
   "source": [
    "#Training the GAN model chosen: Vanilla GAN, CGAN, DCGAN, etc.\n",
    "synthesizer = model(gan_args)\n",
    "synthesizer.train(train_sample, train_args)"
   ]
  },
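  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Every 100 training steps the loop writes a `generated_data` snapshot. The TensorFlow warning that accompanies each snapshot appears because the generator's input layer was built with a fixed batch size of 128, while the snapshot feeds it 432 noise vectors at once; the network is purely dense and therefore batch-size agnostic, so the warning is harmless."
   ]
  },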
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "outputs": [],
   "source": [
    "#You can easily save the trained generator and loaded it aftwerwards\n",
    "synthesizer.save('models/gan/saved', 'generator_fraud')"
   ]
  },
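  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A minimal reload sketch. The exact file layout written by `synthesizer.save` depends on the `models.gan.model` wrapper, so the path below, which assumes a standard Keras HDF5 file named after the second argument, is an illustrative assumption rather than the confirmed behaviour."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tensorflow.keras.models import load_model\n",
    "\n",
    "#Hypothetical path: assumes save() wrote 'generator_fraud.h5' into the folder above\n",
    "generator = load_model('models/gan/saved/generator_fraud.h5')\n",
    "generator.summary()"
   ]
  },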
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"model_6\"\n",
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "input_7 (InputLayer)         [(128, 32)]               0         \n",
      "_________________________________________________________________\n",
      "dense_16 (Dense)             (128, 128)                4224      \n",
      "_________________________________________________________________\n",
      "dense_17 (Dense)             (128, 256)                33024     \n",
      "_________________________________________________________________\n",
      "dense_18 (Dense)             (128, 512)                131584    \n",
      "_________________________________________________________________\n",
      "dense_19 (Dense)             (128, 31)                 15903     \n",
      "=================================================================\n",
      "Total params: 184,735\n",
      "Trainable params: 184,735\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "synthesizer.generator.summary()"
   ]
  },
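  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The parameter counts follow directly from the layer sizes (inputs * units + bias): dense_16 has 32*128+128 = 4,224 parameters, dense_17 has 128*256+256 = 33,024, dense_18 has 256*512+512 = 131,584 and dense_19 has 512*31+31 = 15,903, summing to the reported 184,735. The 31 output units correspond to the columns of the transformed dataset."
   ]
  },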
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"model_7\"\n",
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "input_8 (InputLayer)         [(128, 31)]               0         \n",
      "_________________________________________________________________\n",
      "dense_20 (Dense)             (128, 512)                16384     \n",
      "_________________________________________________________________\n",
      "dropout_4 (Dropout)          (128, 512)                0         \n",
      "_________________________________________________________________\n",
      "dense_21 (Dense)             (128, 256)                131328    \n",
      "_________________________________________________________________\n",
      "dropout_5 (Dropout)          (128, 256)                0         \n",
      "_________________________________________________________________\n",
      "dense_22 (Dense)             (128, 128)                32896     \n",
      "_________________________________________________________________\n",
      "dense_23 (Dense)             (128, 1)                  129       \n",
      "=================================================================\n",
      "Total params: 361,474\n",
      "Trainable params: 180,737\n",
      "Non-trainable params: 180,737\n",
      "_________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "synthesizer.discriminator.summary()"
   ]
  },
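  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The discriminator reports 361,474 total parameters, exactly twice the 180,737 trainable ones. This is a known Keras quirk: when a compiled discriminator is later frozen (trainable set to False) inside the combined GAN, summary() can count the same weights once as trainable and once as non-trainable. The network itself holds 180,737 weights."
   ]
  },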
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "models = {'GAN': ['GAN', False, synthesizer.generator]}"
   ]
  },
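  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "With the trained generator in hand, sampling synthetic minority-class records is a single forward pass: draw noise vectors matching the 32-dimensional input shown in the generator summary and decode them. The standard-normal noise below is the usual choice for a vanilla GAN and is an assumption here, not something read from the training code."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "\n",
    "#Sampling sketch: 432 noise vectors, the same batch size used by the periodic\n",
    "#generated_data snapshots during training\n",
    "noise = np.random.normal(0, 1, size=(432, 32))\n",
    "generated_samples = synthesizer.generator.predict(noise)\n",
    "print(generated_samples.shape)  #expected: (432, 31), one value per data column"
   ]
  },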
  {
   "cell_type": "code",
   "execution_count": 65,
   "metadata": {
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAmkAAAJ+CAYAAADoopfxAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nOydd3hUVfrHP296AknovYSOgCCIDRuurr23tfdV197ruqKwou7+drG3VbH3LljXxQoIKL136SEJIaSX8/vjvePcDJMGSWYg7+d57pOZc88998ydO5nvvOct4pzDMAzDMAzDiC5iIj0BwzAMwzAMY3tMpBmGYRiGYUQhJtIMwzAMwzCiEBNphmEYhmEYUYiJNMMwDMMwjCjERJphGIZhGEYUYiLNMAwjyhGRPiLyuIgsEJFtIpInIgtF5DkR2T/S82tsRMR526RIz6U+EZEYEXlARFaISIn3GmfW4rgkEblSRL4WkU3esZtEZLaIPCMiR1Zx3FG+a+lE5K0q+o339ckXkfYh5w7sW7nDL74eEJFR3nZRI593L9+596rPsePqczDDMAyjfhGRi4GngMSQXf28rS1wcmPPy2gQLgfurMsBItID+AQYGLKrrbftCRwK9A9z+Hkhz08UkTTn3NZqTpkC3A7cVJd5NhL3en+/BcY34nn38p17JVCjsK4tZkkzDMOIUkTkD8B/UIHmgDFAV+95X+AuICdiE2xkRCQJwDkn3jYywlOqb/b2PT7Me41VWmZEJBGYSFCgTQX+ADRDxdQQVFAtDXNsM7YX90nA6bWY55Ui0rEW/YydxTlnm2222WZbFG7ol67ztnFV9IkLeX4R8COQBxQDy4BxQJuQfit9Y+8JfAcUAguAE4EE4GFgE5AJvAK08B0/0nf8fcBtwArvnDOBY0POdzVq4VgHFHnnWgg8BKSG9A2MOwkVEjOBEmBU6H7fMW2AJ4DlQAGwFVgEvAH0818v4EbgFyDfm8t84H6gWTXzOBaY5s17mfd6pZbvY43vie9codv4asa90tdvfeh1rGFO5/mOfQGo8B5/E6bveF/fMu/vI96+JN++lbU898HAx959VQpsAN4EBof0mxQYu7p27/pWdf0meX1G+dpOAp7xzl8AfB5yj2SEu/7h2v1zCbNd5PusfOG9zhLvvFOAB2u8VpH+J2SbbbbZZtv2G9Au5B9+51oc80w1XxgrgQ6+vit9+zJD+pagFprQMV72HT+ymuMDX+Z/9PX/vJq5fRPyOgLt2UC57/mokP2TfMdMqGb8470+sVW8rsA2A59Q87XnEhQx/u28+npPqukzvpqx/a/57jreX/73Y7AnGpz3OruG9B3v6/uS97cQ6EwdRRoqDv3vqX8rAkb6+k4K7AsZo1I7dRdp4e7Xjb73IiPc9Q/XTg0iDeiGCsFw+zfXdL1sudMwDCM6yfA93uqcW1tdZxEZgfo0AaxC/WRaAS96bd1Ra1E4PgNaAv/ynscDRwEnAO298QD+JCIS5vjmqKUpDV1eAxVED/n6jPPNKR7oggoFgMOqcLhuCbwGdAJaUL2f0SHe3/eBdG8ug4GbgTXevrOAY7zHvwK9gA6olQNgGHB9mLHTgLHefK7xtZ9fzXzq9J445wQVQAF6OF3uvKiaU/TwPV7gO+8NIQEBTkSO9+1vDxzhPV3inJsNvBfYDZxTzTnfBeag4uyuavpth7fE+hjqalUGnIJe2yu9LomoqK0Tzrnx3vUL8K2rfkk8B10ibkPwdbdDraN1PfdI4GJf08W+c48H9gGSvX1noa+xI3r9n6hpfBNphmEYuwfH+x4/4pyb5ZzLQR28ndd+bBXH3u+c2wJ87Wv7yTn3qXNuE/CT15aAippQ3nfOfeacywP+QVAUDRWR1t7jTNS5eh5qhVkDHO0bo1+YcXOBK51z651zuc65lVXMH3SpFeAA4K/Aad58xznnAo7cx/n6j3bOLXfObSQoLCH8NdoI/M27Rn4h1b2a+cDOvSd1paIOfc9BRTQERcp7vv2hAQV+HGqVArgM9ZGsLQeiYhtgonPuQ+dcnnPuGYLO9n1FpHcdxtwR/uGcm++cy6JyoMYfG+BcK3yPr0R/NBwALHLO3VvTwSbSDMMwopOVvsdpItKphv5tfY9XBx54wiIQrdeuhnMV+tpW+R6X+B6HRpmGns8RFGkAbUSkJ/A9ajnpSPjMAslh2hY55wqqmHMof0Z90DoCt6LWqunAcp+VLuw1ovJrDXeNljnnyr3H+b72pBrmtDPvSW3wC4Dfozedc+PCWOb8+EXYPBEZhAYaLPfaBtWQSuID1BKZANxdh/lWdf2h5vfAz85mplhdxeM29X1e59wvwD2oP+JI4AHU2vubiLwvItWOaSLNMAwjCvEsWD/7mm4N18/3Tz7T19zNt78FuqQEGgQQ7lxlYZrDtVWF/3yCLmUG2IwGIgRE2KtAK09E/IvqKaxh/+8456Y65/qjS4DHAHcA21BrV2DZNew1Cnkc7hqV+s7jwuyvih1+T2rJp77HV4tIOKFbCRHZA13WDfAKunw5B+jpa6/SmuZdg1E19QtDVdc/9HngmhQHGgKRvd795V/m3RGqeu83h56XykLcf338VHtPOOfGoAJwGLrk+Zq36xTgjOqONZFmGIYRvdxNcBnrOi9ZZicRifcS3N4FPOft939hXycie3pi4J+onxGoo3lDcIqIHCkiqaiYDIi0X70lJb/gKwCKROQA4IL6moCI/F1ETkCd0r8B3iaYniTwRey/RneLSA/PP+tBX3t9XqOGfk9eBBZ7j7sAE0RkXxFJ8JaZw1mkqvWj83G2iFSpEZxzH6OWytiq+oThR4LvyTEicqKINBeRPwNDvfZFzrlAyhC/de0E7++1qI9iOLK8v91FpGU187hZRPp712isr/0r7+8GgkLtQBFpJSLN0WX06s4LaoX83TomIgNE5D40Hcpq1ArpdysIFauVqUs0iG222WabbY27oX4/xVQdQfahr+8ORXf62kb6+o/3tY/3tWeE6bs2zPl+j+5EHfQLw/RZ7Ht8ke98lSLzwlyT7fajucCqeu3jvD47Gt05qYrzr6zF+1eX92S761yL8Xujy7xVnSOwHY8Kw5W+tv5hxvvJt/+PYeZ1vK/vcaGvpxbzPZfaR3ceGrI/z/v7e7RkyNifhhlzlLdvlK8t3P36e3Sn1//FkHkVo0vd4T4fnQn/Gc0ADqrmPSkHhlV3vcySZhiGEcU45/6D/gp/EhU1heiXxSLgeXxWIOfcFWik2WR0qa8U9TN6BBjunNvQQNN8Ds09tgL1X5sNnOSc+8qb1zJ0yfNX9AtvBXAV8Ho9zuFx1IK2zptDERqkcC/eUrFTv7ITUeftX9Ev+2I0MnI0cIhzLn+7kXeChn5PnFqdhqHX/wfUUlWK5k2bjkYQHo5G8B5MMNhhmnNuYZgh/X5s1S5lOucmoLn86jLf11CB/ylqgSpDBdLbwL7OuUm+vt+iP1IWE8wvdwaV3QD8XIuK8JoSPF+Bfp42o/fJl+h7738
vbkCvRSZ6P31M5UAQ/2tai1qF51N5qRT0vX4KmOXNq9z7+w1wjFOftSoRTwUahmEYRq0RkZHA/7yn9znnRkVuNoZRPSIyimDppsP8YjCaMUuaYRiGYRhGFGIizTAMwzAMIwqx5U7DMAzDMIwoxCxphmEYhmEYUYiJNMMwDMMwjCjERJphGIZhGEYUYiLNMAzDMAwjCjGRZhiGYRiGEYWYSDMMwzAMw4hCTKQZhmEYhmFEISbSDMMwDMMwohATaYZhGIZhGFGIiTTDMAzDMIwoxESaYRiGYRhGFGIizTAMwzAMIwoxkWYYhmEYhhGFmEgzDMMwDMOIQkykGY2OiIwUkTWRnkc4RGSliBwR6XkYhmEYhom0JoiITBKRHBFJrGX/DBFxIhLX0HPzzudEJF9EtnnblsY4b02IyHgRKRGRPG+bKyJjRSS9DmOYCGzCiMhZIjLVu783eY+vEhHx9RnlfQb2DTn2Iq/91pD2NSIyspFegmEYjYiJtCaGiGQABwMOODGik6meIc655t7WIlyHxhKNITzsnEsF2gIXA/sDP4pIswjMxdiFEJGbgUeAfwAdgPbAlcCBQILXR4DzgWzgwjDDZAO3i0haY8zZMIzIYiKt6XEBMAUYT8iXgIgki8j/icgqEckVkR9EJBn4zuuyxbNsHeD92n/Vd2wla5uIXCwiCzyL03IRuWJnJx5YJhWR20VkA/CiiLQUkU9FJNOzDn4qIl18x1SyXIWZ9/ne680SkbtrOxfnXJFzbhoqdFujgg0R6SUi33jjbRaR10SkhbfvFaAb8Il3HW/z2t8RkQ3eNf9ORAbu3JUyog3P2no/cJVz7l3nXJ5TfnXOneucK/a6Hgx0Aq4HzhKRhJChFgCTgRsbbfKGYUQME2lNjwuA17ztKBFp79v3T2BvYATQCrgNqAAO8fa38Cxbk2txnk3A8UAaKmD+LSLD6mH+Hby5dQcuR+/hF73n3YBC4PHaDCQiA4CnUMtFJ1Rsdan2oBCcc3nAV+iXK4AAY73x9gC6AqO8vucDq4ETvOv4sHfMZ0AfoB3wC/reGLsXBwCJwEc19LsQ+AR4y3t+fJg+9wA3ikir+pueYRjRiIm0JoSIHISKmbedczOAZcA53r4Y4BLgeufcWudcuXPuJ98v/DrhnJvgnFvmWQu+Bb4kKGRqwy8issXbHvW1VwD3OueKnXOFzrks59x7zrkCTzD9HTi0luc4HfjUOfed9zrv8cavK+tQ4Yhzbqlz7itvfpnAv2qaj3PuBc+yUowKuiF18XMzdgnaAJudc2WBBhH5ybu/C0XkEBFJAc4AXnfOlQLvEmbJ0zk3E/083d5IczcMI0KYSGtaXAh86Zzb7D1/neCXQBsgCRVuO42IHCMiU0Qk23P8P9Y7R20Z5pxr4W3X+doznXNFvvOkiMgz3pLlVnRptoWIxNbiHJ2A3wJPnHP5QFYd5higM+orhIi0E5E3RWStN59XqeZ1i0isiDwoIsu8/iu9XXW5Vkb0kwW08ftROudGeP6WWej/4lOAMmCi1+U14BgRaRtmvL8BfxGRDg07bcMwIomJtCaC51t2JnCo5/+0AfVrGSIiQ4DNQBHQK8zhLkxbPpDie/77l4Vo1Oh76PJpe++LaCK6FLizhM7lZqAfsJ9zLo3g0mzgXFXOE1iPLkcG5p2CLnnWGhFpDhwBfO81jfXmONibz3lUft2h8z8HOMkbIx3ICJm/sXswGShG3+uquBBoDqz2Pp/vAPHA2aEdnXMLgfeBu+p/qoZhRAsm0poOJwPlwABgL2/bAxUXFzjnKoAXgH+JSCfPwnOAJ7gy0WXAnr7xZgKHiEg3b2nuTt++BNT/JhMoE5FjgCMb6HWlon5oWzwfnXtD9s9EHbDjRWQ4usQZ4F3geBE5yHPQvp9afiZEJFFE9gY+BHJQv7jAfLZ58+kM3Bpy6EYqX8dU9Ms7CxWTD9Tm/MauhXNuC3Af8KSInC4izUUkRkT2Apqh1tjDUR+0wOdzCPAQ4aM88ca7GAgb/WwYxq6PibSmw4XAi8651c65DYENdbI/11uGuQWYA0xDl+8eAmKccwWor9ePng/N/s65r1Dn5tnADODTwIk837DrgLdRAXMO8HEDva5xQDJqCZwCfB6y/x7UOpiDfqm97pvnPOBqr22916emJLu3iUgeen1eRl/7CG+pFO8cw4BcYAJq7fAzFvirdx1v8cZYBawF5nuvwdgN8QJFbkIDcjahgv0Z1LesFzDTOfdlyOfzUWCwiAwKM94K4BVU5BmGsRsizoVbyTIMwzAMwzAiiVnSDMMwDMMwohATaYZhGIZhGFGIiTTDMAzDMIwoxESaYRiGYRhGFGIizTAMwzAMIwqJq7mLEQnatGnjMjIyIj0NYzdixowZm51z4bLXNxp2X0eOLWWwqghSYiFeoNxBQQX0T9HnuyrRcF8bRkNhIi1KycjIYPr06ZGehrEbISKrIj0Hu68bHudg6lZ4cxMsKYReyXBKazhlniZUO6+9ZqZ+exPslwIt4qFPMrRPgPPbQ8fESL+CuhEN97VhNBQm0gzDMHZRtpbBP3+DjzdDYgyc1Q7m5sMnWbCtHDKS4JsceG0j5JbDrL2haxKkx0GSwL/X6jgd4mHP5vCP3+DdgXCo1TAwjKjARJphVENpBXyRDZtK4eB06JNS8zGG0VDkl8O/foMPN0OMwIYSODAVLumgbXcshxKnzsbXd4aHe8GMbXDIL3r83jPUiubQf/7NBZrHwYBm8GOuWtkuXQiL99PxDcOILCbSDKMKFuTDsXOga6JaJO5YrpaKR3qD2BeY0cjklOr92CURHu0Dn2fBE+tg+jaYtg2yS6F1HKwvheax8GUOzJ0DA1KgFBBUvHVKhNXF2lbqoLwMzmwL7eLhpfUq2ublq2UtwMYSWFEIvZOhTULleRWVQ5xAnIWhGUa9Yx8rwwiDc3D2fLi7G/xvL7ilK3wxGH7Ihbc2RXp2RlNiWSEcPQs6/AQ/b4VtZdApAfLK4fx2sKIIyhykxkFyrP5T31oOCwvgqxx4eh0koNazElSg+Sl0cOUSeH8zxApkl+nYoJbkKxdB/5/huqXQ52e4fokGHUzdCgf/Cuk/QKsf4erFaukzDKP+MJFmGGGYX6A+PF0SodcUOGMenDBHfYCeWR/p2RlNhYJyOHwmHN4SzmirFq+9msMRs9TR/5VNkBQDuWWwvhiWF+lyJkA5sFczqHBQXN1JPBwq2ACWFujf+1fBqmJYuT/8vDcs2w9m58Mdy/TzcEVHyD8YFu+r0aPnL6j/a2AYTRlb7jSMMBRVQDxw/kJ4awAMagb/WQcvbVTfncwSaJtQ4zCGsVO8mwntEuDh33Q506G/rJvFwDubIKes+uNn5tf+XKWeQIsHfs2D8zuoFW7yUA00AGgVD+N6wyG/wl86wXkdtL1DIrzYH7pPgcUF0Nd8Nw2jXjBLmmGEYUgz2FAKh7VQX50h02FFMSSK5pQaNE1zThlGQ1FaAe9uUsFUXA4T99R78eQ2sLVCBVht/oHX1X2yFBi3DlK+g82lcOpceGWD7iup0OjRvH
JoE1/5uIQYGNwMlhbW8YSGYVSJWdKMJsuWUnhlIywuVEvZue3UaRrUCfqgdJiQBVO2wkFpMD8fWsbDCc3U0nbPCnh5j8i+BmP3ZEkBHD1bl9cduoR55RL9+9Fm7eNQAdYlHtaUVj2W28E5FHkHbiqFixbC695nJU6geQzct1IjR1/pr5+X/HKYkaefJcMw6gezpBlNkiUFag37aatGrH2RrdayNT7r2Dnt9AtnbTG0jodD0tWS9nYmfJatAu/AX6DPVEj8FvaboaLOMHaWCxfCDV3UahYD5FfAyiINaPH/sq6geoFWH2ws1eXVz3OgoAzWlcBLe2igws9b4cHV8EsenDxX59stqWHnYxhNCRNpRpPkpmVwY1d4YwBc3wXeH6TO2afMg2sXw8ULYHqeWswEtRKM3wDf5kK3RE32GYNa10oqYMMI+Gt3+PMi+DI70q/O2NXYUhqMjFxVpBGdV3VW5/9yIC0G2sVpgECJ77jGygST50Uj5JXDfqlwyzJ4cw9NT3PfKjhznroGPN23kSZkGE0EW+40dgsm5cCYVRp5lhYLF3bQtBnJsdv3LXfweTa8OUCfOwc3L9NkoJtLYfY2daJuHqs5oCqAVzfpF2X7ePh+CJyxQL8gL+qgkaATs+Hc9lBYAWNXw5GtGvPVG7sqM/Lg2iUwJ1/vw+Naw41dtHqAOPhvjkZvljjomaB/c31pLnZ0KbOuxKCfgxIHr+4Bj6+F5zbA6B76g2fa3o00EcNoYpglzdjleS8TTpsHU/PguFYwpDmMXgX7zIA8X/Sbc7AwX0VYvECh92U3MVtF29Wdgn4++zRXq0Ep2haLOmv/VgLdf9YItnLghQ16vrleFN3B6WpdM4yaWF8Mx8zWKMmcA2HdCE35cuNSSImB9zZrRYHjWusPhpkFsKW88YSZnzTvx07fFPg6B67tAh9lwmNr4aTWEZiQYTQRTKQZuzTOac6mWIHP9oQX94D3BsE/emrepifXab9n12nSzT2nw6EzVYhdtkiPf2sTXN0Z/r1G2xfuC3/vqccle+fx5+h0QI7XUFwBT62Fvsnq5P3mJv2i3RYmNUJZBbyfqV/CY1epr5vRdHlhA5zeVlNdxMVAWhz8s5dayq7rDH9ZrBGT322BzWWQHgOP94rM8scW736fV6CWv1uWak61NcXqO2cYRsNgIs3YpckuU2tD63g4yFcU+uS2wTqHLb+HKxZreZypQ+Ge7moZ+zQLBvwMP+XC6JXqnN0rCTok6PIn1JwEtNjBtgq4ZJGKwJuWqaWu02R4aLWKQNBl06Nmw8OrNVv86mIYMg3e2KjWvYpImEeMiLK8EIY2r9wmom1pcTBnH9g/VR33BfULu3YZ1JAarUFJRJdfX9ukP0wmDQlGRBuGUf+YSGtERORoEVkkIktF5I5Iz2d3IDVWv9hyStXXLMDkXF2uTPP290tWq8TzG+DWbnBfD+ieqMuTh7eEFnH6pbOiCDpPVh8hCGZvrwtl6LnHrtRgA1BrW4LAT8P0/Hd2U4vbeQvgmDnQdyp8tQsGHNg9veMMS9WoYj/FFTBpCwxNhY6JcFNX6JwQTMMRaS0fG6NW61ZxakV7YHWEJ2QYuzkm0hoJEYkFngCOAQYAZ4vIgMjOatcnIQYu76hfbg+sUsvVskK4Zgkkx8DNXTUp6EHp8Ome8MYmTbNxYhtdskyJ1S/L7DKtdViOZnGvjy/D3ApdUu03BW5arikMOk+GDzLhuDk6h4xEeG8APNUXzlmg1pVdBbund44L2sOsfLh5qfo4/rxV01gcnA4DvVxjezXXPGXdEjQ/WXqYQJjGIBBFOjQF/txRlzj3SIFH18I6W7Y3jAbDRFrjsS+w1Dm33DlXArwJnBThOe0WPNATTmgDo1ZC0nfQf6qKrt7JcFobtaR9kqXLkIOaqZP/nG1alHppIbywHl7qD32SNUnnjgq00A+ToNaPxUWwZwo83EOtIn+ap2LsX79pFYNLF0HbeLiwPTy/a9UFtXt6J0iNg+/30ojgP87ShLEHpwcTJJc7jabskKA/HMpc5MLxA+WopudrAtvnN2gk9aBm8H1uhCZlGE0AE2mNR2fgN9/zNV7b74jI5SIyXUSmZ2ZmNurkdmUSYvSLbfX+0DNJl4n2bKY+ZkNnaImnzaX6RfhDrjpkn7sAOsZrMtsvBsMxrSFG4MehO/5FWEHlvFV+sTevAP61Bj4ZpMuhgjqJ75mikXJHz4bOibC+hF2JGu9psPu6OjokwpN9YdUBMH9fuKu73s9vbISeUzRZ8qYS9XsEyCqvfryG5prOcGJr2FYOj/eByVs18tQwjIbBRFrjES7vZCWjjXPuWefccOfc8LZt2zbStHYPZm+DE+aqk/VhLWBUd7U+DE/VL5KTWmvuM0Gd9pNiYHRPGJEGLbwahAekwePr9E1ptoOfjFArXCzq++ZQS0ivn7Utv0Lzur0+EC7pCEe21AjUA9KqGNfB1K3q2/ZFdmX/uwhS4z0Ndl/XlUk5cOsyeHsgrBkBR7QM/qNuFQcJEZpXBfDoGngnU+uIljlNZVMQYeFoGLszJtIajzVAV9/zLsC6CM0lKvhvDoz8FVp8D8Onw5sbd2ycH7bAEbPUNyYjEV7dCKfO14jJr7PVz+zLHDi7HczYG7YdrAEHzsGCgmBk5Z3d1IKRHKOVBnbWQCDouRNElcu93b3zAq3j1OK3rBC+36IJcbPKNCFuKEXlcMIcOHc+zNymNUOHTtc8WxHG7ukG4LG1MCoD9kuDjSVq7f3FSxabHVJxoLGIAfb3ftBkl6n/5ONr4dB0KNiR6BrDMGqFBU83HtOAPiLSA1gLnAWcE9kpRY5vclR0PNpHoytn5MFVi9U/5+KOwX4z8mBePvRLgX1T1b8M9Nf7t1t0ifL+lfBIb7h0oeZG+1t3LYr+RU7QrHNoC3isT9BqlpGkVolOiXDbMri/B/RKVpH03PqgwNoZYtDzryhSX7esMrXulQP/7gU/boX7lkMRunw0JkMDGUJ5cLU6jS/cV/NpAdy9XJdsr+msKRt6JG9/XCNg93QDsLo4WKR8U4n6pA1OhR5JKsyLXeNGecYDL/aHB3+DxfuqT2ezWPWj3HM6PN+/ESdjGE0ME2mNhHOuTESuAb5AjTQvOOfmRXhaEePvq2BcbziznT4/spXW0TxjnpZaKqiA0+dpxOWBaVpBoGsifDhIUxRcshD6p0BmKSwuhCNaaHLNeOCZ9ZpYNgb9MskvV2vW8XPgui4qmGZug+Fp8P5ArbfZ8ScVSGmx0DIOBoRxiI6jbjmqAiKvAvUpGrUKkkVzYF22CNLjdf4B7l6pgrVXSuVx3tik1yYg0LJKVaD+tFWthyuK1BfvpT1g3yqWSxsCu6cbhn1TNYff/ukazLK5FL7I0nv6/3rCjcs1oKCx6J0CZ7eHCdkwciZc2lFTzDy1TuvVdkpsvLkYRlPDRFoj4pybCEyM9Dyigdn5cFjLym37pOkXUl65WsfSY2GJZz2qcCqmrlkCE7K0hNNj6+DwFhqheb+Xr0nQJaI4UXGUX65i6aMstT7MWaQCsH08XL7ISyrbH
D4frF823RLVcjfiV/1wVHjbUS11ybQ6YtjeApcSowXYE72o0cs6arqFtcUwp6Dy8ZtLoffP8ExfuLxTsL2oHP6Xo1+KreLU/65/soq0UqfCckkhHPwLvNAfzu1Qt/diZ7B7uv65uSuM+EXvmVPbwqlt4Pi5cGZbTePSmAJNUGvenHyt2flplm7JMfDuQF2SNQyj4TCfNCMi9EnWvFB+5udrWoLmsfDKRi3NFLAeCboE9OpGXRJ9+De4t5sKNr9LTAle/U1vSagcSBQVZXHA1nLonqBLj78Vw30ZsKgADpkJZ83TElEDm+kX4jGtdOz0mMpLp378nvMpMcFfPYI6eKfF6mvql6LC8aPNcENn9UELcFk7DVQ4wSvKfsXioDN2cQVUCPzzN0gSFaSf5+hSKajY65GkDtwlwHkL4aYlmhvO2DXpmQzfD1UL6alz1Zp8a1e9Xz/PgQNTg31TGuE/eP8UvdLQmnYAACAASURBVM9iRHP7PdsPHuljAs0wGgMTaUZEuLWr1gD8YYs68M/dBucvgFu66pdBQUUwcWd+ORw7G/62QoVSl0QVUjcuhyIHqSF3saNyOacE0aVV0OXQZcUqfqbmaVLZaXnQJ0nPOXa1OuZf2wWm5EHbOC3H0ydRj02N1Vxng5J1WTQJ/RC1ioOLOupyaFqsCqp902H9gfDNEB27XwrklmletIDg6xAPH2WrdeSO7sEP5GNr9O9LG3TszFIVqDPytH2hl/T2rm4q3PZPC0akfp4Dtyyrj3fJiBR9UtQqumg/mDRUcwEe3Uprev6wt96XoPdViwZOgTGvYPvyVYZhNA4m0oyIcEpbtZRdsgiSv9O6lue2h5u8Ys3HtYKn1+lS38iZ8N8tcHxr6JmoouSXbcGx8mqwGuVVwAULIT0uaHULiKQK4KMBKrxm56vV4oHVMDELXu2v5aIqgCXF0DJel2JjRf3gcsrAiVrN8ss1PUY5KuSOaRXMxJ5Xrla0JYUa0dkmQccAOCQdLuukH8SJWcE553rObx9u1sS3n+wJ4/qos3bgQ5sk6ucWF6PCtq/nyxaDiru8SBZ5NOqd1NigD+O0YcH2LTsb4VINDq0w0NH8zgwjIphIMyLGue1h0b6aQPaPLXVJr9n3kPodrCvREP+RM9VXq3ksfJerCV/bxdf9XCVOlzjDfZ8dORe+zg32c8C9K2BynloyuiWqxQugU7wu/TSLVUvfWe1VxL3YH4oOgWNbabLPL3NUmGWWaPRoYbmWAYoTLQjf3fvSe3szPPGbto9dHUz7cbHnV5ZdqlaVI1tBWYX60L3jZaQvdHpcdqn6ue2XqoJxaaFa+TbuWolxjRo4s536g03Jhe4palULUF/LnqHDtI+Dh3rWz9iGYdQdE2lGxFhTpAXGD/pVl/I2lejSZ6nTBLS5ZRrR1i7eK53TQq1d+6fWPPaOEAf0S1KhU4qWmXo3E0ZnqCj7Ry/4Q0sVYi3i4em+Ks5OaA2XL9ZqAYe0gGfXazTn4kJo95PO+aIO8K/emk7hoV5wRSeN9ATY6jS5rfPOO7gZ9PFSMAxpBksKoNcUuHOFirKLFqufHei1ihVN4fHBZrWuZSRpLqsuZv3YrWifoJU1TpgLh83UAJI40WXuWJ81OQG9h0MzDVeVBDfF1zHUKP3agGDaG8MwGh8TaUZEmJAFQ6bB+5vVSlaOVgEocpoHqszp4wWF6kRf4eCzHP3i+Ti7YebUI1HrbDaLCYqgW5ap2LqmE9yxDF7dpD46Y3qoFQ3gnYFq5er/M9y1HNrEw21d4cEeWiD7zm7w1wwVU5d0UIvhBR0g/xA4PyQB/9ltYebw4PPLO6vg21yq44IXMeq0rBXo49xytagd3QpWFsGFHSDJyvXsdhzXWsuf3doVbu0G6w7QtvKYoDArRQV/aKBLl0T9gdMlvrKA65III0J++HRLgC8Hw+GtGvLVGIZRE5aCw2h0iis0z9nYnlqGaU6+tudX6JdHSow+DjC3AG7pDIuKtFB6Q/2wX+L5kKXGqh9ZsdOlzb+vrtxvRRHcv0KXLXskwh0rVFCKg5Hp8N4g9V8DTaFw4K9wZ3cVaX9qpyKv71TNabakUPPAvT5A88CFWi2WF+rSaLxoROzXOVpVoUWcRtd9naNLqhnJanmcla/7AoESxu5Hciwc2zr4/K2BcOtS/Syt3l/zC/6Sp9bf7DLP2oreF/ML1OoaID0WlhXBqmJ4vLemfom3n+6GETXYx9FodH7K1bQRbRM0e7kfh1qB9vZFk6XHak60BfnBPg3FwBS1SgUKWu/lzSOB4HKRQwXjLcu0/NTGYrW8OeB/uWpRK/K+CHsnqyUw33suAqN7wOL94MYumpz3+6HQLSn8slJ2meaTe6SP+pn1T4HVJSokf87T/QkxmuutT7J+IX8xRAMJjKbDpR3V+vzUWi019s0QzbNW7oK5+37J1xQ0gd8/scCEwbB+hB5b7EygGUa0YR9Jo9GJE12ie2Fd5fqYCejzJ9dpSo4AKaIZ9Xt5pY8aUn/MKwjWIoxFo0jbxEFsjOYh652kFi8IisW8Ck30eVRLTa6bUwZnz9d9k7eqH1pqyNJjuwQ4prWKq+p8fg5roVGfB6bD+D1gxnCt47i+BP7QAlbtr8ENJ7eBu7rDsv10TKNp0b8ZnN4W/vGbBt2cu1BLSCXHQMeE7f/Rx6M/EA5M1x9LLeO0cLphGNGFLXcaDUpumTq+d0tSYQJwgFdZYPY2/bVf5okifzBiMSqSyoH1ZSqEuiUF2wLEsL2zc33QNUEDGYqBzWVBMbmpJGhlC9AyDk5vp2kKTpijedk+y4LXNqqP2tieO+583TdFo2AP+lUtb8kxmppkaKrWPQ2k8ghXmN1oWry6B/xnHfx1pabquLmr5t5Lj4OHV8GbmzQAZ3UxbB4Byd5//8m5en/GmfXVMKIOE2lGg+CcJm19daMKiXKnUZFvDVRh9lQfLXUTL/qrvjTk+BhUjAVE2LYK+Cpne0FWnwKtdayOl1MOm0qDCXEDcwFNVNshUYMJAuSUwaxtapW4uQvcv0qT2j62Bh7vAye02bl5/V8v+DgLXt+o/nwXdYDz2gcFmmGA3g9XdFY/yMNmapLmJ9fq3+l5ugSaINDrZzh9PtzQRX3Uxq7S4IHTdvI+NQyj/jGRZtQbFQ6eWw8vrNe8XbnlGuV4Zjv4IltTWpw/XwtHj1uj4iy3HBLRiEp/sEBFyN/Qxw3BSW3g5U0a6TY1D/ZIht+KYJvPCW5NKVCqWd79SURPnqvlnsatUWtXfAx8O1QF6c4ionM7yb5EjVrQNkGXxd/L1B8Px7aCl/preTKA9wbqcvyMPF2KT4/TyOErO1U/rmEYjY+JNKPeuGWZBgWM6QEnzoERafBWpqYKuLKT3my3r1BxM6w5pKerKCpGLUShtInXZdGUmKCfWH3QMlatZaG8sFH93ablqR/P8iJdAkqPUYdrf8BCru/4CjTtxQ3LoHmMWtr+0ql+BJph7AiJMXBOe91CObGN+jK+vkl9Gw9K19QtZpk1jOjDRJpRL6wrhvEbYPl+kBanKSleHwDXLIFh071U
AJ7QebQX/JSnpYvC0SoWssuDZY0KKurX9yycQAvggD1SoHMS/DcHMg/UYu+fbIZvtlQOKigD3uivpa3KUWvg1nK4trNuhhGttEmA67pEehaGYdSE/dY36oVft8G+qZqJP0Y0Me0/V8N3W9QilnMg/LGVWqqOmA2vbdClmAD+sjbZnogq9pmu6kOg1WQoiEM/ELMLtPB7qzgt73RsK/hpq/rV4fVJjNHqBB9kae1MgOyD4ahWWkbKsrQbhmEYO4uJNGOnKCzXiLJn1qmQmeflMvtzR3hinebw6pkEY1bBxGy94fIr4MjWcHjL4DilFeFFVHPfHdp2JzLoZyTCASFZ1QMZ2gOlQMtQS1qrOI0mbZegvmY9p2iS2uFeaovUWBVpxU59egSN8HROlz3bVVV/xzAMwzDqgIk0Y4fJK4NDZmp9yz1S9GYaOh2eXgv/6gWdEjQVwCdZWjy8X7L2KatQS9qMrcGxEiR8kto0nzDLrGaZMhz+m3tlsS6x+gmUzokRzcUm3vNCz2xX4eC1/tCvGXyYpcukvZOgqAJGttCKAy9u0DQZF3vlnmJFU4wYhmEYxs5iPmnGDvPkOrVQxYim2uiVDIsK4C9L4PZlkBIL+6VqEfJmsdBvqgqYn7ZqqSe/KMv3MqMnCxR4O+KADaG5OYAO8eHb/VS12piELk/6c50VOxV0DuibBGtL9Pn8Aih0MGefYN+CcjhlrqYDaRsPa0pgaaEmDu2WBB8PsqVOwzAMo34wkWbsMJ9lweBm8PwG9SlbUOBFNFZo4MCS4XDwTHh2PeSXqXiZkqdiSIDmAnk+peYICrS9m2musqyyys76l3SAT7OCVq/q6JKolryAZSwGGJKqqQdCqUA/DOe0h/tWwcHp+nr+/ZuW3AmQEqtllxYVaJqRXkkq+NLjoF9KXa+gYRiGYVSNLXcaO0zzWE2x0SZeE7lmHgibD1TxVebgpmXw3VDPb22DiqqkGI2ETIxRIVcVv+TDbyWVU2+UA89tUCtauEP9BqxYVKAl+Bor0HqXZVWcswwYtUqPjRWtIbqyKHzffimapHZAcy2UbgLNMAzDqG9MpBk7zIUd1Nq1qUQLgG8rg75TNflrBfDaJjhtLgxopg72D/bQTPmgedHKqVy7E2B/zznfL8ICfeIIOvsTsj82pL0MiHEatOCnVRwc3xraxwfbktFggPRY6J6oxzaLhcEpMDwk2MAwDMMwGgsTacYOc3pbLYpe5ODPC6HzZE0AO6xZsM/PeXD/SjihFTywGo5ppe3NY6FHUrAsVOBGnLqt8jni0RQYAWLZPh3HUS3Vl60CSBKdE6if26/5lcVbdpkul270fNriBArRIupby9UCeGQLHe/m5XBvxo5dG8MwDMPYWUykGTuMCFzRSYXRpC1q/eoSD7O8NByCip7sMnhhg95sJ87VfVvLYdlWFXhlBIWX34L2QA/4S2eYkK1izhG8YXsnwR9b6OMvciqXbiohaH2L9cYMPA+MsU+zoJiLR4Mb/tNPrX6T82BOPnwwCA7zpQkxDMMwjMbERJqxU9zfA3oka4H0CrS2ZSBTRmCpMRAocGd3mDUcmm0CbgBOBwr0JgysPsZLUFBd3VmXUW/uAqVOyzkd11r3bSmH3inwev/K82kRq8uiCUCPRG1LiQkus6agS6/7pGs+s3JvafbQlnBJR3i5v9be/GCQ+tkZhmEYRqQwkWbsFC3iNUVF78Rg2ymt4ciWsKwoaBkrA25cCnvfBvlnAbOAIoj7QIMJ4kTTeZS5oMj7cQu8vRFe3qjP5+8H7+8JR7SALWXw1kZ4cr2KMAESRZPQDkuFyzrruIkxMHcfLfEEcENXFXxPrtN9h6Tp+W5eCrcug6Ez4MYu0N+3ZGsYhmEYkcBEmrHTxAo4z/ErkG/sy5zKfdwPUHEdlD7F78otIQHK34TkYrigA3RKVLG2VzNNYnv8XDh3oVq8uiZAW2998vz2anEb0lwT5A5L1ZxlzWOhTzKM7gFbvdQdXRPV0vfrcI30fGUjHJSm+dvm76uF0rskQIs4DR74egjc0b2RLpxhGIZhVIOJNKNeWF+iiWAr0Oz8v7MGuBK4B5jja08EKYNWidB5AnywWX3XYoDFhfDRICgfCdOGqaa7yyecjmylVrDT2qlwu6wjrNpfxVqpgwdXQ89k+HmYlqD6bosWlH60N6wvVR+2ogro+JPmQnuxP9yTAX/LgIFmQTMMwzCihEZJZisiTwNrnXOj67OvEXnKKuCzbGgVrxGTF7WDlzdBRQnwCPA54aujO3CJUFwCK8fDw9fArevhtQGQXQoXLdT0Hq3j4fauGqAQoEOiCrMX18OYHtAxEe5eAUsKYfreekyAZ/vCafPU+tYzSYuiryiG1cUwMAUe6gUjzPfMMAzDiELEuZrytoOIrAQuc8593eAz2g0RkTOAUcAewL7Ouek1HTN8+HA3fXqN3SLK6iI4cpaKopZxGoXZJg6yvgc3BigIf1xiHyjORnNflACJ0OI8ePXvwcAA5yC/XDP855fDtDw9x17NNarUOY0YfX495JSpD9zt3XTJNJQVhTB+A2wsgUNbwGlttfB7U0NEZjjnhtfTWHW+p2HXuK+NXYv6vK8NI9rYaUuaiMQ556pK4m4oc4FTgWciPZH65MrFGjX51wx9/sQMuPYJcOOptmZT8ZKQhjI4cG1QoIEKseZx8Mw6uHO5LkOuL4a0OHhvoPqZXdqxcsmmquiRDPf1qNtrM2pkt7ynDcMwooka7Qki8grQDfhERLaJyG0i4kTkUhFZDXzj9XtHRDaISK6IfCciA31jjBeRMd7jkSKyRkRuFpFNIrJeRC7ewb6tReQTEdkqItNEZIyI/FBvV6eecM4tcM4tivQ86pMtpfB9LtzcVZ+XlcHfjgT3IjUX1fSRnAwPPQQff7T9vh9zYcwqmDoMvh8KS/ZTUXjqPLWkGZFjd7ynDcMwoo0aRZpz7nxgNXCCc6458La361B0qeMo7/lnQB+gHfAL8Fo1w3YA0oHOwKXAEyJSVdrQ6vo+AeR7fS70tl0WEblcRKaLyPTMzMxIT6daypyX38yL6vzPf2DLljoMkAB/OAUyM+G22yAmzJ34/HrNkdbHq4spoukx8svhl23b9zeik13pvjYMw4gmdsYzZ5RzLt85VwjgnHvBOZfnnCtGfVWGiEhVLtmlwP3OuVLn3ERgG9CvLn1FJBY4DbjXOVfgnJsPvLQTr2enEJGvRWRumO2k2o7hnHvWOTfcOTe8bdu2DTndnaZNAuzZDF7aqFa0u+9Wq1itaA/Dn4H/vg/NqommzC6FziE+ZiLQKUH3GQ1LfdzTsGvd14ZhGNHEzvik/RZ44AmmvwNnAG0JxvO1AXLDHJsV4sdWADSv4jxV9W2Lzv833z7/40bFOXdEpM4dKZ7sC0fNhmeehi25UFFefX9J0nxq/3obbjyk5vH/0BJe3ag1QsWz2K0o1LJT+6bt/PyN6mmK97RhGEY0UVtLWjgPIH/bOcBJwBHo0mSG1y40HJloIvsuvrauDXg+I4TBzWHWUJj3KMQmVd/3tNNgayaM/Rv8/FT
txr+0I6wtVh+09zLh8TUwcibcnwHpjZI8xjAMwzAiR21F2kagZzX7U4FiIAstj/jATs6rRpxz5cD7wCgRSRGR/sAFDX3eHUFEThGRNcABwAQR+SLSc6ovPnoViraCVLH8mJICN94I774LzZvD1VfDf/8L8+fXPHazWJi0F4xsoSk0fs6Dl/eAa7vUfKzRsOzO97RhGEa0UFt7xFjgMRF5GBgTZv/LaADBWiAbzS//l3qZYfVcA4wHNgCLgDeAqMuX45z7APgg0vNoCDZvhs6doaICNm6E9HRIStJAgMREWLoUZs2CE08MHpOQAP/+Nzz3XM3jN4+D67voZkQPu/M9bRiGES3UKpntroKIPAR0cM7t0lGesOsl/Xz3XbjnHvjyy6D/2Pffw/XXw1NPQXx85f59+0L//o0/z6ZMNCT93NXuayP6iYb72jAail3as8db4kxAq0Lug6bouCyik4ogZWUQGxsUSfPmqRCKjW34c//yC+TlwQEHVG5PToaBA02QGYZhGEZd2dWL46Sifmn5aP62/wPCpEXdPZk5U5cZA5x7Ltx1lz5etw723hvGj6/6+OOPh7Fjqz/H9dfrcmVNPPAArFmz/bZqlQk0wzAMw9gRdmmR5pyb5pzr7ZxLcc5lOOfGuh1cvxWROBG5QkQ+F5HZIjJLRD4TkStFJL7mERqXhQth2DD48EN9PmsWTJoEzzwDmzbBqFG6pHjnnVAaxql/2jSYMAHuuw9KSoLt//sfPPEElJfDjz/Ck09qDjTDMAzDMBqXXVqk1TOvAHuhiXiPBY4D7gOGAK9GblrhGT0a/vAHFVkVFZq1f+hQtaYdeCA8/zysXKlVAJ59dvvjL7sMhgyBuDi4/XZtq6hQy9ntt+sS5dVXw7hxKgCnTavdvCoqVCB++ml9vVLDMAzDaJrs0j5p9cww51xo1YM1wBQRWRyJCVXFwoXw9dcaOTlypAqp776D4mI4/HBtF4GCAthnH/jrX+Hyy4PO+9OmwZw5MGMGfPQRPPig1s/85BONvExKguXLte+ll+pY991XvfCaO1fHff55tcB16QJHHbV9wIBhGIZhGLWjwS1pIjJKRKLOEhWGHBE5Q0R+vyYiEiMifwJyIjiv7Rg9Gm64AVJT1Wp1770aHPDAAyq4QIXVwQdDy5aQn6/WtHHj4J//VCvaXnup5e2ee9SadtttKsSOPx6ys3WJtEMHFW6XXlqzNe3mm+HKK1Wgxcbq8a+80iiXwzAMwzB2S2y5M8hZwOnARhFZ7FnPNgCnevuigoUL4auv4Jpr9HnXrrBtm1rQevRQgRUTo8uOixYFrVq33qpibvRomD1bLV6gguqWW9QPLT5ehVWXLjpG+/Yq3OLj1bftvvvCz+mnn3Reqan6fMgQ2LpVzxfOH84wDMMwjJqpV5EmIreLyFoRyRORRSJyHHAX8CcR2SYis7x+6SLyvIis9/qP8ep/IiIXiciPIvKYiOSKyEIRObw+5xkO59xK59yfnHNt0SzqI5xz7by2FQ19/toyerRGbX76KbzxBvz979o+cSKcf74+rqhQYZWXBy1aqCWtsFAFWaCg+T776LJmUpJGeJaVQevWGpHZu7dWCpg+HZYtg5494fHHNdBgRZgrcd99cM45KhBjY1XQJSebNc0wDMMwdoZ6E2ki0g+tALCPcy4VrUCwEC0R9ZZzrrlzbojX/SW07mZvYChwJJXzm+0HLEcLtN8LvC8ireprrjXhnMtyzm0OPBeRPzbWuWuiTx8VXh9/rNvPP8PgwXD00SqyEhO1X79+ahHLzNRoT1CL25VXqpVMRMXUv/+tbaB+bSJqBevRQx/36qXC69VX1Y8tI6PyfAJWtAkTtP8FF2gS2/79q7amZWXB2rUNepkMwzAMY5enPgMHyoFEYICIZDrnVgKIVK6xLiLtgWOAFs65QiBfRP4NXA4843XbBIzz0mm8JSI3o9GWkbLLPA90i9C5KzFqVPDx2rW63Nmli+ZM27AhuG/uXBVN/oQkpaVqEUtNVcE2eza89x4UFek4gUjRxYv12KQkrbHZtq0KwO7dg2Nt2wZnnaXBCeecAw8/rMd8+61GlbZvr4EMa9aoX9upp+pxn3+u/Xv1UoEZcnsYhmEYhuFRbyLNObdURG5AU1gM9Aou3xSma3cgHljvE3AxwG++PmtD8p2tAjrV11zDISIfV7ULaN2Q595ROnTQYuWlpZrXrKgouK+sDC68UC1rW7ao9cw5tazFxmr/jz+GJUvUOrd0qQqvAIG3xjlNy+EXaKD50774Qs+zfLmKOxFdDnVOBaNzWrPz6KN1/w03wGOP6fErV2oJqaOOatBLZBiGYRi7LA1Su1NE0lCrWBmwFOjjnDvP29cRXcpMdc6VhTn2InSJtHNAqInIVOBx51yDWdJEJAc4D9gWugtdrm3fUOcOx87WODz7bE1mO2ZMsCpBIKDAz6mnqjXNORg0SMVaebluf/yjRn4uXqxLmnGepN+2TS1hd94JN96ogQJr1mhkaaDPoEF6/rQ0Xd4cOVKte6BpPmJjNRebWdMaj2iocWi1O436Jhrua8NoKOrVJ01E/iAiiUARUIgugW4EMgKpLZxz64Evgf8TkTQvzUUvETnUN1w74DoRiReRM4A9gIn1NdcqmAIUOOe+DdkmAYsa+Nz1yvz5amFLSgoud8bGVhZoaWn69+OPYcECtWzNn699Av2++UaDE7p2hddeCx775JOaSHfqVA1EmDcPBgzQfRddpNvw4cFznHlmUKAF5pKWpiWjvvyyAS6AYRiGYewG1Gd0ZyLwILAZTV3RDo3sfMfbnyUiv3iPL0ALo89Hc5C9C3T0jTUV6OON9XfgdOdcVj3ONRzLgZJwO5xzhzTwueuV0aPhpps0LYdzaqkKCKYAW7eqWCorU0E1cqS2x8UFjykv12CBQOqOsjK1ov3f/6ml7ptvYMQIbb/kEo00LQm5grfcoqWmAsTHa+BDYaGO9de/VvabMwzDMAxDaZDlzp3BW+68zDl3UCOf93o0H1pH4C3gDefczMacg58dXRaaP18F17JlcNVVGpVZFW3awObNQR+100/X+p+bvbjWQMDAggVw2GFqIdu4EX79Vfd37gwvv6y+cbGx2v/MMzVZLqh/Wu/elS146ekaUFBRoWk6ioq0/ujRR9f5pRp1JBqWhWy506hvouG+NoyGwspCeTjnHgEeEZHuqFh7UUSSgDeAN51zUVUaqipeekmtZHvsATk11EkIiLHycv1bVBRsS0hQX7LMTLjjDvUvGz1arV45OXqOpCQNTFi/Xvu+845azi64QI+//fbtfeByc4OPS0r0ePNJMwzDMIztsYoDITjnVjnnHnLODQXOAU4BFkR4WrXm/vvV+f9//4PmzTWZbEw177K/tuann2rf2FgVUAHfsaef1gjSo4/WXGoHHggnnKDpN049VVNqDBumOdeKivScK1bABx9UP9euXTVwwCI8DcMwDGN7os6S5pwbD4yP1PlFJB44GrWmHQ58C1RRECn6SEzUvGmTJ6u166KL1JoVEF+lpdCunVYhcC5oRYuLU98yv+
WrtFTbkpPhhReC7Zs3w7p1mm/t7beD7WlpcMopev477tBj/fijS08+WZdiAxUQDMMwDMOoTNSJtEjhVRU4G02a+zPwJnC5cy4/ohPbQQ44QJche/bUJcZ771UR9eCDKrIC4iw1VX3EAq6J8fFw0EG6BHnVVXDaaduPPXly9edesUKjNkeO1L6HHaa+bjEx+viYY3RsW+Y0DMMwjKoxkRbkLuB14BbnXHakJ1Mf/Oc/as3KyFBfNVA/soBAA7WoBaxoAf+wa64JVggIx4YNaikbPz78/qefVovZrFlqtcvKgk6dNOnt7bfDwQfX1ys0DMMwjN2XiIg0rw7n82jNzs3Anc651yMxlwDOucMief6G4E9/0uhKP2+/rVGdM2ZorrPvvlMhFROjTv9/+xvcc48uR1bly3bhhWopO+igYCSnn9tu03qi112nS5pJSSrazjmncuCAYRiGYRhVE5EUHCLyBhq0cCmwFzABGOGcm9fok4lSGiJVQW6uirZOnbTuZ3Z2cJmzb18txN6qlVq8/vxnePbZ7cdYt059zgYO1AS1Eyfq8mUop5+u1rlzzgm2ff65Jr6dMsWWOiNBNKQqsBQcRn0TDfe1YTQUjR7dKSLNgNOAe5xz25xzPwAfA+c39lyaGo8+Cvvvr9GfubmVk8guXqwJZvfcU5+/+qr6qoVy8cUq5Fp71UzPr+Jd69VL03C8+25w27ZNLWwm0AzDMAyjZiKx3NkXKA/JOzYLOLSKODI1EwAAIABJREFU/kY9kJurIm3YMBVq06Zpfc3JkyElRVNuHHmkOvjHx6tA+8c/tCJAgHXr4KuvVNx9+60GJ0yerNa0Y4+tfL6HHmrUl2cYhmEYux2RyJPWHAj1TMoFUiMwlybD669DXp4uUU6ZoiJs6lTdV1KiOcsmTVIxd9VVWlz9wQcrW9MuvliDDAK51d58U/9ecEGjvhTDMAzDaBJEQqRtA0IqSZIG5EVgLk2GCy9Uq9dbb2kU59VXq1/ZjBnwyCPQp4+KtT59YNw4TUgbsKaB5lz76ivNnVZaqkEFgwdrUEBWllrTDMMwDMOoPyIh0hYDcSLSx9c2BNhtgwZE5B8islBEZovIByLSorHnkJICQ4eq9SwuTmtuZmVpstsnntBozeJiTc9RUKBJZk8+WSsYFBZqdGZsrIqzlBTYe2+1uh1xBPTooaLPaDpEwz1tGIaxu9PoPmnOuXwReR+4X0QuQ6M7TwJGNPZcGpGv0DQjZSLyEHAncHskJnLEEWpJmzFDHfkDbNqkSW5XrYIW3tdtebmKszFjVMCVlalQKyhQnzbQUlIBSko0WMBoEkTNPW0YhrG7EqlktlcBLwCbgCzgL7tz+g3n3Je+p1OA0yM1lyFDdAtl4kT4+mt47z0NAkhK0hQc++wDH36o0Z+BRLjl5Vq/s6AAZs5UcdeiRfU1Qo3di2i6pw3DMHZXIvK16pzLds6d7Jxr5pzrFulEto3MJcBn4XaIyOUiMl1EpmdmZjbqpI49VoMHMjNVbHXsCMcfr4Xac3O1pFTnzsH+P/6offv106LqJtCaNFXe0xDZ+9owDGNXxspC1RMi8jXQIcyuu51zH3l97gbKgNfCjeGcexZ4FjTpZwNNNSwFBfDww/Dxx3DmmSq6Jk1Sf7TcXBVoEybA8OEaRBAXBwMGwBdfNOYsjcakPu5piOx9bRiGsStjIq2ecM4dUd1+EbkQOB443EWizEMNPPMMjBihPmuDB8OaNZpH7dpr1XettBSuuAIOPVT92Tp31pJSc+YEE+Aauxe7+j1tGIaxq2MirREQkaNRp+pDnXMFkZ5PKAEr2uefa6DAnDlqLTvxRPVVE9FSUcuXa+BASgrk5OjjW24xa1pTJNrvacMwjN0B8yRqHB5Hk/V+JSIzReTpSE/IT8CKNmSICrKXX9Z6nOvXa0DAuHGQnKw1O/fcUyM6//tfOPfcoDXNaHJE9T1tGIaxO2CWtEbAOdc70nOojvHjYcUK6ODzPiou1goFTz8Nl10Gzz8ftLCdcUawX2IijB4Nb7/d6NM2Iki039OGYRi7AybSDH74AfLzK7dNmQLXX6+VCgCmT4dOnWDffSEjo3Lfvn0bZZqGYRiG0aQwkWaQmqobaDqNhx/WZczBg+G554L9DjtMLW4TJkRmnoZhGIbRlDCRZlTiuefg3ns1DUfr1lqQPUCbNmY1MwzDMIzGwkSa8TtFRfDggzB2rAYPzJmjEZyGYRiGYTQ+Ft1p/M5zz2kZqNtvh/R0eOeduo9RXq6logzDMAzD2DlMpBlA0Ip2772ahmPUKLj/fhVddeGVV1TorVrVINM0DMMwjCaDiTQDCFrRhg7V50ceWXdrWlkZjBkDI0fCAw80yDQNwzAMo8lgPmkGAA89pAXWzz472FZUpO1nnVW7MV59Fbp1gzff1ACDu+6C7t0bZr6GYRiGsbtjIs0A4NlntZC6nxNPhPbta3d8wIr2/PMaFXrllWpNe+aZ+p8rQGlpKWvWrKGoqKhhTrALk5SURJcuXYiPj4/0VAzDMIydwESaAcCxx1Z+/tJLcPTRtRdpASvaoYfq85tualhr2po1a0hNTSUjIwMRqf8T7KI458jKymLNmjX06NEj0tMxDMMwdgLzSTO2Y9EiuOQStYzVhoAVbdAgeO013T7/XOt8NpRvWlFREa1btzaBFoKI0Lp1a7MwGoZh7AaYJc3YjjFj4JprNFLzjjugc+fq+5eVabBAVhZMnBhs79wZevZsuHmaQAuPXRfDMIzdAxNpRiUWLVIr2LJlkJCgaTkee6z6Y5KS4D//z959h0dVZg8c/55JJYVO6BhAQIqAgIoFxN5XUbGgqKjY1oKuZdeydmxr2XVd166oKKira/utXcFOgIAU6S2hJYFACqnz/v44d8wQAoRkkpkk5/M885C5uXPvO+Em98x53/e8L9RP+yLJhg0bmDhxIjNnziQuLo7U1FSefPJJYmNjOeWUU5gfvFxDiEyfPp2JEycyb9483nrrLc4666yQn8MYY0xksO5Os4P774eJE6F5c7j5Zu26zMwMd6tqr6gcXt8Ad66ENzdCsb92x3POMXr0aEaNGsXy5ctZuHAhkyZNYuPGjaFp8C5069aNV155hbFjx9bpeYwxxoSfBWnmd4Es2rXX6vOUFLj0Us2mNWQZRTAwDV7fCNECL6yHA9JgQ3HNj/n1118TExPDlVde+fu2wYMHM2LEiB32W7VqFSNGjGDIkCEMGTKEH374AYD169czcuRIBg8ezIABA5gxYwbl5eVcfPHFDBgwgP33358nnnhip/OmpqYycOBAfD771TXGmMbOujvN7x59FFq1gltuqdiWnQ0ffgh//Su0a6fbsrI02/byy9olGuluWg7npsC9QZMdb14Ot62El/ar2THnz5/P0KFD97hfSkoKn3/+OfHx8SxdupTzzjuPtLQ0pkyZwvHHH8/tt99OeXk5hYWFpKenk5mZ+Xs3aW5ubs0aZ4wxplGwIM387qKLYNiwnbefeCIkJ1c8f+wxmDpVJwtMmFBvzasR5+D9bNhw6I7bb+wC+/1S8yCtu
kpLS7nmmmtIT08nKiqKJUuWAHDggQdyySWXUFpayumnn87gwYPp0aMHK1as4Nprr+Xkk0/muOOOq9vGGWOMiWgWpJnfjRihj93JytIlpN54Qxdiv+iiyM+miUDlJUjLHfhqMQmyf//+vPPOO3vc74knnqB9+/bMnTsXv99PfHw8ACNHjmT69Ol8/PHHjBs3jptvvpkLL7yQuXPn8umnn/L0008zbdo0XnrppZo30hhjTINmA1vMXnnsMV0m6pxztFjtq6+Gu0W7JwJj2sGDqzWrBvrvg2vg7HY1P+5RRx1FcXExzz///O/bZs6cybfffrvDflu3bqVjx474fD5ee+01yr0V61evXk1KSgoTJkzg0ksvZfbs2WRnZ+P3+znzzDO57777mD17ds0baIwxpsGzTJqptkAWLT1dn991F5x/fuRn0x7tCcfOhR/nwCHNYcZWKHPw2aCaH1NEeO+995g4cSIPPfQQ8fHxv5fgCHb11Vdz5pln8vbbb3PkkUeSmJgIwDfffMOjjz5KTEwMSUlJTJ48mczMTMaPH4/fr1NPH3zwwZ3OO3PmTEaPHs2WLVv48MMPueuuu1iwYEHN34gxxpiIJS6QXjARZdiwYS4tLS3czdjBn/8MeXnw9NMV2447DsaMqf+xaYsWLaJv377V3r/MD59sht8KoX8inNAaohpxzdeqfj4iMss5V8Wow/oTide1adgi4bo2pq5YJs1Ui3Pwz3/qWp4HHFCxPStLt+8uSPv8c90vnKW9on3wh7bwh/A1wRhjjNkrFqSZahGBOXMgP3/n77Vps/vXnnAC+P1w9tkQbVecMcYYUy12yzTV1qvX3r9m4kQN0ABOOUWL5RpjjDFmzyxIM3XqqaegQwcthPvpp7oYu2XTjDHGmD2zEhymzgSyaN98A95qSJxySlibZIwxxjQYltMwdSaQRevTR5/vv79l04wxxpjqskyaqROBLFpBgQZnQ4dCYE3wk08Ob9tCZcOGDZx77rn07NmTfv36cdJJJ7FkyRJWrVrFgAED6uScjz/+OP369WPgwIEcffTRrF69uk7OY4wxJvwsSKsHInKfiMwTkXQR+UxEOoW7TXVt2zadEZqXB/PnawHcX3+tCNTCobwcJk+uWHmgNpxzjB49mlGjRrF8+XIWLlzIpEmT2LhxY+0PvhsHHHAAaWlpzJs3j7POOotbbrmlTs+3K03xmjbGmPpmQVr9eNQ5N9A5Nxj4CPhruBtU155/Xrs5p06F1q1h6VINksrLtcszHN55R1dH+PDD2h/r66+/JiYmhiuvvPL3bYMHD2ZEpcVPV61axYgRIxgyZAhDhgzhB29w3vr16xk5ciSDBw9mwIABzJgxg/Lyci6++GIGDBjA/vvvzxNPPLHTeY888kgSEhIAGD58OBkZGbV/MzXT5K5pY4ypbzYyqB4457YFPU0EGv0yD9Omaf20MWM0gzZpErzwQvjaU14O994LV1wBd98Np56qmb6amj9/PkOHDt3jfikpKXz++efEx8ezdOlSzjvvPNLS0pgyZQrHH388t99+O+Xl5RQWFpKenk5mZibz588HIDc3d7fHfvHFFznxxBNr/iZqoSle08YYU98sSKsnIvIAcCGwFThyF/tcDlwO0K1bt/prXIgFAqKnntJA6IYbtMbabbdBjx7hadM770ByMvzrXzBkiGbT/lAPyw+UlpZyzTXXkJ6eTlRUFEuWLAHgwAMP5JJLLqG0tJTTTz+dwYMH06NHD1asWMG1117LySefzHHHHbfL477++uukpaXttKB7farONe3t1yiua2OMqW/W3RkiIvKFiMyv4nEagHPududcV+AN4JqqjuGce845N8w5N6xdu3b12fyQCmTRjj5an7duDVdfrdm0cAgEjXffrWPi7rpLv67N2LT+/fsza9asPe73xBNP0L59e+bOnUtaWholJSUAjBw5kunTp9O5c2fGjRvH5MmTadWqFXPnzmXUqFE8/fTTXHbZZVUe84svvuCBBx7ggw8+IC4uruZvYg9CcU17+zWK69oYY+qbBWkh4pw7xjk3oIrHfyvtOgU4MxxtrC/PPAOLF+uYtI4doW9fHbD/6qtQWFj/7Qlk0Y4/Xp+fdprOPK3N2LSjjjqK4uJinn/++d+3zZw5c6fM1tatW+nYsSM+n4/XXnuN8vJyAFavXk1KSgoTJkzg0ksvZfbs2WRnZ+P3+znzzDO57777mD179k7nnTNnDldccQUffPABKSkpNX8D1WDXtDHGhJd1d9YDEenlnFvqPf0D8Fs421PX3nkHcnLg5pvh44+1i3G//aBZM/DGvNer+++HTp3g9tsrtrVpo9tr2uUpIrz33ntMnDiRhx56iPj4eFJTU3nyySd32O/qq6/mzDPP5O233+bII48kMTERgG+++YZHH32UmJgYkpKSmDx5MpmZmYwfPx6/t47Wgw8+uNN5b775ZvLz8xkzZgyg3YcffPBBzd5ELTS1a9oYY8JBXCjqEZjdEpF3gT6AH1gNXOmcy9zda4YNG+bS0tLqo3l1Yv166NpVA7Ojj4b33w/t8RctWkTfvn2rte+zz2rQWFmHDnDJJaFtV6So6ucjIrOcc8NCcfyaXNPQ8K9rE3lCeV0bE2ksk1YPnHNNrivoyis1WzVkCHz1FcydC4MGhactV1wRnvM2Zk3xmjbGmPpmY9JMyK1fr92cjzwC991XMVjfGGOMMdVnmTQTcoEs2oUXagmOQw8NfzbNGGOMaWgsk2ZCavNmnTWZmgqXXw4TJkBsLGzfHvoSHDaesmr2czHGmMbBMmkmpL77Trs3o6Jg4cKK7dHRsM8+oTtPfHw8OTk5tGnTBqnN0gGNjHOOnJwc4uPjw90UY4wxtWRBmgmpgw7SdTurcsQRoTtPly5dyMjIICsrK3QHbSTi4+Pp0qVLuJthjDGmlixIMyHVoQOMH1/354mJiaF79+51fyJjjDEmTGxMmjHGGGNMBLIgzRhjjDEmAlmQZowxxhgTgWxZqAglIlnocju11RbIDsFx6lJDaCM0/Hbu45xrV9+NCRai67qh/z9EkobQRth9O8N+XRtTVyxIa+REJC3S17VrCG0Ea2ekaCjvryG0syG0ERpOO40JNevuNMYYY4yJQBakGWOMMcZEIAvSGr/nwt2AamgIbQRrZ6RoKO+vIbSzIbQRGk47jQkpG5NmjDHGGBOBLJNmjDHGGBOBLEhr5ETkURH5TUTmich7ItIy3G2qioiMEZEFIuIXkYiaxSUiJ4jIYhFZJiJ/Dnd7qiIiL4nIJhGZH+621Ae7rmvPrmtjIp8FaY3f58AA59xAYAnwlzC3Z1fmA2cA08PdkGAiEgU8DZwI9APOE5F+4W1VlV4BTgh3I+qRXde1YNe1MQ2DBWmNnHPuM+dcmff0J6BLONuzK865Rc65xeFuRxUOApY551Y450qAt4DTwtymnTjnpgObw92O+mLXda3ZdW1MA2BBWtNyCfB/4W5EA9MZWBv0PMPbZiKHXdd7z65rYxqA6HA3wNSeiHwBdKjiW7c75/7r7XM7UAa8UZ9tC1addkYgqWKb
TYmuB3Zd1ym7ro1pACxIawScc8fs7vsichFwCnC0C2PNlT21M0JlAF2DnncB1oWpLU2KXdd1yq5rYxoA6+5s5ETkBOBW4A/OucJwt6cBmgn0EpHuIhILnAt8EOY2NXl2XdeaXdfGNAAWpDV+/wSSgc9FJF1E/h3uBlVFREaLSAZwCPCxiHwa7jYBeIPTrwE+BRYB05xzC8Lbqp2JyJvAj0AfEckQkUvD3aY6Ztd1Ldh1bUzDYCsOGGOMMcZEIMukGWOMMcZEIAvSjDHGGGMikAVpxhhjjDERyII0Y4wxxpgIZEGaMcYYY0wEsiDNRCwR+UZEjq+0baKI/EtE/iciuSLyUaXvz/BKMqSLyDoReb9+W23M7tXkug7a7ykRya+flhpjws1WHDCR7E20yGZwbalzgZuBWCABuCL4Bc65EYGvReRdIFKX5TFN115f1wAiMgxoWR8NNMZEBsukmUj2DnCKiMQBiEgq0An4zjn3JZC3qxeKSDJwFGCZNBNp9vq6FpEo4FHglvprpjEm3CxIMxHLOZcD/AKc4G06F5hazXUaRwNfOue21VX7jKmJGl7X1wAfOOfW13X7jDGRw4I0E+kCXUN4/75Zzdedtxf7GlPfqn1di0gnYAzwVD20yxgTQSxIM5HufeBoERkCNHPOzd7TC0SkDXAQ8HFdN86YGtqb6/oAYF9gmYisAhJEZFk9tNEYE2Y2ccBENOdcvoh8A7xE9TNjY4CPnHNFddYwY2phb65r59zHQIfAcxHJd87tW7ctNMZEAsukmYbgTWAQ8FZgg4jMAN5GsxEZlUoa7E23qDHhsrfXtTGmiZHqjcE2xhhjjDH1yTJpxhhjjDERyII0Y4wxxpgIZEGaMcYYY0wEsiDNGGOMMSYCWZBmjDHGGBOBLEgzxhhjjIlAFqQZY4wxxkQgC9KMMcYYYyKQBWnGGGOMMRHIgjRjjDHGmAhkQZoxxhhjTASyIM0YY4wxJgJZkGbqnYiMEpGMcLejKiKySkSOCXc7jDHGGAvSmiAR+UZEtohIXDX3TxURJyLRdd0273xORApEJN975NbHefdERF4RkRIRyfMe80XkQRFpsRfHsCCwCRORc0XkZ+/63uR9fbWISNA+d3u/AwdVeu3F3vabK23PEJFR9fQWjDH1yIK0JkZEUoERgAP+ENbG7N4g51yS92hZ1Q71FTRW8ohzLhloB4wHhgPfi0hiGNpiGhAR+RPwd+BRoAPQHrgSOAyI9fYRYBywGbioisNsBm4Vkeb10WZjTHhZkNb0XAj8BLxCpZuAiDQTkcdEZLWIbBWR70SkGTDd2yXXy2wd4n3afz3otTtk20RkvIgs8jJOK0Tkito2PNBNKiK3isgG4GURaSUiH4lIlpcd/EhEugS9ZofMVRXtHue93xwRub26bXHOFTnnZqKBbhs0YENEeorIV97xskXkDRFp6X3vNaAb8KH3c7zF2/62iGzwfubTRaR/7X5SJtJ42dZ7gaudc+845/KcmuOcO985V+ztOgLoBFwPnCsisZUOtQj4Ebih3hpvjAkbC9KanguBN7zH8SLSPuh7fwOGAocCrYFbAD8w0vt+Sy+z9WM1zrMJOAVojgYwT4jIkBC0v4PXtn2Ay9Fr+GXveTdgO/DP6hxIRPoBz6CZi05osNVlty+qxDmXB3yO3lwBBHjQO15foCtwt7fvOGANcKr3c3zEe83/Ab2AFGA2+n9jGpdDgDjgv3vY7yLgQ2Cq9/yUKva5E7hBRFqHrnnGmEhkQVoTIiKHo8HMNOfcLGA5MNb7ng+4BLjeOZfpnCt3zv0Q9Al/rzjnPnbOLfeyBd8Cn1ERyFTHbBHJ9R7/CNruB+5yzhU757Y753Kcc+865wq9gOkB4IhqnuMs4CPn3HTvfd7pHX9vrUMDR5xzy5xzn3vtywIe31N7nHMveZmVYjSgG7Q349xMg9AWyHbOlQU2iMgP3vW9XURGikgCMAaY4pwrBd6hii5P51w6+vt0az213RgTJhakNS0XAZ8557K951OouAm0BeLRwK3WROREEflJRDZ7A/9P8s5RXUOccy29x3VB27Occ0VB50kQkWe9LsttaNdsSxGJqsY5OgFrA0+ccwVAzl60MaAzOlYIEUkRkbdEJNNrz+vs5n2LSJSIPCQiy739V3nf2puflYl8OUDb4HGUzrlDvfGWOejf4tFAGfCJt8sbwIki0q6K4/0VuEpEOtRts40x4WRBWhPhjS07GzjCG/+0AR3XMkhEBgHZQBHQs4qXuyq2FQAJQc9/v1mIzhp9F+0+be/diD5BuwJrq3Jb/gT0AQ52zjWnoms2cK5dthNYj3ZHBtqdgHZ5VpuIJAHHADO8TQ96bRzotecCdnzflds/FjjNO0YLILVS+03j8CNQjP5f78pFQBKwxvv9fBuIAc6rvKNz7jfgP8BtoW+qMSZSWJDWdJwOlAP9gMHeoy8aXFzonPMDLwGPi0gnL8NziBdwZaHdgD2CjpcOjBSRbl7X3F+CvheLjr/JAspE5ETguDp6X8noOLRcb4zOXZW+n44OwI4RkWFoF2fAO8ApInK4N0D7Xqr5OyEicSIyFHgf2IKOiwu0J99rT2fg5kov3ciOP8dk9OadgwaTk6pzftOwOOdygXuAf4nIWSKSJCI+ERkMJKLZ2KPRMWiB389BwMNUPcsT73jjgSpnPxtjGj4L0pqOi4CXnXNrnHMbAg90kP35XjfMTcCvwEy0++5hwOecK0THen3vjaEZ7pz7HB3cPA+YBXwUOJE3Nuw6YBoawIwFPqij9/Uk0AzNBP4E/K/S9+9Es4Nb0JvalKB2LgD+6G1b7+2zpyK7t4hIHvrzmYy+90O9rlK8cwwBtgIfo9mOYA8Cd3g/x5u8Y6wGMoGF3nswjZA3UeRGdELOJjRgfxYdW9YTSHfOfVbp9/MfwEARGVDF8VYCr6FBnjGmERLnqurJMsYYY4wx4WSZNGOMMcaYCGRBmjHGGGNMBLIgzRhjjDEmAlmQZowxxhgTgcKxQLWphrZt27rU1NRwN8M0IrNmzcp2zlVVGLXe2HVtQi0Srmtj6ooFaREqNTWVtLS0cDfDNCIisjrcbbDr2oRaJFzXxtQV6+40xhhjjIlAFqQZY4wxxkQg6+40Zjc+yoZn18OmEjiiJdzYBTrEhbtVxhhjmgLLpBmzC3/PgInL4Jx28Pi+UOSH4bNhY0m4W2aMMaYpsCDNmCrkl8E9q+B/A6FNDEzZCLECBzaHp/a0uqcxxhgTAhakGVOFBYXQPQ7uWQ23rIAezSApCr7aAlM3hbt1xhhjmgIL0oypQvsYWFYEc/LglyGQ6INXN8LmMt3+78xwt9AYY0xjZ0GaMVVIbQYto/Xx7Dr4ZyZM6g6dY2FQItyxEt7NCncrjTHGNGYWpJkm6/UNsP9MiPsWhqTBe5WCrlPbQFYJ/Gk5FPjhhmVwS1eI98HIFnDLcthcAjNy4ZX1kJ4XnvdhjDGmcbISHKZJemU9PLAGnu8NBzeHb3NhwhIQ4PR2UO7gkg6aLfMBL/WBJzLg1pU6yzNOoNRByg/QIRaOagV/XQV
Dk+GtfhBnH3+MMcbUkt1KTKOxrhh+zIWs4j3v+8AamLwfjGoFzaJgcBIMSYJzF0LLGRD9LYyYA/s2gzLgD7/C7DwN0P7VC+5KBQec0Fq33ZsKyw/W4O6hNXX7Po0xxjQNFqSZBm97OYxdAD1/ghHpkPIjdP4Bvtpc9f6lflixHYY31+dbSuGwOTr+rNhp16YA2x18t033yffD+hIYkAA/b9PyHAHnpsBbmyDGB/ekwusb6+69GmOMaTosSDMN3k3L4ec8GNESlhwMGcN13NjpC3YcJ/ZBNpy9AK5YDCmx8IsXgD2/Hg5toePPAI5sCR/210xZ4BfEhwZu8wu1Zlr7WN3++RZYW6yBHUDzaCgsr/v3bIwxpvGzIM00aEXlMHkD5JTCtH5az6xzPLzQB5Kj4JG18O4mzbKNng8/bYM3s7RY7fHz4PtczYylxsFnW7TUxv8GwvStevwE0X/9QCD2KgXWFGsQV+LggxxYVgjXL4Gj0yHWBy+s2zlYm5UHJ8yFhOnQ/Sd4ZI12jxrTkPgdZJdAib9iW3YJPLwGxi2CPy2Db7ZAmX/XxzDGVI9NHDANWoFfg6UezaBlTMX27vGQWwbTNmkB2k2l0CMePhkILaLh+LmwsgjOW6SZsBkxkBgFLaPgzU26Xidol2d1vBU0M1SAv6yAx9fAjKG6YsFvBXDiPC3jMa2/drdOWAz/ydIs3kmt4ehWIBKqn4wxNbelVH8/usbBiiIdItAnAW5cBu9l6zhMgI6x+kFmVbFOwMkuhZXFuqRa2xh4pjeMbhfe92JMQ2aZtHokIieIyGIRWSYifw53exqD1tF6I1laqIugBzy0RmdfntFOg7Ux7eCmrppNS4mBR3tqpu3sdrBgGJQ56BIHa0rgjuWwzcuC1aTn0gHZZbCkCC5aBBlFOiGhezwMSNQu0Tn5sKpI/02MguuWwUW/aZaiIbFruvFYWqgZsKuXQNcf4di5Onv5kNkwMl2/fn2TfjAqRx8ZJbCkGEqAGdtgyXb4T394qhde8Xc0AAAgAElEQVSkxsOVS+DX/DC/MWMaMAvS6omIRAFPAycC/YDzRKRfeFvV8Ino4ud+4ODZ8EwmTFwKL66HGIFrO0O06Cf/KzppAPVLHuwTD/nlUOyHixbrupyLC/WYq0p0v1D4eDN0/QkWFMDS7XD4HDhiti7cfkkHzTb0TYBZQyE9Hz7JCdGJ64Fd041DdgkcMUdrBh45F55Zp4HY5jL9vqP6vw/lwKm/wiHNYVY+XNlJx3waY2rGujvrz0HAMufcCgAReQs4DVgY1lY1Aie2gW8Hw1VLtOBsidMuxzYxkFmsXTKfb4HT52tgNj8f5hZo4PbpFr2RTOwCg2Zql+Pfa7jkUxQaLAZuaMFZuO7N4LwULekx/jfdJ6tMx9LdsAx+K4RLO+r4tlPa1uanUa/smm6A1hTBs5n6oaFttM5GzgthBtcBYxbo5J0+zeA/2aE7tjFNjWXS6k9nYG3Q8wxv2+9E5HIRSRORtKwsW3Nobwxrrhm1ZlHwcHetW5YSA3euhO1+HeT8f5shoxguX6LLPI3voNm2G7qATyA+Ck5srb8UMTUYG1bOrjMOa4rg/tWQWaSBXIxoADm+Iyw4EF7ZAEsKG1wR3D1e02DXdaRwDl7bAL1/gr+thbez4ZkNoQ3QQK/vlUVwSht4LwcOaxHa4xvTlDSsW0LDVtVtf4c/j86555xzw5xzw9q1axqjbRcX6uD+WXl6E6mJcgc3LYOj50IscNcq+DgHyr1JBRtKNFsWjWbVDm8OzaM0a9YtvmKw/tgUXQIqwVf1f1ZNCDCqudZfO6MtPLhW21TstIzHP/aFtrE6Zu61jdqGqiwqgEt/gwPS4Iz5MD03RA2snT1e09A0r+tI810upHwPF/4GxegYsroU64NNxbCwQDPExpiasSCt/mQAXYOedwHWhaktYVfih7ELdSzMtE3aPXL0XMgt3ftjPbQG0vKgTTTklmuJjJ/zIL1QM1h+NIP25WDIPBS+HQL9EiFKtCTHem+Fgms6ayBX6A/dAP4odAycD2/2Jvo4u61m9k6fDxcs1HFAQ5LgkCqyDvPy4Yh06NlMS4uc1FonIvwn/Ekpu6YbgEfXaJHn7LL6OZ+gv98DkmDGATpRxhhTM/brU39mAr1EpDuQCZwLjA1vk8LnkTU663LVcO1mLHfwxyVww3J4eT/dZ1EBPJmhg+57J2i35P5Juu8bGzVI8Ql8vQW+HAQHzoZOsfBIT/hHBszM04BNgBlb4RpvkJhzeu7Ocbpg+sh0+HM37R5tFQtR26FbLCwv1uCqpuWefGgXaKHTX7QnMmBruReoCfSOh/kFXgbRwc3dqj7Ofavh9n3g+i76fGiyHuOS3+CZDOiXBNd1hp4JNWxozdk1HaEWFMDnm+HfmbC4qH7P3TUOjmgJf+9Vv+c1pjGyIK2eOOfKROQa4FM0wfKSc25BmJsVNq9uhKn9NEADzWo90AP2+RGe7Q1z8+GkX+H6znB+e814HT0X3ukHT6+D1UU6CH/mNs2enTBPg6mNJXDjctjsZeRaRGlg1D0OTv4VDkyC2CgtNNs9Hk5qA72bwdQs2FoGp7TWsh7Tc/WXIxCgdYvTemp7k2Dzo4FatLcYe0axdrXOzdcAMzCGLXDMU36Fo1rCF4N2rJf241b4W08NTn1o0d0/r9CM37xCLX3wVCacnwLP9dFxefXBrunIklOqZWhOnAerq7F+bV2I90GraF3dY+omOGcX3ffGmOqxIK0eOec+AT4JdzsiQV6Zzr4M1jxKA5dSB3eshId6VIxnGdkSusbCJYu1bEb3eNhSBmPbw92rdaYk6GLoG4IG3ATqnc0q0H9/yNNVBUqcVv3v3kyDp3PawUPdoUcCXO+HgWnajjwv87WpmmU5WkXBlqBpnVHe+4kTnRTQNhru6a7FPteXaDugImP3VS4MnAm/HhT0c4mG036FXwv0ax9aTuTBNbC1VLOFDnhjEywqhG8P0Npr9cGu6fDLKtHJMF9uhrwwVvm/PxVuT9Wvf83XDPWRLXUJNmNMzdiYNBMWJ7aBFyrVT5qyCQ5K1gBjxlYdTB+QWaRLPC0v0sDm+2267e7VO45eDwRSUd7DoWPV7uimWbUoNMhrHa2B3plttbvxlQ0wOA2OTIeNpTrz87AWFet3BpfTiEc/3UShkwwAWkfBQYkV3ZmJPj3+X1NhQketG9UiGr7ZpsvlrC6uCNCu6KADraf21efzCyvWHF2+XYPItcVwbCs4IFEzfo+t1XYd1EIXiu8aq22dla9LYC0oqPn/jWk4nNOJJFklmsUKh57xcHWnigANdFjCSa3hfSu/YUytWCbNhMU9qTBijlbdP6aVzu6cukmXbQKd+bhiOwxO1mWdTv214rWFftg/UWudQdUZruCgamuZdoPml0OyD3L9sKFUH/NXawA1L1+7XtvGwDHp8NMB8NYmHeOWWQJDkzSTFS8aUCX5NJhzXk22OB9klGpbmvk00GwRBXekammNk+ZpQFckMGmNBnjlwMmtddkdv9NzB7a/uUnf+9OZehPcWArfbYUEL9
tY7p03r1wnR+SWVby2VzO4bDH8OCSE/2EmIqXn6VCA3ZV/CbUoNAD7OQ9ObqO/wy2quJPE+TSLbIypOcukmbDoFg9zhsGgRPhyi3YTfjpQg6kPs2Bce10qKW0bnLVA64odkgxntdVga+5eZIrKgOc36E0st4ruoLbRmlnLKoV3s+C37TBsNkzpB7d4g/Vn50Ez0ZtRVimsKIZ8f0XNs8Na6HuJ8+mNq3eCBlAA32+FdrH6vF2MnitwU1tTBHFRGuzdvELHr4GOgQOd/LCmGJYcBKuH61JWveL1ewmi49ua+fTcN3rzLJdt10dGPQ8YN3Xn+6061qzjD3D4bHh3Ezy3Do6Zq9d3XcZCUcDRzXViTTM0c3tgc0gbAj9u0yLNr23UMXEBa4vgv9lwaps6bJgxTYBl0kzYtI7Rshe3rYCH18K9a3R7YLD9oEQ4bDYkRmuA0zNB//Bf0bFiAfS9savhOh9urvg6cLNbVgTnL4C0A+HLrbptc5mWCEnwwYgWEOPTcTgHz9Hg7O0B8MAq7cbNLtWZpxcs1IrrsQJv94dbl2uZjUA30K+F2oVbjgZcgQxgYCxejOhsuR+3wb2rtTDvX/eBq5ZCgTeRYF2JTqJ4L1tvqNvKtevLkhiNw3e5cMYCmNQdnukF/1gLZ9XDmg4x6Hq2rWJ0qbRyB4OS4d7umvmdsESvu8s6avHawWlwUXvtxn91g3b1d4uv+3Ya05hZkGbCosyvY8weXK3dl6ABBmigUuJgdr52N57ZTtf/eydLg7Upm+qmTbHoIOcMb+LBrAJdZPqefeCcRXBEC9hSqvWf5hfqUlTdm8FzvbVIaMdYDb6yS3XtQ4d2mY5uq6shdI2Ht7M0S7ZquNZ3eyyj4v0HvNirYtbrKW3gluVw/iLYr5ku0H7rCh3zVuCvmHCwwFvmqplPS4s49Hym4Zu4VLu2L19SP4G3oMMR7kzdcXtRua7UcdsK/QAyph38sbPORJ7UQ4s1v5+tH2K+Ggz9E+uhscY0chakmbC4aqmuVxklOn4rr3znfcqBtSV6Y9g/Udca9KEBSlX714ag2a6MEh2cv6UMVhXr7MkLF8E+sZrFK/DDmW3g2V6Q6gVBR7SApCiYlqUZhpu76QoHKbEwabVmwQIB05+66li8hCi9wR2QBFf9BlE+Lb/xVG/oFBRcHdVSfw5d43QMUJR3c3x9I4xqoYuy55ZrodIUb7H26Vvh//YP7c/H1K9SP/wrA/66ErbVUWQWg84MHp6kwX+eX9favLijfvioLD4Kbuqmj6oMa64PY0zoWJBm6t3aIq0T9kZfXRR9VVBNJwHGpcDbm2C7t60MDU6u6QwPrdUB/6HmgHynEwPmFVR0OyZHwcx8LWzbLlrHob2TA+/mQPsYzZ6lF+jr88o1Gzc4saLswMQucN/3sL1c65f1SYCvBuk6nk9m6Ouf3g8uaL9jbbSAX/J0PdGFBTohYMl27dYc214zjzPz9BgP94Cft8G3W3WM0PE2FqjBKiyHY9Php7zQLU9WmQ/tdv9bTx1fZoyJTBakmXq3sBCGJGtNr+Cim3Gia1r+lKefyGds0+2dYzUwe6selkESduxSmpmvGYfEKM1WBboXHRUzRGOBljFQUK6z2U5fADMGw2Etqz7HgCR4q3/12tPc6/ZcPhx+2QYrt+sg7Tc3alB3jHeOq5bo2KFLO8Bt+9TgjZuwyy/TrvJpWTqhxId+QAklH5otG9ce/pIa4oMbY0LOgjRT73rGa8mLlds1Q1bmRUXF3r9LtusjYHOpBkGFIe7irMr2oAgtBs2OxaEZNIeWHthWCjPyKvaL90HGITpWLD1fZ6Re+JsGVk9maImRmq4CcGpbuH4ZfLpZV0c4pAX0T4Jf5sB3Q6Cvjftp8Er9cPNyeHm9roaxubTmS5HtShy6qPobfeGBNXBwFWvEGmMijwVpps5sK4N/ZWodp4FJ2l2ZEgv7JujsyHezNXvWPAo2VxGABep+bXfQrBwGJ8E3W0N/A6vK2LYwxZuB6acikCz169gwqMi6BYLLu1K1PMJ2p8tWjZqjRWi/HFTzdiRGwX8GwDkLdKZdsygNcJ/pYwFaY/HnFTpuMcEL5Ovi+m4fB32bwUc5umzTkbvI8hpjIosFaSaknNMxXYsK4IolWjKiYyz832atkv/dARpsHddKS1MUOyjeRYYssFnQpZa+2hqaNlbu0gQNCMd30DUHt5bDm9kVa3cGD4H7PFcnGOAqjuEcPLdeJwKc1BqWr9PXjUnR7sf4GmbRAg5rASuH64SAEr8ukVVfyz6ZurWuSOudRfsgUSAzhOMtfeh1XYauWlHih3HJ8Ezvqsc/GmMijwVpJmRWbIcxC7T6/YYSLQnx933hmi5a6PLQ2bpyQHK0fj/Rp92I3WJgQ5mO5woOnpJ8Fd2MAVUFWHur8jJPeM/f2qRB5bBkzZZ1iNVyAkuCisIGMmfxAkVeQ0qA65bCPau8Eh2JuirBHzvXsqFBYnxwdKvQHc/Uv6Jy+CBHs6tDk7Rw8uSNOmMYP+Syd9d3YHYmaDY6MOO5e4yuflGih6VjLLzVT4N7Y0zDYisOmJAIrCF4fnutjl/khxf76CLgUzZqgdcTWukSS3/uBpd0gIs66E1pTanWRQvcnAIf8gPP44M+9YeiGsGuupPy/RqsbS3TMhdbyuDIVnBiK+hfqSRBIEBr4as4Zl6ZBmfrS+C5PiFoqGk0lhbCfr9ovb+1RXDmAp0I89Z++v2aXNeBAK2FV5ImVrSu4AovQItBhxhkHmIBmjENlQVpJiRm5mlgdkMX7Urxo8Us+yTAhMWwvlhnbYJW0T+8hZaS2N3NqcCLpopCEZkF2dPhVm7XMUIAD/TQ2ZLb/HrTC4gX7UqafoB+nRwFN3bRbsg3+8GgpNC22TRsly2GG7rC54M0o7bZm8l50oIdr8fqXurJ3gcXQcd4/qc/LDwQHtsXvh0ERSOgZBQ81cu6No1pyKy704TE+mItlSGiN46ucXDBIs1GHdUSntgX+vyiAdq4RVp4dXNQfYEOMRX1z+prOaNAOY3KXxej3ZaBZZ1GtdIsWfDrih10j4eR6dA8Wrt4H+gJ8wo1E2cMaIZ50mqYsVU/yNy4LDQTA8qddsc/1gPGdqjY3iMhBAc3xkQMy6SZWvl6Cxw0C8YshK+3wlWLdYDyK310ke+cUl0pIOUHWL5dZ0kOSIS7U2FC0M1lVwVqk0KUBUiJhiOSd9zm38XXPeI1i3dWO/huG7ywDi7uAEd4XUbDkmBsiq49ek6KLpZ+QJKu6/n9Vh3TZpq2D7JhaBr4voU7VukHj+3+mgVocd5DvMfQJFhxiGauj24dwkYbYyKOBWmmxtLz4JyFcGF7DWgSfbrweZ+fNYPWKVbHyZQDhzWH/gm6cPr8Arh2KTwTtEh63C6CsZIatq3yhb2pDL4Nqm0WfLpmvoqxZVBRbmPKRkgfCtd30S7XU9vCpFRdYWDqJpiVp0tFZZfoqgDHztNgroutmdmk/S9Hiwvf0k0zx7X5nCFoZtcvGug54NwUOHIuXNcZ2seGpMnGmAhl3Z2mxv6eCVd0h
LtXaZef32nAs6pYg7AzU+DjHPhliBanPXy2Lln01qYdy1qAdh/GAnE+XUNQ0BlrDigJmorZLka3Ly9it4IzFoEZc9HAfolQWAYriiu6OLf7K5aguqsbPLgWWvt0zcRH1sKzlSYBHJAMk9bAbwVayDYpCr7KhT920skQpmlaWqjZ47tX6azmDrE6Y7k2BL3mN5dWXMff5ML93WF029q32RgT2SxIMzW2bDtkFkO+txzSGK/r7x+ZsL4UzmmrQduAmRVlL17fVPH64ckVkwlAs2YlXnR1RlstvFlS6SbXI15rPu2JD83ufb9VZ5SC1otaUwTbvKCvctdTFLqYe4mDU9rBgkItk1A5SDuhjT6MAV0O7IJF8MNWXU3jlzx9Xu5du8HjHfekXTRkeWMaY/GKOZfDca3h/BS4bw18NDD078EYE5msu9PU2P6J8N1W7XJ5pCe80hf+1KViFuR5i+DyjlqOYl0JHNsSWkZrNiDZB2n5O1+Aid6Gd71Ct5UTET/nVQRdVQl86vChAVavSqUzCsqhbfSOMzUDEnx6UxTg0y1wXRebGWd2r8wPo9LhkxzYVAo/eh86ih2/93PuzTi07DJoEVh5QGDfZpBxqF6Lt6+CO21dVmOaFAvSTI3d0EW7CtcUa6X921fAPj9XdGVmlMD+M3VNwoOaQ9d4eLa3fm+7vyIgiws6ZrG/YoA06AXa2wu0YryyF5X1iq/Y7qdiYWo/8P22Hff1oTWlgrtbY9GAM84rCBojcG1nLcMxpt3e/lRMU3LzcpiTpxmwwOD+gLJqdnUG/ggneF/kletxknyaie70A9y2Ah7pAWPbh67txpjIZ92dpsZ6JUBqLKwqgX1/1m7CWNFJAt9u9QIn0cH1o1rCO1m6KgFohqrYrwGRcxXjbYIrrseLjnMLdIHidlwtoLlP65eJVGz3iV7UJU6DNFfpRlkKRAVtE+91heX66J2gmYx/r9MM4We1WHfTNF5+B19tgWe8JcAyS6GZQDz6AaG6Y9ECXaHJ3lhMHxqs9UnQ7NmFNsbRmCbNMmmmVh7oqRkxv3dTKnNaiiMK6NdMA7EydJH16zrDSm/Af+lGKPpCu4VKqAjMgjNc84bBrwdCljdouplPgzCAKzroeofJUTo2LvA6n9Njdo7TgDGQnQheMSBKNKMX5x0zSmDZcMg9HO7rrmt3ntEOfh6qZTaMCTYrD7r/BCfM27FLfrvTx64CtNbeR+LDkyv+8Ap6jeb7oVOMTj45O0WPe7ZlcY1p8ixIM7Uytr1W5Q/Ub/UH/Tt3e8UNrMAPb2yCv3aC1s8A5wP3A8v1RrV/ol6MwaU4OsRBb6+mmkO7Hr8cDC198NwGXRvz20FaVDagHJ1Vl1GsgeM2v5bHCFzoQ5OgVTTctRLaxUKhX2+qpV43a3YpbCuDB7pr8GZMsMJyOHmeXjcDE3Vbx2oG8lvLdB3NdrF6PU9K1dnMEzvrtdw8Bj7arDOlvxkM8VX17RtjmhTr7jS1dn0XmLIBfsnX5zMGwdiFsDYoLeYDVmbCledAyRpvYwxEvwJJk2BBga46sK5UA7VipwOyi/2wqFB3f7YPRPng7Pbw2kaduTl1k45ZW1sMqfFaEuPGrtrlevRcrcv20f5wwzL4NRNwOnmgzMGk7rC6WGejtv9Bz9EvAT4dBEn2m2E8zmnh2HgfvJ2lsy3z/RrMg85k3pUo9INDFJrZ3VACX26BI1poncAn94UJnerhTRhjGiS7FZmQWFZUMb7mqLmV6qBlQPlDwMKgkhoCrZKhYA6UL4d/Hau11F5crxmt89vBqiK9sR3RAlYWa4AGWszzoxxIz9caZb/kwdWd4I2NekP8MFuDsgvaw5sbNVP2917w2WY9zvntNWO2b4LOmDutLfy7t96MOwTPYjBNzuw8XQR9UwmMaKFZ14fW6ljKNjG6Nmugjt+eJgYIOq6ywBsfud3pGMyUOL3O7krVJceMMWZXLEgzIVHudDmkuflBY3JKgKeAT9i5DkEClJRAlB8GToOpg3QmZptore4fJTC+A/ywTUtpfBE0gH9ECx2b9lIfXci8Q6wGf1/kwpi2Ovj/7710XNqK7XDLcni4B3w2EAbP0lURAP63WdcbfakPtLKxZ03eWxth4jLNDI9qCU+u1fU2x6bAA6ma0b1jle7rgPHt4eWNuz6e4JVwcfqH9tEecG0X8NkgE2NMNdXLnwsR+beI3BnqfRsKERkjIgtExC8iw8LdnlD6cguc+qvekNLy4O59vHjsM+AM4CN2DtCSgRgoKIGoeJg9A+JWQd8EWHIw/HaQdl1+slkDsDnDYEBSxcujfTClL0xYAjcshxuXw36/6I31zlQ4t70GaACT+8KKIujyI5w0Xy/4KzrCfanwZj/47WAL0GqisV3TJX4N0J7uDQc316LJ6fk6PmxuAZy9EB7P2PE1kzfuejkz0Ms+37v2owVuXQmj5kLatl2/xhhjglUrkyYiq4DLnHNf1OQkzrkr62LfBmQ+GrI8G+6GhNJbG+Gm5Tpx4PrOMHo+/GUpcCswZ9evi46CMm/8Wv42wEHG8/D9BzoWLAm4zSvaWVSu48++3KLB1PgOWnNtREtYdjC8lw1bSjVLFhzIBbSOgQ/3h4winSXaN8EGZIdIo7qmp+dCfhlc8Zt2ay4r0j+OT/aACUthWj+4cxVs9sahCfr9BB+Ul+vEmZZROqEgsJRTTqk3trIlTO2nq3FM2QQn/QqzhmrdQGOM2Z1ad3eKSLRzrmzPezZdzrlFANKIytf7HfxlJbzdHw5podtungGTnobi9bt/bVnujs+bNYMrT9asRbDt5XDMXB13dkF7XbVg9Hy4JxUu66T7V3etzC7xtvB5KDW2a/ovK3TsWIkf8oo1C1YC3LRCu+/PW6QlNKKpmMlcDDTznseIvmb6YF38PKcMWsboJJXPBlXMFL6ogxa/fW69lnsxxpjd2WN3p4i8BnQDPhSRfBG5RUSciFwqImuAr7z93haRDSKyVUSmi0j/oGO8IiL3e1+PEpEMEfmTiGwSkfUiMr6G+7YRkQ9FZJuIzBSR+0Xku5D9dOqZiFwuImkikpaVlRXu5uzWxhK9AQUCtMxMuPvOPQdovxPAB2eeqa/94x933uWVDZp9+N9AGNcBbu0GXw+GW1Zo1sM0DJF6XfsdrCuGT7J1iTLQySPB68VmlVUUWc4q1YAsWrRwLUBuuc7cLHUwsgVculj3HdNOZ38e0nznUi4HNtfafsYYsyd7DNKcc+OANcCpzrkkYJr3rSOAvsDx3vP/A3oBKcBs4I3dHLYD0ALoDFwKPC0iu5rntLt9nwYKvH0u8h5hISJfiMj8Kh6nVfcYzrnnnHPDnHPD2rWL7EqWLaM167DBW+z8ppsgem/ysqkw4zt45x1otYv/+c+2aOYhOFnTOwEGJGpxXFO3QnFNQ2Re11M3Qc+fYeBMOHm+bguOpSoPUyxzmtFN8EGfeJ24ImiAFufTpcU+2gzrS+CwFvBBjs7enJOvGeFgX2yBQYl19c6MMY1Jbbo773bOFQSeOOdeCnwtIncDW0SkhXNuaxWvLQXu9bpJ
PxGRfKAP8FN19xWRmcCZwADnXCGwUEReBUbV4j3VmHPumHCcN1yaRWkAdfkSmJSswVZ1yeXw+QNweNvd79ciSkshBHNOt1XuGjWh11iv6W+2wMSlMLy5lmUJcGimzEdFEWbQNTTz/bo6xZHJ8P5mzZStLdL1a8ekaJZta5l+sIgCJu+nZTbmFcAZC7QmX9sYeGmDBmmP9qjXt2yMaaBqM7tzbeALEYkSkYdEZLmIbANWed/a1W04p9I4tkJ0vPje7NsODTLXBn0v+GtTxx7pCV3i4IDL9OZWtocuyMNHQ9+h8PoRcPQeAjSAizvAY2thjbeUlHPw7DrtbjowudbNN03U/at1gL9PtNxL5T+CUVSMO4sWSPHSapvL4OtczaANStTAbHQ7zfSmxOpatn/sDFd2rqi391IfLaw8ZgEMnaUlYWYMhrax9fNejTENW3XzEVWVbQzeNhY4DTgGDdBaAFvYsQch1LLQv6VdgCXetq51eL4aE5HRaMWwdsDHIpLunDt+Dy+LeHE+uD0Rnv8W4qKheBdBWnw8zJoF/frBF1/ANdfAOedA1B5mWY5qpTWrBs7UsgjrS3Tsz/sDduwCNfWvIV/Tv+RB1zj4aasO9h/RHOblw1avXEZwIeYyp8HYvvE6Vm1tsf7he3OTjpWM2cPH3Bgf3JGqD2OM2VvVDdI2ArtL0Cejk51ygARgUi3btUfOuXIR+Q9wt4hchk5uuBAdPxdRnHPvAe+Fux114eGHNYPm91cETiIVz1u3hn/8Q//dsAH699fZnFOnwtixez7+DV21W/X7rVpO45DmFYusm/BpqNf0vasgv1xXstjifaiYsQ0SvGsqKUq/D/oJ06ErDLhSiAeSo+D8FJhToCtWGGNMXapukPYg8JSIPIIui13ZZHQCQSawGbgTuCokLdy9a4BXgA3AYuBNoMEX1mxILroIevWCrVvhwQd1AkGrVpolmzNHg7Ebb9z5db/8Ur0gDTQ4O7Ua3aPGVOacLjdW5IdEH/wjA3rGa4HaNtGAH7b7odDrFygMGuQfJZpJO6YFbCjV7swbusDgJOjwQ1jejjGmialWkOac+y/w36BNf6v0/Xy0uzPY5KDvXxz09TdoF2Xw61NruG8WcHLguYg8DFSqC27q0tCh+rjjDujQQTNoOTn6vfh4XfopLQ26dNn9cYwJlf/lwINrdNB+mdOF0dtEa7flkCTI8Loso0WDt8qigNv3gcxi+E82PLQvDA0aA/n1Fti3WX29G2NMU9agV5ETkf1EZKCog9ASHQ2uCyZUZs6EpUv1a8K9VsMAACAASURBVOfg1lshYzch65o1UFCw6++DBlxuDwtJA4waBRdfDLGxFY+OHeGeeyDBuoVMPfkoGy5ZDFd1guZRuparz2nh2Cs6wfStmlk7u52OMeuTsOPA2QOT4ZvBcFIbXc3ipq4w/jcdswa6pNMVS+DmiBz9aoxpbBp6IYNktIuzE7AJeIwdM36NVnk5XH+9BkFt2ui4sPPOg65d4euv4csv4ZFHIDcXnq1i4Z7ycth/fx0j9kNQ143fD8XFOm6sqAgGDoQ774Qr97BY1zHH6MOYcLp3NTzTCz7O0Ykmr26EWB+MXQRP9tQPHAKc3x5e2g8+zdEyMse31iWbAG5bqYupv7IfnNJGx6mdNE/HsLWLgTv2gQuqudKFMcbURoMO0pxzM4F9Q3EsEYlGM3Gj0aDPAevQoO9F51zpbl5e76ZNg3/9C1q0gAcegNdf18zV2rXw6qswaZJ2Q778Mtx+O3TrtuPrH30UCgvhp59g4UKdeQkakH3/Pdx2G6Sn6/iySZNg/HiIi6te24qKtLDtXhW3NWYvLSmEW1fA/+VAYhSMaw/pefBhDrydpQWX3+qrXZZPr4Prlumkk3IHZ87XGcMLCuH+7lraZd94zbj50dUDmnmzj6/rAtd01hU2kqJsZrExpv406O7OEHsNGAzcDZyEjnW7BxgEvB6+Zu2svBzuvVczZP/+N2zcCH/+M6xYoYP0L7tMuz3nzNFA6aabdn79pEm6X9++GoABZGfr8RYtgpNPhvvug8mTNdv28st7bpffDz//DPvtp1k+Y+pKdgkcma41yDYeBrOH6ZizGB+8vUmXfAJdgiklVrNnJ7bWICvZB8nRkJankwluX6EFku9KhWNba1atWaXyMD7R11iAZoypTxakVRjinLvKOfeTcy7De/zknLsKOCDcjQs2bZqWtLjsMl378tJLIS9Px4F9/XVFUdm2beGkk+D993X8WU6OBmKPPqrZrscfh+ef17FsCxfq89NOgy1b9GZUXKzdp3fdpUFdcfGu2/Tss3D00fpYvVqzeWuttLCpIy9t0GDq+Na6RFiCD/7ZS7NkBX5d8Hx0Wxg+W4vXDkrUoKzYD+/0rxhneWJrnT18fGsY2z6878kYYyqr8w4pb4mofZ1zF9T1uWppi4iMAd51zvkBRMQHjEEL80aEQBbt73/XQOqWW6B3b338+98aJAUMHw7z5kFMDPzpT9q9WVYGP/6ogV2zZnDooZpNu+ACDa7OOUfP4ffDIYfAY4/BQw9VZNOqGptWVKRZt23bdCJCbKx2jT74oHbJGhNqs/Ngdj58uQV6NdMCtX4HOCgHtpTDc+sr9l9cqBm1Y1rBcW1g6cHQ+UfoGAdT+mrXp2XJjDGRxjJpFc4FzgI2isgSEVmC1l87w/teRJg2TceJHXusPv/+e80KxMRoMBXIEDgHH3+sBWTj43VtzRkztD5ZXp5OGnjrLX2ccYZ2jR5xBLzyio5lS06GJUs0yPr0U83I7Sqb9sILMHiwltsQgcsvB59Pu0otm2bqwtwCXfFixXD4fBB0jtVyGsVAC6n4w9YqClpGQbHT0ht/9sZmLiqEfeLhnlQY3sICNGNMZAppJk1EbgWuA5qjg+5vBG7Tb8npwHLn3CARaQE8jo798gMvA3d5qwhcDEwAZqMrCKwH/uic+zKUba3MObcKOMd7H20Acc5l1+U5a+KJJ7Tb8qij9Pnq1RqgLVkCK1fqGLTSUjj+eMj3ygb89pv+W1ICAwboEk1//KMGUiIVgd3atZpp699fs2KLF+vEhPHjtc5Zly7aXdq5c0V7ioo0ODzrLPjkE51pOnKkBnuWTTN1YX0xbCiBZgJPrIXjWsHKIi0+GyeQGA3bSnTfLV5x2mg0a/bDVg3QblwOf+tpwZkxJrKFLJMmIn3QFQAOdM4loysQ/IYuETXVOZfknBvk7f4quu7mvuh4r+OAy4IOdzCwAl2g/S7gPyLSOlRt3RPnXE5wgCYix9bXuffklVc0c/XXv+qszbIyuPZaGDdOg6JAwPXZZ9qt+f33FcVlS0rgsMM0OEtJ0SxaWZnu07u3Bm+gGbjNmzUDl5ysAdv//qelOoIDtLVrK7JoL7ygqwxMnKhB2xlnaJfpiy/unE179lmdjWrM3tpUoouct4iCbw6A77fBgbM1UybomLPDW8Bt3WByH32ND3i7vwZwt66AZ9bBs73hnJRwvhNjjNmzUGbSyoE4oJ+IZHmZKaTSR1URaQ+cCLR0zm0HCkTkCeByIFDRaxP
wpHPOAVNF5E/obMvXQtjevfEiujZo2PXrV1EuIzNTA7N339Ws2fbtGihFRWmXaG6uBlk5ORowOafraHbooN2gN96oEw+GD9dCtE8/rVm4Xr2geXMNzn78ESZM2Hkx9Pnztcu0bVutz/bxx7rPPffoMebO1bFt8fGakevaVcfEnXoqfPWVZuj+8Ac9jzF7UuaH65fBGxshIUproA1Jg+u7wGv7QcvvdAZmSgyc3hZeXK8TCqLQSQTFDjrFwcM94Yx24X43xhhTPSEL0pxzy0RkIlrCor+IfIp2d1a2DxADrA8K4HxAcL4l0wvQAlajtcvqjIh8sKtvAW3q8tw11blzxQoDlU2ZosVs58/XAM3n04kCBQUaoAGsX68B3pgxkJoKPXrouLXSUujUCdq109eWlmpQFezee3XCwZIl8NxzO3adRkdr23r0gL/8RYvczp2rXbSbN+t+CQnwz39qPTZj9uThtbCwQCcJ7JcIfRPgrpXw/Hr4bzZ0ioW1JVBcAi+ug+nbNDiLFV1F4Pl1WqLj1Ij8TTbGmKqFdOKAc26Kc+5wNBBzwMPev8HWouN72zrnWnqP5s65/kH7dJYdU3Dd0DFudWkEmsl7rIpHfh2fO6QCM0CvuGLHMTclJRVfd+yo+91wgwZi552nkwpattQSHAsWaID38svw3nuatQuYPx+mT9euzfJyDeDi42HZMg3oSkth1SrNmB17LHz4IQwZogEa6Dny8+Fvf9NsnTEB6XkwbhEcOAsuXKSzOIv9Wmz29LYQ7YPJ+8Ft+8C7A2BbGawphiNaQtdYHZc2J18/LBT7oZSKgO2rQVpHzRhjGoqQjkkTkaNEJA4oArajXaAbgVSvnAXOufXAZ8BjItJcRHwi0lNEjgg6XApwnYjEeGUx+gKfhKqtu/ATUOic+7bS4xtgcR2fO6SmTtUB/MXFFTXTQIOngEA2LTNT1/h8/HHdPzdXA7voaO0mHTcOLrkEHn644rX33qtdpY89ptm0nBy45hqdJFBZbq5OPPB7C1n7fBos+v0a2P3zn6F//6ZhmpELx82DA5LglNaaIRs6C+Knw9piXUvz+FYVHzxOaA2lDiZ05P/Zu+/wNqvrgePfI3nbiePsvQNZZGEIYbZsykoLtOxZVssqu9BCaGmB0v5ayiwtG8Joyt6jrISRnZC9EzLtOIn3lO7vj/Oqkh0nsWNr2D6f59ETW3olXTtXfs977r3nsl8WLBsP+6ZD91S4oz+8PlKL3Z7RGd4brfcbY0xL0pzXlanAvcBWtHRFV3Rl57+9xwtEZLb39flACrAIrUE2BegR8VrfAkO81/oDcLpzrqAZ21qfVUBVfQ845w6P8ns3m1AWbdIk6NdP78vI2HkVm3OQmalf//nPGqhlZuoQp3Ma3Pn9+jo33aQT/TdsCGfROncOHyuiQ6mvvKKrTSPfY+zY8MIFCG8X1a6dZdNMbbevhgcGa72zv67XTNjIDJ1n5gM+L9Q5aSEv58HhHWB+qZbTSPPDt/vD+d3h3W06RHp6F5g8PG4/kjHGNInUnvoVf14Jjp97w6axfN9r0XpoPYCXgRedc3Nj2YZIubm5bubMmY1+3uTJWuD2H//QSfwzZuz6WJ8vPF8tGNRaauecoxm10MrOkhK93XyzZsA2b9bjp03TUhtz5+pChJkz4corNXP22GP6+q+/rgsTQlk0CC9AiMys/e1vmokz0SUis5xzufFsw+76ddJnkHcIDP5W98n8cWd4aii0nwrnd4OnN2u9nsu6a1bs4Q1wVld4ZxssPlDrppm2JxH6tTHRYn/WPM65B5xzE4AjgG3AUyKyWETuEJF94ty8BgsEYNQoXam5fr1mvHy7+F8OBUqhf886KzxvraIC8vN1xWifPrp68/HHdTXpd99pQdzXXtPdBULZtFGjwtk053TOWmSAFmpfIKCPhzJtZyZMqWATbaFLwk+3wwWL4ScL4LENUBGA7im6g0DHJA24zusOy8uhRwpc0gMGpulKp8n5cM86KA/CsnL4eLQFaMaY1skyabtvy1jgSWCUc86/p+Ob095m0kJWroR999Xtn554QgMjv1//hZ2zaKGVmampOuy5bZvuD1pTo4HbZ5/psOnIkVoCJDNT66p17qzPLSsLB2vXXQe5uVrgNvR+dYnoxu/33WcFRWMlETIOKUNz3S/emsnrW+GGPtAlGZ7arAHXMTk6TLmwRL+/qS9MLYQfeftrPrAe1lbAzP2hm7dpeofkeP40JhEkQr82JlqivndnYznnngaejtf7i0gycDw69HkU8DlwV7zas7eSk3VXgTff1MDr6KOhb18digyt4ExK0mHNkpJwEdzKyvDWTz4fjB4N3brB+PHh177+et154NA6YXSnTlr7rH9/DdJCAVpSUniOWzCo7//vf9feZ9S0Db1S4e8bYOWBMCBD7/tpVzh2nj52aDbMKtbA/Z51uoBgZCacu1gDt9FZWoLDGGPagoQL0uLF21XgLLRo7nTgJeAy51xpXBu2l/r21ZWZzzyjWa7Fi/XWoYMGWCFZWRqkJSVpUOXzaemMjh11F4KUlJ1f+9JLd//eb76pw6WXXKJz1265ReedLVqkw6a5uRrQmbbHL7pbwMqKcJDmEzi3G3y4XSf5X98bfrkM3iqAt7fpLQkY2w7eHBnX5htjTEzZTI6w24CvgWHOuZOdcy+01AAt5OuvNcjq2FFXWBYU1K6V1rMn5OVp5isQ0A3Wc3J0C6iRI+sP0EAL4GZl6SKB+qxbp0HYc89pBu2BB8JlNzIyLEBry/yidXlKvCzrugr4cBssLNW5aKB7bL66H2w7FJYeAF+NhSXjYfr+VkbDGNO2NHuQJiKpIvKEiKwVkWIRmSMiJ0Q8fpSILBGRMhH5VET61XnukyJSJCKbRaS+HQuiwjn3Q+fcP51z22L1ntF29dUahIUCtIIC+OEPdV/N0A4BIhpUHXmkDm36fBpULd1NZbhrr9WdC448snYdtpCrrtIiuYccojsizJmjWbSzzoLPP4/ez2sSX2GNrtxcXQ4XL4axM+G2VfCX9VoH7b61sN2r59cuCfbJhAnZMCg9vu02xph4aPaFAyKSCdyEzitbB/wIeBHYD63cvxLdTP0t4PfAYc65g7zn3gMcCpwCdAc+BS50zr3frI1sAZq6cKA+c+bASSdpgJWWBlu27HxMaAHB2LEwe/bOj0+ZAj/7GZx/vm72ftllWu6jrtGjwytDQ/LyYONGHW61xQKxlwgTrH375rphz80kvxrKAjA4XeucdUnS3QQyfLrP5udjYIAFZqYBEqFfGxMtMVndKSLz0cn3ndCg62Dv/ky0YO1Y59wSEdkAXOSc+9B7/PfAEOdcmyvSEI0gbeJEHW787judM1ZRoWU6srN1Xtpll2lm7J//1En+S5fCoEG1X6NXLz02OTm8D2d5+c5Do/PnawmPutq3hwMOaNYfyzRQIpzMho3NdSWPzKQ6AJP6w69WQf9UuKo3fFUIL+bBpd2hKAgvWhFa0wCJ0K+NiZaoLxwQkW7APsBC4EpgXugx51ypiKxEN2Tfgm6iPi/i6fOAid
FuY1swZw58/LHOQ9u6Vav8p6XpY4WF+u+TT+rKTp9Pg64rr4QPPwy/xpQpuil7KK5/6CEd2rz66p2zaaNGRf9nMi1Pph8OyoEXt8DsEt1VYHYupPt1e6eX8uDbYlhdEe+WGmNM/EV14YBXzuIF4Bnn3BIgCyisc1gh0M57jDqPhx4zTbR2re6zmZ6u2bKcHM2ADRig9192mR6XkqLHPvAAfPqp1lsLufZaDeBCwd3gwVov7V//qr0gwZjdmV8C+7eD1wogTXSeGmgW7bBsDdDSbEmTMcZEL0jzNlR/Dt0PM7TpTwnQvs6h7YFi7zHqPB56zDTRxIm6RdRzz+mQ5znnaKZs1Sq97+23dQP2UaOgd28tn5GTA1dcoc/fulXnkwUCOkwKcPzxOr8tGNT9PY3Zk1XlWu/sH/vAjhpdyXnOYrh1Jdy0Eu7sB2VB3e7JGGPauqgEaSIiwBNAN+A055y3XouFwOiI4zKBQcBC59x2YFPk497XC6PRxlgSkfu9Fa3zReQ1EekQr7a8+qouGHjgAd06SkTrlm3cqMHWihVa0+zvf4cxY3SIdPZsnfQPmmnLyYETT9TvDztMs3GPPVb/Sk/TOu1tny4OwIZKmJwHN/aGgIN5JfDYRjiwHUxcCJ2S4J6B0f4JjDEm8UUrk/YoMAw42TlXHnH/a8BIETlNRNKAO4D53lAowLPAb0QkR0SGApcSx90HmtFHwEjn3ChgGfDreDXkzjt13llpqe6/Gbodc4wuFqiqgjvu0OM+/VSf8+c/w7nnakCXkaGB2ooV+vU33+iOBiOtyGhbs1d9enQWrByvhWr3y4KD2uvQ5sHZ8F2p7tM5x5ujZowxbV2zLxzw6p5dDlQCmyVca+Fy59wLInIa8BDwPPAtuv1SyJ1ogLcWKAfuaw3lN0KrVT3fAKfHqy0iGmTVXY356qta/Pb443V1Z48eumPBCy/oBuihxQWFhbpwID9fV3hWV8Prr+s+oabt2Ns+va0aOqfAb/rBM5vh3VHwXQnMLIab+sARHXQHAmOMMVEI0pxza9G9j3f1+MfA0F08Vglc7N1aq4uBl+t7QEQuAy4D6Nu3byzbRFaWbiE1YYKu1OzXD26+GY47TjdRHzBAFxfcfruW4Kiq0nppxxwDQ4bEtKkm8eyyT0Ptfu3v1pfpRbqxepE3PL5flt6MMcbUZnt3NhMR+RgtwFvX7c65N7xjbgdq0BWvO3HOPQ48DlonLUpNrdeSJVpuY9kynaO2dKmW5Jg7V3ckyMzUW/fuemzv3vDBB/DEE7ri07Q+zdGnoXa/7jMq1z22UUtvHN8xCo02xphWxIK0ZuKcO3p3j4vIBcBJwFEuFhWEG+nuu+G663QD9q5dtTxHIAAXXaRZsx494JprNGs2cqQOc65ZA88/DxdeGO/Wm2iIRp+ucvBOgc49e2Sf5milMca0XpYDiQEROR64BTjFOVcW7/bUFcqiXXWVrvBctw42b9bAbO5czZStXasFcOfOhS5ddNunQYPg1lttVWdbtLd9ekeNLhaYNg6y7RLRGGN2y4K02HgILcr7kYjMFZHH4t2gSHffrXPRPvtM66U98ghceqkGY6eeqhm1Tp10uHPUKA3QevbUoc/iYpg8Od4/gYmDverTAjw9VHceMMYYs3t2LRsDzrnB8W7D7nTrplmyf/0rfF8goLXR7rhDv7/pJs2arVqlNdVCkpN1e6jzz49tm0187W2fHpoBOcnN3RpjjGmdLEgz/OUvtb8PBuHBB7VMx7hxet8tt8D06TpfLbQLQUjnzrFpp2n5kqy8hjHGNJgFaaaW5cvhhz/UDFlNje46ELJjhxbBffTR+LXPGGOMaSssSDO13H23Bmcnnqh7e9bVvu7Oq8YYY4yJCgvSzP8sXw7vvqsLCA47DH7/ex3eNMYYY0zs2epO8z9336210EaMgLPPhvvvb/xrbNgA116rW0cZY4wxZu9ZkGaAcBbtmmv0+1tv1d0E8vIa9zr33KOLDt55p/nbaIwxxrQlFqQZQLNoP/+5Fq4tLobsbPjxjxuXTVu/XmumPfIITJpk2TRjjDGmKSxIMwB8+aXWO+vZM3x75RX4/POGv8a998Ill+hG7NXVlk0zxhhjmsIWDhhAi9Q2RSiLtmSJZuPuvFOzaSeeCGK1sYwxxphGs0ya2UkwCIcfDv/9b8OfE8qihVaDTpxo2TRjjDGmKSyTZnYyZQosWAC/+Q1Mm7bnTFhJCfzznzB0KHz7bfj+7dvhr3+Fk06KbnuNMcaY1siCNFNLMAh33QXPPaf7dX70ERx77O6fk5kJU6dCefnOj/XoEZ12GmOMMa2dBWmmlilToF07+NGPdJXnpElwzDG7z6aJwAEHxKyJxhhjTJtgc9LM/4SyaJMmaeB1xhm6X+dHH8W7ZcYYY0zbY0Ga+Z9QFu244/R7vx/uuKP+mmcFBTFvnjHGGNOm2HCn+Z/XXtOJ/35/+D7nIClJt3vq3VvvmzoVjjoKli2Dfv3i01ZjjDGmtbNMmvmfyZMhEICamvAtEIDKynCABppZGzJEt4AyxhhjTHRYkGb+R0QL0dZ3C/nySy18+9FHuiPB2rXxa68xxhjTmlmQZhrlrru0flqPHnD55ZZNM8YYY6LFgjTTYKEs2nnn6fc33GDZNGOMMSZaLEgzDXbXXbrd05w5MH26BmxHHWXZNGOMMSYabHWnaRDn9DZ1qt4ipafv/rlHHw1btsB330WvfcYYY0xrY0GaaRAR+OSTxj9vx47w82bPhnHjmrddxhhjTGtlw50mqsaP1wDP79ftpYwxxhjTMBakmajZsUML3k6cCLfdBtu2aTbNGGOMMXtmQZqJmlAW7dVX4Xe/s2yaMcYY0xgWpJmoCGXRjj8+vO+nZdOMMcaYhrOFAzEgIr8HTgWCQB5woXNuY3xbFV3HHqv/vvee7lggEn7s1FPh++/j0y7TPNpinzbGmFizTFps3O+cG+WcGwO8DdwR7wZF28MPQ9++0K6d3o4+Woc6jz0WHn003q0zzaDN9WljjIk1y6TFgHOuKOLbTMDFqy2x0qcPFBfD4sVw0knwy19qBs20Dm2xTxtjTKxZJi1GROQPIvI9cA5tIOtw//26fVSvXnDnnTBpUnhummkd2lqfNsaYWLMgrZmIyMcisqCe26kAzrnbnXN9gBeAq3bxGpeJyEwRmZmfnx/L5jerzZvhqafgllv0+1AG7c0349cm03jN0ae912kV/doYY2JNnKU3YkpE+gHvOOdG7u643NxcN3PmzBi1qnndcAMsXAi3367ZMxFdQPDee7qyM3IRgYkdEZnlnMuNwus2qE9Dy+7XJjFFq18bkwhsTloMiMgQ59xy79tTgCXxbE+0BQJQWgpXXaWbsI8erYFZ165QXg4ZGfFuoWmqttanjTEmHixIi417RWRftFzBWuCKOLcnqv72N/13wACoroaf/xwuvDCuTTLNr031aWOMiQcL0mLAOXdavNsQa2+8AevWQWamLhw491xIst7WarTFPm2MMbFmCwdMVFxzDRx+OFx/PVRVwfPPx7tFxhhjTMtiQZppdm+8A
evXw7/+BdddB2Vlmk2rqYl3y4wxxpiWw4I00+yuuQYOO0znpLVvr4FaZaVl04wxxpjGsFlCplktXapz0datA7+/9mOTJ9sCAmOMMaahLEgzzWrx4l0vEJg4MbZtMcYYY1oyC9JMs5o4UctuGGOMMaZpbE6aMcYYY0wCsiDNGGOMMSYBWZBmjDHGGJOAbIP1BCUi+eh2O03VGdjaDK8TTS2hjdDy29nPOdcl1o2J1Ez9uqX/PySSltBG2H07496vjYkWC9JaORGZ6ZzLjXc7dqcltBGsnYmipfx8LaGdLaGN0HLaaUxzs+FOY4wxxpgEZEGaMcYYY0wCsiCt9Xs83g1ogJbQRrB2JoqW8vO1hHa2hDZCy2mnMc3K5qQZY4wxxiQgy6QZY4wxxiQgC9KMMcYYYxKQBWmtnIjcLyJLRGS+iLwmIh3i3ab6iMgZIrJQRIIiklBL7UXkeBFZKiIrROTWeLenPiLypIjkiciCeLclFqxfN531a2MSnwVprd9HwEjn3ChgGfDrOLdnVxYAPwG+iHdDIomIH3gYOAEYDpwlIsPj26p6PQ0cH+9GxJD16yawfm1My2BBWivnnPvQOVfjffsN0Due7dkV59xi59zSeLejHgcCK5xzq5xzVcBLwKlxbtNOnHNfANvi3Y5YsX7dZNavjWkBLEhrWy4G3ot3I1qYXsD3Ed+v9+4zicP6deNZvzamBUiKdwNM04nIx0D3eh663Tn3hnfM7UAN8EIs2xapIe1MQFLPfVa3JgasX0eV9WtjWgAL0loB59zRu3tcRC4ATgKOcnEsjLendiao9UCfiO97Axvj1JY2xfp1VFm/NqYFsOHOVk5EjgduAU5xzpXFuz0t0AxgiIgMEJEU4EzgzTi3qc2zft1k1q+NaQEsSGv9HgLaAR+JyFwReSzeDaqPiPxYRNYDE4B3ROSDeLcJwJucfhXwAbAYeMU5tzC+rdqZiLwIfA3sKyLrReSSeLcpyqxfN4H1a2NaBtsWyhhjjDEmAVkmzRhjjDEmAVmQZowxxhiTgCxIM8YYY4xJQBakGWOMMcYkIAvSjDHGGGMSkAVpJmGJyGciclyd+64TkUdE5H0R2SEib9d5/EuvJMNcEdkoIq/HttXG7N7e9OuI4x4UkZLYtNQYE2+244BJZC+iRTYja0udCdwEpAAZwOWRT3DOHRb6WkT+AyTqtjym7Wp0vwYQkVygQywaaIxJDJZJM4lsCnCSiKQCiEh/oCcw1Tn3CVC8qyeKSDvgSMAyaSbRNLpfi4gfuB+4OXbNNMbEmwVpJmE55wqA6cDx3l1nAi83cJ/GHwOfOOeKotU+Y/bGXvbrq4A3nXObot0+Y0zisCDNJLrQ0BDevy828HlnNeJYY2Ktwf1aRHoCZwAPxqBdxpgEYkGaSXSvA0eJyDgg3Tk3e09PEJFOwIHAO9FunDF7qTH9eiwwGFghImuADBFZEYM2GmPizBYOmITmnCsRkc+AJ2l4ZuwM4G3nXEXUGmZMEzSmXzvn3gG6h74XkRLn3ODottAYkwgsk2ZagheB0cBLoTtE5Evg32g2Yn2dkgaNkxw+FQAAIABJREFUGRY1Jl4a26+NMW2MNGwOtjHGGGOMiSXLpBljjDHGJCAL0owxxhhjEpAFacYYY4wxCciCNGOMMcaYBGRBmjHGGGNMArIgzRhjjDEmAVmQZowxxhiTgCxIM8YYY4xJQBakGWOMMcYkIAvSjDHGGGMSkAVpxhhjjDEJyII0Y4wxxpgEZEGaiTkR+YGIrI93O+ojImtE5Oh4t8MYY4yxIK0NEpHPRGS7iKQ28Pj+IuJEJCnabfPez4lIqYiUeLcdsXjfPRGRp0WkSkSKvdsCEblHRLIb8RoWBLZhInKmiHzr9e887+tfiIhEHDPJ+wwcWOe5F3r331Tn/vUi8oMY/QjGmBiyIK2NEZH+wGGAA06Ja2N2b7RzLsu7dajvgFgFjXX8yTnXDugCXAQcBEwTkcw4tMW0ICJyA/AAcD/QHegGXAEcAqR4xwhwHrANuKCel9kG3CIi7WPRZmNMfFmQ1vacD3wDPE2dk4CIpIvIX0RkrYgUishUEUkHvvAO2eFltiZ4V/vPRzy3VrZNRC4SkcVexmmViFze1IaHhklF5BYR2Qw8JSI5IvK2iOR72cG3RaR3xHNqZa7qafd53s9bICK3N7QtzrkK59wMNNDthAZsiMggEfmv93pbReQFEengPfYc0Bd4y/s93uzd/28R2ez9zr8QkRFN+02ZRONlW38H/MI5N8U5V+zUHOfcOc65Su/Qw4CewLXAmSKSUuelFgNfA7+KWeONMXFjQVrbcz7wgnc7TkS6RTz2Z2B/4GCgI3AzEAQO9x7v4GW2vm7A++QBJwHt0QDmryIyrhna391rWz/gMrQPP+V93xcoBx5qyAuJyHDgUTRz0RMNtnrv9kl1OOeKgY/QkyuAAPd4rzcM6ANM8o49D1gHnOz9Hv/kPec9YAjQFZiN/t+Y1mUCkAq8sYfjLgDeAl72vj+pnmN+C/xKRDo2X/OMMYnIgrQ2REQORYOZV5xzs4CVwNneYz7gYuBa59wG51zAOfdVxBV+ozjn3nHOrfSyBZ8DHxIOZBpitojs8G5/j7g/CNzpnKt0zpU75wqcc/9xzpV5AdMfgCMa+B6nA287577wfs7feq/fWBvRwBHn3Arn3Ede+/KB/9tTe5xzT3qZlUo0oBvdmHlupkXoDGx1ztWE7hCRr7z+XS4ih4tIBnAGMNk5Vw1MoZ4hT+fcXPTzdEuM2m6MiRML0tqWC4APnXNbve8nEz4JdAbS0MCtyUTkBBH5RkS2eRP/f+S9R0ONc8518G7XRNyf75yriHifDBH5hzdkWYQOzXYQEX8D3qMn8H3oG+dcKVDQiDaG9ELnCiEiXUXkJRHZ4LXneXbzc4uIX0TuFZGV3vFrvIca87syia8A6Bw5j9I5d7A337IA/Vv8Y6AGeNc75AXgBBHpUs/r3QFcKSLdo9tsY0w8WZDWRnhzy34KHOHNf9qMzmsZLSKjga1ABTConqe7eu4rBTIivv/fyUJ01eh/0OHTbt6J6F10KLCp6rblBmBfYLxzrj3hodnQe+2yncAmdDgy1O4MdMizwUQkCzga+NK76x6vjaO89pxL7Z+7bvvPBk71XiMb6F+n/aZ1+BqoRP+vd+UCIAtY530+/w0kA2fVPdA5twR4Fbit+ZtqjEkUFqS1HROBADAcGOPdhqHBxfnOuSDwJPB/ItLTy/BM8AKufHQYcGDE680FDheRvt7Q3K8jHktB59/kAzUicgJwbJR+rnboPLQd3hydO+s8PhedgJ0sIrnoEGfIFOAkETnUm6D9Oxr4mRCRVBHZH3gd2I7Oiwu1p8RrTy/gpjpP3ULt32M79ORdgAaTf2zI+5uWxTm3A7gLeERETheRLBHxicgYIBPNxh6FzkELfT5HA/dR/ypPvNe7CKh39bMxpuWzIK3tuAB4yjm3zjm3OXRDJ9mf4w3D3Ah8B8xAh+/uA3zOuTJ0rtc0bw7NQc65j9DJzfOBWcDboTfy5oZdA7yCBjBn
A29G6ef6G5COZgK/Ad6v8/hv0ezgdvSkNjminQuBX3r3bfKO2VOR3ZtFpBj9/TyL/uwHe0OleO8xDigE3kGzHZHuAX7j/R5v9F5jLbABWOT9DKYV8haKXI8uyMlDA/Z/oHPLBgFznXMf1vl8/h0YJSIj63m91cBzaJBnjGmFxLn6RrKMMcYYY0w8WSbNGGOMMSYBWZBmjDHGGJOALEgzxhhjjElAFqQZY4wxxiSgeGxQbRqgc+fOrn///vFuhmlFZs2atdU5V19h1Jixfm2aWyL0a2OixYK0BNW/f39mzpwZ72aYVkRE1sa7DdavTXNLhH5tTLTYcKcxxhhjTAKyIM2YPdhQCfNKoHJvtl43xhhj9pINdxqzC9uq4cIlMK0QuqfA1mq4dyBc1CPeLTPGGNMWWJBmzC6ctxgGpcMrwyHVB9+VwIkLYGA6HGG7JRpjjIkyG+40ph5rymFGMVzRE85aDKlfwKFzYWAaPLin3T2NMcaYZmBBmjH12FINPVPg2HlwSHvYegg8vg/kV8EH2yBo89OMMcZEmQ13GlOPkZmwrByOy4ELusMP54LzHisLwgGz4fMxkGWfIGOMMVFimTTTZjkHXxXC05tgZlHtxzL9MDYLphbCxAUwOB0Obg/lDn7SGVIEJq2JS7ONMca0EZYHMG3Sjmo4ZQFsqYLx7eF3a2FoBkwZARl+PeanXeHVfA3UxmZBhyQ4vTM8tEFXe/7dm5u2oRLWVsKYLPhVbxiSEb+fyxhjTOthQZppk25cCcMy4LMx4BOoCcLPFsHPl8J53WBdBWT4YGW5Hv/LXnDDSj0OoMZBAHhgPQzOgH8MgU92wCFz4L+jYWRW3H40Y5os4GB+CaT4YHgGiOz5OVVBWFCqFzMD06PfRmPaAgvSTKuwsRIe2wBfFkHnJLiuNxyyizIZQQcv5sHqgzRAA5icB5/ugB018FIeJAnkJEFRtc4JuHgppABpflg7Hm5fDU9t1uDtsx06T+2uAfqcSWtgysgY/eDG1FEe0H6duovJLPlV8NwWWF0ByWif7p4MQzO15MzCUvjFMsjyQ2FA+/+v+8JZXeGNAs0+H95Bh/9DwdsreXDtCuiUpPUEh2bA88Ogd1qsfmpjWicL0kyLN7MIjpuvOwL0TdMs2BFz4bQuMHk4+COyAM5plqDKQTt/+Pm3rILb+8KNq3S+WZoP8qvDiwUAqgAJwoBvoSIIQeD1rXBuN/iiEI7vBKd3gXvXxfCHN8azpBSuWQFf7NAg7ZRO8MAQ6Jai/f7tAi0f80UhjM6CrVW6irnKQbUDQQOyAHpi2OC9rh8NwK5ZAcfkwKgsuGiJvsaLw+C7UrhmOby9H+S2h+og3LMOfrIQvh3XsCycMaZ+tnDAtHhXL9er/r8PgUUHwsqD4Jc94fMd8OxmPWZeCYydCSlfQPoX0DEJ/ugFU09shmt76dAlwOsjYcY4DdD8Ee/jAyqdZs1CFTi2VsPzW3SOGsDKCj0p7srqcnh+M3y4TYNFY5rD9mo4ah6c3Al2HAqbDoY+aXDCfM0c37Yarl8JM0qgaxLMLoZVlVAa1AANtL8HvNerQYM2vPuC3uOzivWz8t0BmpF7cjM8uAFys+C3a+CQ2fD7tXBlT92xY3ZJbH8PxrQ24pydKRJRbm6umzlzZrybkfB2VEOvr6Fzcu3hy2VlMGE2dE2GTsnwVREMSINremmm4ZMdUByAM7vCkjLNnk0vht4psPZgOG4efLhdg7TAblug0rzjqr3vx2TCPQM1uwaaybhhpQaNR+fAqgodWn1nv9gtNBCRWc653Ni8W/2sX0fHg+vh6yLNHIc4B7mzdOj/qmUajJU38c+9H+idCosPhHcLNLu2qUovYFIEzuwG4mBaEfRIhZv6wI867f41a4Ka7d7bjFsi9GtjosUyaTEkIseLyFIRWSEit8a7Pa1Bik9PRskSvvIH2FylQVheNawo18nM1UEN4v6zH1zcA/qm6hBpuk+P7Z2qQ5yHztYADcIZsz2pIByggU6g/tlC+Nx7nZe9OW8rxsNLI3TBQp9UGD4Dsr6AMxaGFym0JNanE8PKcshtV/s+Eb3vza1QEoSKZrgeDwCbKuH+7+HxTTo/LZSB8wl8tA0mZMOIDPimCA5ot+vXeiUPRkzX3Tz6fwMPrdfPsjEmzIK0GBERP/AwcAIwHDhLRIbv/llmTzL8OsSztRreK9D7KoNw+RK9sr+uNxQFdK7Y52PhjjVQWAM/76H3907VjFd+tQ7PlDuYVxx+/b09Z9QARUE4+Tu4cDGcvwSWlukK0uVl8NOFmv3rnAyfjIH9s+CIOVBQvceXThjWpxPH6Cz4ZHvt+wJOLwwWlOrFRpdGzkDeVWKrCrhzjV7IBIB0dA5beRDWV8Gly+DVAn3/70o02x0a2l9drgt8zl4EFyzRLNy4LLi0B/xzE/x9Q+33qgjAfevggFl6u2+d3mdMW2FBWuwcCKxwzq1yzlUBLwGnxrlNrcLD+0CvVK17Nugb6DQVllfoPLHLe+oxXxZq5mpsll7hF1RrGY1t1Ro4XdET2ifpiamkGa/mi4PwzBbo7NMiuDOLYdR0+LxQ5wUVVMNfv9eg8agceGpT8713DFifThA/66rZtOtXwKpyXaF59iJdtbnUy9Dm1TTuNRv6MShHV0NHZp1T0MDsqPnQbZp+Jod8A6Nm6DDpS3maNXtkCNw9EF7IgxM6wp/W6Rw60H9PXaB1Cv88SG/TCvVzHrSMm2kjLEiLnV7A9xHfr/fuM03UOQUWHACvjdBhz2oH2Um6eu2sRZqlWl0OyV9ouYzHNsC5i7Xzv1Wg2zvdNUCDtBnjai8WaKxdZR+2B/VkOX9/qERPYLf21QUMHZN1NepB7WFhWRPePPasTyeIDL9miksDOg/twFk6x3JJ6d5ngxuj7lBqFRq0pYl+3S1F52FmJ8E3xZAu8NJwndN2eLYu1nlqs2a5S7xM2X+3w8YqfeyIDnp7baROZaibNTSmtbIgLXbqO3/X+tMmIpeJyEwRmZmfnx+jZrUORQGtfba9Bu4eoCeArCTI9sO3RdAzWctqALxeoBm0+wdqyYDQxP190+H9bXpyydzLT0bdE6IPyPZp4DivGI6Yp/dVBLXsxxND4ZF9NMv3Yp7Wl9qVwhqYUQSbK/eubVGwxz4N1q9jpVuKLpLplqLlN/IqIT8GQ4O7+6hURAxzntQJUkXruOUk62diZCZ8tB32zfA+K0m6Uhvg22J9TmQJHb/AiZ30M21MW2BBWuysB/pEfN8b2Bh5gHPucedcrnMut0uXLjFtXDysrYAbVsDRc+HypTp/ZW+sKof9ZsAH22BImhaTPX4+tPdrbagawOfTQOhvg+G7XC3gmZVUew7YTX21xll7vwZRzfHhCKKBVwD440DNEgia8Tu8g24lVRrQAHJ2MVzcfefXcA7uXK2Tq69YposNLlisJ7s422OfhrbXr+NlTTk8vhFeGKaBz6T+sXnfXS2uicxIV6Pz41ZVamBW7TSLVlgDhdXwnzxd5PPrvuEV2r1SYHHpzq+7pEynNxjTFliQFjszgCEiMkBEUoA
zgTfj3Ka4WVwK42fpXJab+0L/NK3z9GmdYYySGp1fU1TPfJotVVqr6caV8IueeiL4phgObA9D02FJud7ngMOyYdEBcE1v3bJpVKau+NxRA5O36Osdkg0/7aJZuVBtqKYQ9EQ1p1S3mApVcA+g20hl+mD4dOgwVeeqXdMbutRTY+1fmzTYXHQAzMqFdQdprbbrVmigW93Uhu4969MJ5LMdcFxHrQ94ehddTBMPkfXVQt9n+HR3gxTRz/zWat0Hd14JXL4cLlwKx+XAVb3Dr3N6F5hRDE9s0ukBAQdPboLpRXCGxfqmjbAdB2LEOVcjIlcBH6Dn7iedcwvj3Ky4mbQGbuwDN/bV74/tqHtp3rhSA5Gg0xVkD2/Q4ZvNVTq5/t6BeiV9+VKdvxVwmp26qIuW0eiSpPNgvvdqN4XmqK2s0Krp1/fR4GhuCYzI1Pkupy7QQrZdU+DLHZrVOiYH3iyo3WahcfN7nPecKm+Hg18s1/uy/Pp1mlf6wwEbqmBhiQZcyXUunR7bCPcP0rpToNv9dEzSEghT8nTOz6md4KEh0CG5sf8Te8/6dGLZVKVFkqcWajC0vio279svBdZGvFfkZyRZdIFOv1RY7C1gmFqkJ56HNurwZ5pPF+78rn/t181Kgg9GwSVL4der9L7+afD+KH3MmLbAitkmqNZe9LPbNJidW3vYIugg60vYcrBmj17Og/+M1GO2VGndsUOzdYLxud3g9XzdAWBtxBytFDTIqXF681G7flk7n5bZGJyuQ4urK3T4pU+qBlJHZGshztMXatQRdPr8n3WBf+fvPrvmR09Qkcf40DpslUFI8sFVPb0hWAcrKnZ+DUEXL+zfPnxf76/g2aG6YrVjkp6EF5fCf3doAJsisL5Sh3A/Ha3z7OqTCEU/W3u/jofKILyWr8WSy4Lw447wVF5s3ntImvbL+tQtBN3er5uwd02Bnin6ud0nHT4avfOFSV3rvPfoW89eoInQr42JFhvuNHHROTn8hzdkc5Veeaf5NIP20JBwENctBa7uBX/6XrNnj2yAG/roEEqkKnSrm4AL7wAQqoYOWhJjRIbOY3t+i84Xe2azrvb8bIfWSzu1M5zcUeeL1QDtffDyHgI00NWhOfUsDS0PQs9UHeZ5fxv8dRB8XxkeFhqZocFj72QN8sbPhqD3Zs5p5u3UhbpTwkMbdBueUFYvr8qbPyeaUZwwG17a0oD/ANPifV2ohZczv4CzFsPwDDiva/QDtPSIr0MXGinA4NTaK0mC6Gf5nC76uQ44uLM/rDkIvhoHHfxaDqeyAcP1fdPqD9CMae0sSDNxcVlPHdoMTdwvC+hw5AXd9ap6Y5Wu+AINVCat1uKX1Q5yknQ+2ZXLYUUlZNXTiyOnsOUkwSvDdRjUD8wrBZ/T8gR/XKNFZrum6FyeI+bCC1vgil46kTnDp0GfHz0BJQH9U2FQqmYG2vv0sX6pcGUP2BHQ4dRsv86Dqz5CdxfomKyrR9dVwl1rNWvn0DblV+vPNXmEvkcAeNoLtN4s0CCsMgBT8nXumgM+3KH/3jNQs4lHdtCyBjVoRuXB9c3+X2YSyPIyrRf2i17hWoAf74AHd1q20Xihfl536W7oM1BOePXzEe2hR7IOQ6b49WLqxWHwyCA4uyt0SoLJ+ZqRvmsA3NpPd0IQgf0yvf1xbQN2Y3bJgjQTF1f30rpgg7/VTZn7fq3ZrvsG6uMT2sNrW/WP+4VL4O61el+2T+uN5VeHM1sle7gSL6jRVZHpEb29As265dfAo0Ogk18rs2f74aLFeuK4qpcGiAE0E5CTpF+n+WBjpc4nqwyGh3Tu+d7L3jkY316P8Qm082sQurZS9xLdXhP+4P2sqwamiAaNofPVEm9V2/NbdPXnDX10Tl7viOFhP3DrKs3UbavRTB7oUNLv1zYsQ2FahqCD367WaQJZX8Chc+DEHF0d/UgzBGaRIgOnO/qG7w/189/01dIYAF8UwcB0nfO5tEw/I3etha9K4Nlh8P0EeHAwDErXshmhnQeqgvDeNr2QSW9KYUJjWjkL0kxc+AT+MhiWHwh39dfCrjtqYNxMvQ1Mg1+tgMuW6tL9zsmaATsyRwOmykZOpdxcDdsDO2+WHkQzdN+W6MlpRYUOkf5onu5YsP5gzRoEnFZv3z9Lg8Qq9EQ2KEMDqzv7wfxcGJ2pm0rPLg7vK/rABg0QJ7TXoqObq3Szd9Cs3TObNOC6ZoVOpAY4pbP+u7ZCs3z3DNIVrFur4ff9w20X9D1WlGtQKcCiMm3z97uYK2RanlO/0/IwJ3fSC5niGngmT/t1U+X4a2ejk9HP1ymd4B8RO2Cki34Ox7fXeZWd/VomY0axlpfZfLBePE0ZruV03irQjNkF3TXI/LYIhnwLpy+ArtP0sWeHNb39xrRmFqSZuAk4nYx/2kK4fbVeWS8rh2Vl8OwWncP18Xa96i6s0eHEj3fA9c1c0z50nhPCc8qq0Fpk26rhlr4wPht+2RsGpMOh7eHgbA2WFh6owzZ3rtX5bVf30q1tCmp0FWr6F/DsZp1P8+xQnV9270AY4208XQNsqdEh1SC6qCEnCQ7toI8PSNM2/OQ7GDtLfw+vb/UWNRAe1u2Woptep3on0vxqDe5My7eqTD8bLw6Df+wLr+ZrP2ku2wOajQ7N7wx9Ht4ogC3eN350iD6/Gm5bDcMzoczBD3M0O3xzX93549BsLX1zWU94Y6s+NysJpo6DkzpqRnlqoc773HywztU0xuyaBWkmLhaW6l5+v1oRLlPRwa/ZrNKgfp9XrWUFDm6v9z/obb58X5TmW/VP0RppPrRNNcD+szRQGpcFt63S7MCcUjignRa/BXh/Px3a7Pm1zpNL8unxR2Xr0Oj/DYI399PyGKd30ZPfi8Og+FDoG7HyocppZmLtQeH7Lumhv4dpRXBgOxjXToM/gAwJP29NuW48Pa6dDsMelRMe/jQtU9D7f/37es0A/3I5pH0O/y2MzvvtKil3dAftxwH0szAqUzNlR+XoRUOo+GzQwfwSnZ9WHgwv1gG9iHh4X9h8iN6eGWbDnMY0hAVpJuaCDk5bAOd11z/6xV4V/u2BneuQVTl4rQCu6aWFXosD9a+gbIxdzVNeXaUnohEZOrQJOmx4wRINEAsDOok/Ca32P6NIhxRvWOUFlwE4MhuWHggzc+GjsfDOKH1uqNLNL3rph270TPjDOtg/WzMRT+2rmYX1B0O7iOAq6LQNA9J0+HR5uQ75dkiCiV10WDi02CDVD/NLtY3P2TBSi/Z/67QczYBv4QFvzlledThzGo1yeBLxb49krTmY5dPs9WBvSee+6XrR9OVYXW383+1QWgNvbtXiyr1ToU+Krs4emwX/3KjH2IboxuwdC9JMzE0v0vlcB7XTUhhBagdn9/SHwd6cLUFXQD64AQZ6QyPbmrgd0u7OF0PTYU0FbPLSCiMytX1+dEI+6ArORzfBIXN0q6bX8nRotiyoqy7PWhQOyg5ur3PQir02p/rg7f3gQW/HgZM6wYrxcGEPzTbUtboCzuiq+xXmVenJbnqR7o7wfYXWj+ucrFsA/bSLDn
e+MMyyaC3ZlDy4cZUGSjn+8DzFSM29mUAK4c+FA/48SP8NlcD5zlvUsrUG/rhOS9ak+aB7qu7POXEB/Cdfhy+Hz9ALmfu+1xIb16+AA2Zp/zXGNI79KTcxVxLQkhRT8muXysgCSoHfrtGreNATRdcUXSHpi8ElxZLy8Nc+4JMd0NGv7dxYpQFj0OmctdDiyQrg2p6aMXDA10Vwy0r402Cd0J/hh8yI7J+IZsOOzNlze8ZkwV/Xw+P7wm/763u/uVWHvoak644J1U73auyXBjP213lzpuW6bTWMb6dbnEWbH+2P1V6EloRmZS9frnMuL+gOXabpxcqSMrirHwzO0IuHaYVaRHlYBvy4s26IXlwDnZN0B41XhuvQv3Nw8yq4Zjm8NCL6P5MxrYkFaSbqnNMTQciEbFhUCrOK9Gq8zIt2Qvur1wCbIqK3dZW6orO+ekrJNH9WAXQ+jc9pALY9EN4sukOSTrKuisjmdU2Cvw3RHRJuXKmLAx7dpCUzLl6qQ7V7WwvqoPZavuBnCzVIS/fpTgPZSfDeqPC8nhM6NeGHNXG1uVJXbr5VoBPziwOwPEbv3T8VVldqJthHeNV0WQD+sBbuXqMXHt9Xwjld4aV8rct23mI4q6tmit/ZBk9uhi/G6JSEQd/AqyM1QAP97P+mH/T8SudNptlcNGMazIY7TdQ8vQl6TAPf59BpKvx5nQZsmd4Kx3KnxWK71HOpEPo7/r/Nmp0GdnU1Z4AWKuIJOvQY2m0qVcIZv96pGpRF2lqjQ48/6wqnddZMYWkADpoDx3SE2/o1oU0Cr47Q1XSnLdBN6IMOPh9jE69bgznFMHKGlrrokaJFi2NpZaVmhAOudlmbILpCuSCgX/8gW6cBLCmFK5fBo/vAA0Pg5z3htZHwww4aaILW58uq0zfTfRrs1djcNGMaxYI006ymF8EvlsEBMzWLNDILHttHayv9erVurP75Dpi8RbNV22o0yKk7ETp0rgr9TY8sNxENfnS7pmSfTo7eWKn7eSahc25CZpbAqsraK9f8aGZhcSl8XqjzezJ9WgPuzv7h1W97K90PvxsAKw7S4qAP7aMZC9PyXb1ch/4DQfiqSIfR4yE0dJ9K+KTg0EUAAN28jFtRQPv5kR1qP/+yHvDuNv365M66cCDSk5u0TqBtjG5M41iQZprNPzfqBOK+qbC4TFdIVgfhgm7w7ii4ogf8cS1ctESv2g9tryeHZKk/IzbIWzwQGcA1xw4yXerJQAWA61Zqe/O8IGxDpU7Ar+9D4iICt2rgi0KdML2gBEZl6VyeThZImXrkV8HMIl0d/FWRrtitZvcLWppTqFumSO1dOACCAodnwxHZevGRX63HTSvUQs5fjNHs7p/W1X7e9prwVlGT+sM7BXDKd1o+5ILFugvBA4Mb39ZFpfDlDiiJ5hWaMQnMgjTTLIpq4JZVOgx3c1+tdfblWD3xnPgdDPxG/3DXAO/uBz/pHF6BWFXn7JTt9cotXlqhhnBw1hwnsvzdDCnVAMOz4PSuOk/nszHwu/5wfpfawWLoJa7vqceBlisIoLXK/rIXJyTTulUF4YqlsM90uHQpHDg73J9j+Ye4Cu2r6ydoBi/03kloFvmrIjigPTy5r2aRB6bB/YPglRHQNVUXNTy1OXyhUhaAu9ZoSR3QqQKzc3XHguXluvhlwQGaVW+o9RVw8Gw4fr4uOuj3DTy2Yc/PM6a1seSzaRZfFeof4yEZ+sfbjwZly8t13tk7+8GtK3XI5LJlWuz1kx3h5/sID7kUhhYSeP/GKsOQjAZZ04pgbolOzq9xWlB23+m129ohSRc9dEnVnzVZdPeB8xfDSZ211IYxANtAgxZ0AAAgAElEQVSr4Xdr4KU87eMXdtNFApGaa5tVYdefl1CXDKJD8MWB2k+oARaWaVD27zxY5gVVr4+AfSMCrF/2gp8vhf1mwMhM3bbtR53glz3Dx2T6db7a3vrpIjjBm8/pF91Q/qh5Ojfz8A57fr4xrYWdSkyTzSmGD7ZpQFZWo8MhJ3TSWk/J6BZKm6vgg+36/TdFuuR/aMaeXztyZLJbEy4pOiVp3am6fOiVihAeckoRrZheFdRaaAfNhk7JOrEb9CRXWKP7aH5dpEVmk0S3fqpyteermbZtTTn0+VoXBuRV6+1vG3RD8mjwo306w6cLXrL9cFYX7btdksMBXMckLahc5bSsTOhE8MVoLaexrUaLLGf54PVt4dcvC+iG7n8aqHNNT+msGfOnhoZXczbVghKdahAK0EAv/m7uA//ctPvnGtPaWJBm9lrAwYWL4dQFOiclrwq6fa1B2wvDdI7KuirdRPy4+Zp9CqBz1TZVaSYqZFdX/5EFXrc0cl5KZIBXUBPehzBS0LuF4j/nfR1EJzqf0UVPWFl+3XEg06cnvGt66XDtnBLdJueMrvDhNl0Bd4Rd6Rs0o3zKAi0I+87I5suWRfKjFxjtfZrNrUHfpyIIx+RoGZisJF19WeO0WHO6D9ZWhoc6U7znJgOzSzXDNioT3iyAh/eBZzZrMdrzFsOgb3X3gct76f6yZ3eDfRpwsdUYBTW6irpu2Zq+aXphZExbYsOdZq89u1k3RL+ypy6/H5wOS8t0v8tD2ms2ar9MvQIekg7Hf6cB2voq2FwQLqAJ4SDNT3i+lx8tIFtXmtRecbkrdRNaoZ0DXMQt9N6hY7O8E1ZZAN7fDh+M0gnPayq0WGxRAA6aBZPzvPZVagHPDD+csxj+M0JPeqbtKg/A7avhiY1QFNTA6JQFzf8+oc9BlpfJrQ5qAFYR1OH2j7fplIJOyVoSJui0WPOT+8KvVkK/VA3I1lXo8Vf1hoc2aKHaozpoaY2Ds7W0zH3r9LP9p4FwbrfadQ+b27gsXXi0slxrBIZM3qLBpjFtiZ1OzF57OU+X4v9xnQ4NrizXAMuh28jM3F+HOQFmFGsBWJ94qzld/Ss1QwFa92TNQGRHVupH65Cl+RrecSPfIwnNcnVMqh2YOXQydRLw8nANxEak65DR79fqxuhj2kFOsgZqaybAXwdrUc+/DtbyGBd2h9Xjbb6MgQuXaODz6Rjt6+394fmVzSkAHNJOi0GHtkrzoRdFlUEtxFwehJqAFp3tmaxDoCsrYNpYLSo9IA3O6qaT/OeUaOZ4Ua6WzLliGYycrsO17xZATpIWuD1uvl7EREu7JPj9ADhyLjyyAd7aqlutLSzTz5wxbYk4F6tp2aYxcnNz3cyZM+PdjN06aq5edZcGdOumycN0dViHqXqF/7v+cHxHuGo5TPe2uMnw6cpPP7rnZH1DkKBzyArqGd5M8rZl2tM5L8OnbcqrCu9oEGl3E6z9wGldNBh8dSsUH7aHN2shRGSWcy43nm1oCf26KVaV6xzGdQdpdqrHVzpE11wxTWSmGWovuAk97vOOCXpf90jRgtHFQc2KvZgHqw6q//VrgvCDuTrc+Ks+cMMKLcMxKF0X/wTR3S+GZ+rFSTR9uh3+tQkKquGHOXBFT13MU1ci9GtjosUyaWavTewMayt08vEzQ3UPv4uXaIDm0D04X86DX/fTvTf/Nli3OQrZWr1zN
u3wdnpffQGaoPNqHPV33Mg5aGVB2FKp83IidfDBeV00qxCS5r1e/1QYk6knuPklMLGTZhqMaajl5TA6S7c+enSj9tfmSKIJtQO0oWm6R6agfXpQmj7eI1Uzvvtl6nE+4PQuUOI0M72mXEvbfL6jdq2/kHe2aZZ78nAYkaGrnGfsrxdjXxbqPLHb++lK1Wj7YQ68MBzeHw239K0/QDOmtbMgzey1y7yhhx018EY+dPsKXsjX4cSQBzfo1fgpnfWq+F7v6jvLr1fnyejcmlCw9mVx7QxXEnBKR/06VfRYR+0T3ykdwzXKUiVcrLPcwRt1Sh2UOnguHzZ4Gbxk0e2fxmTq0OzSchidqe29bQ3c0KcpvyHT1gzN0IUzmyvhtlW15102hSMcoA1Ogz8OgvxDdb7ZIR101WgQuKGXTkGYVwoZAvtmQPcUyM3SFZPfV2nZjIuXwK9X7fw+04vgxE46LSH0fmk+zYhPL9LvU33N93MZY3bPrk3MXkv1aTbtja3wt/U676ZvCuTXhDdsDjhYVaFZs5IATJirzy0JQGF5uARG5CT+kBNydGLzO9vC89hSfHpQqsCxOfDWNnh3u74PhE8s6T6dj1N3A/Zqp++X6W3sHvBquv2sK8wthU+2w5IyzRzcO1B3DjBmd5zTEjTvbNPyK71TYeh0nbcVmqPZHDJEd+q4dxBct0KH8x3wbRF0S9ZM0w2rwqumy51mzaYVahbs4GxYUa67BqT4dM/Qc7vVLjLbN00/A6Cvd2B7vbhaUKqrnZ3TXQQmdm6mH8oYs1uWSTNNct9AzWIVB/VKfn2VBkcBp5OVQU8k5QH4bLTuj+mvgcDjwNUQDNS+KhfCnfKF4fDZWC1qWe0NG3XyLiuGZujctou7h4dAAXokaRmCdn4NGH1esHhpN308TbTQ5n2DtISGeG96cz8d4lk5XoeF3hgJV/aK4i/OtArOwdmL4IaVuh3aOwWwqEzngVXTPEOdfvSio8zB8AydL3nPQN1uKb9a974dlqnvv/0QXWHczRvO75ain48BaVr/bE4udE/V/ULP6BLebzPkzK4wtVD32gw4/bz+ehV8XahDuUfP09qAv+3XDD+YMWaPLEgzTTI4A5aMDwdPQbTo7OhM+KwwvCl6FXDEPHjsAwieBrwILAL5SDNbPVK0eKYQPrHVOK3WHpqfNj8X1k3QJfrflWqmop1fSwmEfF+tgVhfr85SwMEbI2DKVn38lj46h+7K5VBaAz29k9iLW+ClLXDsfM0eWK0zsyfTCnUnilfyda7Xh9u1zz03TAOa5pAEXNkjPH9sQxV8U6jlbvZvB2OzNIibWazZrdcL4PJlWhanV6quVB6Tpcde2lMvUEJ21NT+HjR79v4oLRrbdRr8YJ7Ob7uxjxZwvqQHTN9fgzxjTPRZkGaarHuq3kCDrBv7wvzSOgdtgeCjsPDn4Ly5Lb5kcE9CitNgSbwT0fAM7ZjdvoKuX+kmy+18MCJT6zOd101Phn3TNFCbkK2B2eA0OCwb1k6A87vDjoDOyTmpC3wyWtv2wAY9wQ1K0zk9Y9ppoPdSntY+u7InPD8sunWgTMu3qhx+vEC3ULqzPzw/HNaWa+mN8xc332rOIPDYJg2KftxJpxFctgwuWaoLdb4cC3cN0Izag+thSj7c1V+HXq/rpcOvR+XoKuV5JeHXnV2sW1Od3mXn9xyVBV+P0/02lxwIU8fBpAHwp0FavNa2PDMmdmxOmmkWq8uhaxLk1eiGyP8bwawGHgHeovaZK0lXwKU7SP8MsibC/u11qGVVOXw7TuehBYE7Vuu8mVDg9KNO8Id1WsJjebkGb/MOgMuXwuwSGPIt/KADvDcKfjRf67eNbQ9X9YRHN+lw7MB03fj9myId0vlF79j9rkzL94+NcFF3mFGktcP6p+rcy70JznL8sD3iiaHyMD70MzCxs84p65WqQ/9PD6v9/Nv66qT+97fpv18Xwamd4P0CLXGzoRIeHgI/nOvNK0P7/RP71t7Ro64eqbt+zBgTGzEJ0kTkMWCDc+73zXmsSQzrK3R+V4rAhDSYUeINc34EPADUzaoB+MCXCmWlkP4U/PRiuH0dXNpDt5g6dr4GXwtL4dTOmh0I2ScDDs3W4Zdnh2qw9ko+LCiD2bm1q5Tf0R+Om6fZhp911RPYrBLNeGT4tIK6BWimsVZV6DD7l96QfmEgfGGyuxp89YkM0PxAuuik/wC6UOCVfF1d+XI+fD5m5+eLaBHmE+brFmbdkvVY5+DqXjp0mZ0EKw7UHQgEPb6dXaIbk/Aa9DEVkTXAz51zH+/NmzjnrojGsS2FiJwBTAKGAQc651pFNc/SAFyyBD7eriU11lZCn1QIrgfuAFbX/zz/IAiUQkmhDnFuKYGbHoffXga3ehOS767ULacGp2sGobBGT3yhoZbnh8FNK2Gf6brbwf7t4M2RtQM0gGt765DUPzZqVuHwDvCfkRqg5STvvD+gaZjW2qcbakwmTFqjw4+D0rR0S0hjArQUtA+We08KAjUCZ3eBF/N1vqSg+95+OkYXCNQnzQ8fj4HnNutOAbf0hV/08hYOBOGQOfBpoe4xa4xpOZp8LSUiSc65Rm593eYsAH4C/CPeDWlO163Qk9S6Cbr67NKF8MR7wG/R4mO7EFgZ/toB1RUw+r9w6x/C9/dI1duXO2DiAlhcqhmDM7vC/w3SLMAj+8DfB+tCgAz/Tm/zPyd31ptpVq2yTzdUz1TNoDmnFxN7Kyjaf0PSRffefD4Pru0Ff9sA1/SCvw7Z82v5RUtrnNRJM8chST6dx/nBNl0ZaoxpOfY4BVREngP6Am+JSImI3CwiTkQuEZF1wH+94/4tIptFpFBEvhCRERGv8bSI3O19/QMRWS8iN4hInohsEpGL9vLYTiLylogUicgMEblbRKY222+nmTjnFjvnlsa7Hc2pNKC7CTwwJBwgfXMmcDO7DdDqc9JJ8O6bO9+/rAx+slBXZBYeBqvGa9bs3MXhY5J8uw/QTHS0xj7dGFML9d8AeqHSay9XOwZCmTKft6uAT4vPggZooOU2Gio7qf6t1vKqrWK/MS3RHoM059x5wDrgZOdcFvCK99AR6FDHcd737wFDgK7AbOCF3bxsdyAb6AVcAjwsIjl7cezD6Iyn7sAF3q3FEpHLRGSmiMzMz8+Pd3N2q7hG56CFdhd4/31YtKgRL+CDzn1hwQJ46y3oUs8V/qMbdb++07tqlqBLCvxzX90HdFlZs/wYJgZaUr9uqEx/+I9njyTYvIs9aHfn/G46FO+Ai3vqvrdBrwBtaOHAFT10KLOhjsmBNRUwJWLbpiWlOtx/frfGt9EYE19NWUw9yTlX6pwrB3DOPemcK3bOVaJzVUaLSPYunlsN/M45V+2cexcoAfZtzLEi4gdOA+50zpU55xYBzzTh52kSEflYRBbUczu1oa/hnHvcOZfrnMvtUl/UkkC6peiWNP/19gC8+mrIyGjgk1Og/TWQvxZGjNj1YSvLYf+s2vel+HRbmzUVe91000DN0aehZfXrhuqREq7nt24vNlBPQeuqhQo+VwXhoGzdSWN8O72v
Tyr8ZXAjX9enhZhvWgX7z4Sj5sLBc7R8xn5Ze36+MSaxNCUB/n3oCy9g+gNwBtCF8N+vzkBhPc8tqDOPrQzY1Z+QXR3bBW3/9xGPRX4dU865o+P13vEgAn8ZBOcsgpNXwJo1EGhIeXU/nPYqvHTCng8dkwUfbYeJEef1ohqYVawrP010tbU+3VCfbIff7mJRzJ740SzZDX0gM0k3Ogd4YrNO9r+2j1b7T0JXcu7NUP64drD8QB2SLQtq7UBbyWlMy9TQj259C5Yi7zsbOBU4GliDDk9uJ7xvdjTko3N3ewPLvPtsO+wYOqkzvJUMR54NLhUNn3dh4EB45x149FHwfwJJJ+759a/sCbmzdI7O+d1hYyXcuor/b+/Ow6Mur7+Pv08S9n0JICAIsgmKQKN1rVZxQdytooJK1Wq1aLW2tQJaVKz6iFbFXX+Wat33vShaRRSRiIKgCIgge8IihEAISe7njzNjhhAghMnMJHxe15WLzMw3kxP7JT2c+77P4ZxWfuJTJNHeWw2nfe2//BpFxqHtjGJ8ssZDy3zG5opCfz4twPM5fohg5WZ4uBt0rLfdt9qujDQ4clsbSESk2qjocucKYHvbVxvh28VXAfWBf+xiXDsUQigGXgZGmVl9M+sBnF/V37cyzOw0M1sMHAy8ZWbjkx1TvKz8FAqWAgWQHvlXf3rMv/4zMqB/f/j+e+jRA669FsaNg+XLd/zee9SBiX29Ye3B0+Di77yx570VOOkmVasm39PbEgJcPc/HhjXYiQStbNE3dzNkNSpt1twsA4Z3hD+0gz+1hzm/9D1qIiIVraTdCow1s/8HjC7n9SfwAwRLgNV4E4bL4hLh9g0DxgHLge/wiZBZCfi+OyWE8ArwSrLjqAqvvQZFZRqwFEc26DRtCuvXQ9eucNNNpa+3agV33gl33LHj99+7Hjyxz46vk8Sqyff0tuRshqWFcHpL7zkG0CQN1m4nWctqCNnrt3zusMY+lmltsU+7OLCxj3n69sCqi11EqqcKVdJCCK+FEDqEEJqGEMaEECx2n1gIYX0I4ZQQQqMQQscQwhORa+ZFXh8aQhgZ+fzDEEL7Mu+/V7RR7k5emxtCGBhCaBxCOCByyeJK/reQSnjoIa8wTJoEe+0Fmzb54xDg3nv91GaLFp7IRT/OOguOPDLZkYvsnIbp3lx2foHv4zC2n6A1TfcEzYDBmf4Y4NN1cFxz+DoLrtrT91cu3om2NeuK4Kq50OYTaD4JLpwNy3ay7Y2IVA/VejtpZImzNvA1cADeouPipAaVQgoKoG7dxHyvf/8bli2DPWN2BZaUwMqVnpTtt19i4hCprNxCeGgpZOdBh7re/iX2gEqDdK+ivb8G2teGRYXehibajDY6DqoOvvesoMSHoA9oBk/k+BD263+A9cVw0R7QI/Leb6yEAxtVLMYQ4MSvvXXHJ/18X9u9i+GIr+CrLPUMFKlpdqUFRypohO9Ly8f7t90JvJbUiBJoQ5mN+qNGweOP++fr13tl6513tv31f/87vF5OE9lYjz8OS5bsOJaxY2HhQpgxo/Rj5kzIzVWCJqlvcQEc8AUsLIAL2kCTdDh0GvxzkTecjRrb1StqyyJ90WKnBUQ/3QRcugd0qud9BOukw+S+MLAFjO7k3+O1lT6X9v4l8Kfvt5xNuz0f/QSrN8PjPXwrQLs6cPve0LWeN5cWkZqlWidpIYSpIYQuIYT6kWXQW0MIOzM672dmlmFml5rZf81shplNN7N3zOz3ZlbJfuJVZ8UK2GMPmDzZHy9ZAnffDSNGePI2dqxv2v/zn/1f32UtXgw33wxDh275+pw58N57/vm8eXDJJZ787UidOtC69dYfLTWOSaqBf/wIg1rBYz28Z9lDS6F9HbhuPnT9DCb+5MlaowyY1M+TOPDqWc/63t8MSpcmHl8BszfA1DxvItsl0kOwU13vZTZvI5w5y5Out/eDw5pWLM6Z+T5/Nq3Mufkjm/prIlKzVOskLc6eBPrgjXhPAAYCNwL7A/9JXljlu+MOT4BuvNEfjxoFv/41HHIInH66Py4p8UTrzTe3/vqLL4Y2bSAvD+6/v/T5K66AwYPhnHPgj3+EYcPg5Zfhh53oC/Xuu15JE6kuJqzxNi+z8+HKefBBH3i3t5++XLjJlxObfAw3/eDVsR71oUdkWsCKQt9j1sC8J9DQVt6bbNovPLHr+wXcvQhGzPf3ub8rvNUbvjkQnu8FWY0rHme3+jBl3db/8Ppsnb8mIjWLkrRS/UIIl4UQPgshLI58fBZCuAzom+zgYq1Y4cuQEybAt9/6kuV//uPJ2PHHw/jxsHmzX9ehgydbsb/UFy/2atl998G558INN/jrn34K333nVbHnnvNrbrwRLrsM/rGDpio5Of71l10Gxx0Hv/1t+RU8kVTULMP78P17he8X27MO9M6GzQGGtPImtBtK4O8LvXff3I1weiv/una14e3VUBCgFvCrZj7Ps08juG5Pr7Z9us4Ttv/18WSwsvpHep9dPc+XPfOL4fYffR/dua3i8B9CRFJKlSdpZjbKzFKuElWONWZ2ppn9/N/EzNLMbBDemDdl3HEHDBkCnTrB8OFw+eX+/JAhcPvt/rkZ9O0L++/vS6FvvunNZF9/3atobdt6xe3++0uraTfeCOef70kc+JinGTPg6qt3XE279loflP7QQ5CW5hMIxtf4zllSU1y0B4z4AZYXeoKV9QWsKvLk7KkcT7QOb+K9zaat93Yc/10Fm0pgwSYY0QGaZvjhgoeW+tIpQPt6/twV7eC2vXd9UkaawX97w+oiaDcZmk2CyWu98qepAiI1j1VyC1fFv4HZKKBLCGFIlX6jXWRmewG3A0dRmpQ1Bf4H/C2EUMlBMJWTlZUVsrOzt3p+xQrYZx/4+mto184Tp86dPUEaMQIOPtgTtBCgXz9vItu0KRQW+vMlJf4eL77oSRrABRfAK69A8+b+nt9+C2vWwGGHeVVtwgQYOdK/7tFHt4513jw46CBPznJzfa/cmjXQsydkZ/v3leQzsy9CCEntI7it+zrZSgL85Xt4cIlXxNLwE5rp+JJnAMZ0hhdyYUqef03LWjCyI9y0wNtiFOEJ3pBW8H89/JoDIlW35YfE/+RlcaTVTcZuvh6SCve1SFWJ619vM7vWzJaYWZ6ZfWdmA4HhwCAzW29m0yPXNTGz/zOzZZHrR0fmf2JmQ83sEzMba2ZrzWy2mR0dzzjLE0JYEEIYFELIxLuoHxJCaBV5LqEJ2vbccQccfTTk5/sm/zvv9OfHj/c9aVC6zPjVV77smZvr7TE2bSp9/corPYnq2dMPH+TlQe/eMHWqn8Zs1syXLz/7DAYOhG++8UkBy5ZtHdMtt8B558GqVZ6o3XwzNGmiapqkrk0l8OhSOG0mDPnG96Td2QXu6eptLUqAhmmlbTUA7l7sCVotoC5wVFN4cjn8VOQVrna1/eKpeXDFXOg2xTfz392lalpjpJsSNJGaLm5/xc2sOz4B4IAQQiN8AsFsfETUcyGEhiGE/SOX/xv/h2cXfL/XsWzZ3+yXwHx8QPvfgZfNrHm8Yt2
REMKqEMLK6GMzOyZR33tHFi6E6dO9cnbiifDUU9C4sbfbqF0b6kXm/Z17Lhx7rDeSLYzMB8zLgzPO8ERq+XL/mu7dfekTPFnLyPBTmf36eTLXoYNX6wYOhMce86pcrHnz4I03YNYsv/7IIz0pbNDAT5mOGLH13rTi4tKYRBKtsAROmAHP58KgTDi0CVw+B0YvgI0lvmx5biv/PNqxOx1YXOi/MOulQYn5aKfsLCg+Es7IhJs7wSv7eouO11b6vrYP+8CFeyTtRxWRai6euxiK8T6OPc0sN4SwAMDKrHWZWWtgANA0hLARyDezfwKXAA9HLssB7o6003jOzK7BT1s+Gcd4d8b/AR2S9L238MILpZ+vWuXNY886y5c/FywoHdH09NOly55RhYU+GaB2bU/GevSAu+7yhG3MGLjnHv+aCRP8+nr1YO5cf8+jjvI9cFFFRV4x++EHr6Lde6+/Z8OGvretb1/fCzdtGnzwgVf/wJPMk0+GLl3gpZeq9D+VSLleyPUDAR/uX9rK4vRM6PE5PNnD96X1rO+/0KKin9c2yC/xStsZmf7c0k3w39Vwx97et+xEtZ0RkTiJW5IWQphnZlfhLSx6RQYu/6mcSzviKwbLYhK4NGBRzDVLyvQ7WwhU6chhM9tWW1cDWlTl966sunV9JubmzV796htzBrV1a0+8MjN9iTI6qmn6dE++1q/3wwLDh3srjrVrPcFau9bfNzPTJxbMmeOHEWITNIBnnvHvnZZWWnUrLvYDCiUlMGWK/9m9O3Tr5l/z7LN+6rOgwBPK6dP9YINIIr2/Bga33rLXWOva8OumPg2gYTqMXOAnN9cWeWJWEPltVBx8+TPd4M1Vfv2DS+G6Dp6giYjEU1zPA4UQngaeNrPGeFXsdmBemcsW4U25W8bO/yyjnZlZTKLWAdhBb/xddjgwBCgzDhkDUnL0cYMG3qy2PNdc441qH3vMkyUzvz4/HzZu9GsKC721xl13wejR3n5j0iTf9J+XB8cc49e8+ir85S+lBwCiVbSrrvIK2oYNvgx6zDGl1/TtCwccAIce6hW2oUN9dBRAemR/zqhRfmBBJJGi7TYAstd52411RTBjPVzcBgY096XQgmJPxvZt4K81zPAErnG690mbme/7117oCQc1Se7PJCI1U9yStMietHbAJ0ABsBGvkK0AjjGztBBCSQhhmZm9C9xpZtfjSVEnoH0I4aPI27UCrjSzB4BTgX2At+MV6zZ8BmyIiSH2Z/uuir93XC1fDv/6l5/IjF1tjh0j1aaNn9aMVtMaNICPP/YqW0GBf7zxhi93DhjgS6DHRHbmPfOMn+AEf/+5c6FjR2/fMXDg1vFcfHFpggb+verXh//9T9U0SbyhbaD/dFizGR5d5lW0phmwqMAPB5j5RIENJV4hzl7v1bTVRZBXDNSGh7pDi5SbQyIiNU08zwbVAW4DVgLL8URrOBDdRbXKzKZFPj8fH4z+Dd7u4kUgdnvtFKBr5L1uAX4TQlgVx1jLMx8odzt7COFXVfy94yraR23//X0JEvy0ZuwC8vLl/n9GhYXePuPYY/35wkK/Li3NK2YheLPbUaP882gV7corPfHq39+XW//619JrYj35pCeMUdHDDWvX+rUVGTklEk/7NYTjm8HYpb7XLGczzMiHWmmwoMArZIUlXjVrXgtG7+Vjl8BbdXzSRwmaiCRG3JK0EMKMEMKBIYRGIYTmIYQTQwhLIyclDwshNAsh9ItcuzbS3b99CKFJCKFvCOHZLd8uDIu81i2E8G684tyOOcAYM1tgZrebWZ8EfM+4i1bRrr3Wm9eC7zFbvXrra1u39j/nz/eTnX37ls7aLCnxJcyhQ/1gwurVXk2LVtEmTYLf/MZbduyxhy9bFhTA2zH1zjVr4KKL/L2i6tXz5zMyPGEbP96raSKJsrEYns6BtrXh1X3h8rbwi4Z+IGBBAawq9FOda4q8ynbnYt/HVsu87cZr5fxdEhGpCupRHRFCuAe4x8w6AmcD/zKzusAzwLMhhDlJDbCCHnzQq2B/+AN8+aU/V1BQ/rXRnmfR6tewYZ5Ugbf12LgRJk70ROrMM73qtXatv37//d6eo3lzT7zGj/dRVaNGwQkneHHudKgAABckSURBVJXuH//wKlus6NcXFvq+t4wM7+emJU9JlPfXeAWtZS0fqVTH4LsNcHAjmJwH3evBkk3+9+L0lv76tHw/NDCguc/JFBFJBLVCLCOEsDCEcHsIoS9wLnAa8G2Sw6qw6GGB88/3x9dc40nbtmTEpOnRBA1g3TpvflurllfSXnvNK2Jjxnh/tV/9ysdHnXwyDBrkhwQ+/tife/ttr5aNHbvl9yobR/36PsHgggt25ScW2TkhMlFg0SZPzrLXw8bg457AE7TL2nmbjbsWw22L/IDAuB7w5XroVDeZ0YvI7iTlKmkhhHHAuGR9fzOrBRyPV9OOBj4CbkxWPDurUyf/mDgRfvyxtF9a7dq+YX/NGmjUyBOmDRtKK121a5c2mI32VzPzPWglJVsuSV5wgSdxRUXeWw38+mhFbNkyr6KVbVgbu+zZtau364i25xCJh9xCuG+JDzTfozb8vi0cUubk5VHNfXxTCJBb5AlbEaWNa+umwf4NoG0taFIL3u7tS6PP5MBLufClBhCJSIKkXJKWLJGpAufgTXM/B54FLgkh5Cc1sEo67DBf7jz+eG95MXq070G7+GJfZiwrNkFr1sw/P/tsr8y1LNOc84cfyp8YUKdO6Z6zTp38BOny5d5zLTfXq3ItW/phg0cf9cRQJF5WFMLB0+DoZnBNe5izEc6cBf9vb++LFtUg3WdrDonUx6MHoBulQ5taMLcApqyDRrX8gEDPz/3vxb4N4K39oK36oYlIgihJKzUceBr4cwih2m8NTkuDL77wJcs2bUqXHjMzYenS0usyMvwEaPREZ1qat9445JBtv3dGhg9ojy6plvXAA56stWgBrVr50Pdp0+Dzz+Gdd3xGqEi83bUITmgO90Wqs8cDhzWBgV/DmZlQO2a5fXBruHA2FAY4oKFXzOZu8Ara3AKf63liJjzX08dDFZZAM53oFJEEq5I9aWb2oZkVRIaqr4/tM2Zm55rZQjPLN7NXY2dymllzM3sl8tpCMzu3KuIrTwjh1yGER2tCghbVq5cfBjj/fF+ivOACP8G5557+ev/+/me3br4UGh2M/ve/b/99//AHf68PPij/9YEDvUXH/Pk+Q7R3b/+zcePyT5mKxMOHP8E5rbd8rl8j73n23QZ4cyUc9AU0mAh9s6F+mp/YnL8JVm72qQLvrPF/udZOg2d7+lSCBulK0EQkOaqykjYshPBY7BNm1gufRDAQmAY8AjyA7/8CuB/vVdYa6AO8ZWbTQwizqjDOGuugg/wjqrDQxzTttResXOnNZEtKfNLAYYfBE0/4ac0JE+Duu32iQFkFBTBunF933HG+D61Xry2v6dPHT3qefroPdI9atMj3sB15ZBX8sLLba1kLfizwgelRBcW+DDp+tVfaHu7uPc+mrIPTZvoMz/Na+761+RvhniVeNevb0KcNiIgkU6
JPdw4G3gghTAwhrAeuB043s0Zm1gA4A7g+hLA+hDAJHwV1XoJjrLGeeALat/e5miF4ghZtv/Hpp56stYhMKb355i03+kddcYWfyuzf3w8OxCZhsb7/3t/z2GNLP155xQ8zlG14KxIPl7SFUQs8UQNYWgC9psL6Ehj+Q+QATZqf2uxcD/7dw/ej3bMYhs6Gfy2HpumwajPc3zWJP4iISERVVtJuNbPbgO+AESGED4FewKfRC0II35tZIdAN/91ZXKYf2XTgiCqMcbdRWAi33OLLncccA9nZXlWbONETs3XrfBnznXf8oMGaNX4ydMiQ0veIVtGKinxPWtu2ntjNmrV1NS3aSFckUU5pCXM2wP7Z0K0ufLEe6qXDFW3hoaVw195w6kz/pdeilu9HA8is5UudCwqgfjq8uz/sXT+pP4qICFB1lbRrgc74LM9HgDfMbG+gIbC2zLVrgUY7eE120fPPQ04OfP01fPghrFrllS7wz2vV8hFO8+b5EmmPHj5UPbaadsUV/qeZPz9xoj/eVjVNJNH+0gG+OQBWFXmbjdF7+b/+NgcYs8h7nJ3XBhYcDGM6+y/AR7rDhP1hcj/IOQSOapbkH0JEJKJKkrQQwpQQQl4IYVMI4d/40PUT8GHqjctc3hjI28FrsosOPdT3mV1zjS9Xnn66j3164AH405880Soo8OrYpEnw6qveNuPpp/3rY6to4InaKadAw4al1TSRVPDkCq+OHd0M/rgn3N0Vrm7vTWsHNvdeatnr4JYf4fzW8MBSP2DQu6Hf1yIiqSJRe9ICvv1jFvDzACAz64wPZp8T+cgws9jdIPtHvqZaM7M7zGy2mc2InF5tmugYOnWC3/3OW2Okp8OMGdCxoy97Ll0KTz3liVh0T1q3bn6Y4MorvWq2cWPp/4GZeYI3a5a32GjUCH7720T/RJJMqXBPb8ubq+DK9jB5HayJNGv+XVtIxxOyl1bCWd/4NcPaweJNSQ1XRGSb4p6kmVlTMzvOzOqaWYaZDQZ+BYwHngJOMrPDIwcFbgJejlTd8oGXgZvMrIGZHQqcAjwZ7xiT4D1g3xBCbzwZvS5ZgbRo4Q1ua9WCn36Cjz7yIel1Ig06Fyzw5K1jR2+GGx3v9PzzPp0gPd2vi84D/fFH/3zq1PIb3EqNlTL3dFn10qBOGvy2DRw7A95eBTmFPq+zSQasOATmHwSXtoW3VsOB2lAhIimqKg4O1AJGAz3w34uzgVNDCN8BmNnv8WStBTABiK3BXA48DuQAq4DLakL7jRDCuzEPPwN+k6xYTjzRP8oaMwZeeMHbZGRmejKWk+MTAl56yRO2unW92W29en5a85tvYOZMeOQRn2agCQK7j1S6p8sa0hpu/RHe7w0vrIR/LIT5BdA43fuiffAT7FMfXl/pI6Qm9U12xCIi5Yt7khZCyAUO2M7rT+Od/ct7bTVwarxjSjEXAs8lO4iy/vxnb3J79tlw+eXeS+3hh+H9932fWnSG56ZNPvPzmWc8KatTx5dFu6plwe4spe7pwa3hs3XQ9XM4trn/S7F+GnzUB2Zv8MRs8SY4qDF83Be66iSniKQojYWKEzObALQp56URIYTXIteMwOc4P7WN97gEuASgQ4cOVRRp+YqL4aab4N57/c8mTTxJS0/3MVC9esH48dC5M+Tne6+z886Dxx7b8XtL9RSPezpyTULv6zSD+7vBH9vDJ2vh3FZwTDPISPOE7KSWO34PEZFUYEGdRRPCzC4Afg8cHULYsKPrs7KyQnZ2dtUHFvHccz4N4JNP4PDDfdlzwQJ/7uqrfTlzwAAfmP7pp36wYOZMb1rbtm3CwpRdYGZfhBCy4vh+O3VPQ+Lva6n54n1fi6SSRE8c2C2Z2fF477iTK/p/ZokUraJFZ3YuWQIrVsDo0TBihFfTVq/2FhyTJ/vhg++/92raDTckNXRJklS8p0sCfJUHX+b55yIi1Z2StMS4D2/K+56ZfWVmDyU7oFgvvujDz4891h8PHerLmq++6gncMcf4frRWrbwv2p13+mGC887z1h1LlyY1fEmOlLqnJ6+F7p/DoG/g7G/888llW2OLiFQz2pOWACGELsmOYXvGjIG8vC1Pfdar58uZ118PI0f6Kc+cHG/dMXiwX1Nc7D3Tbr/dl0Vl95FK9/RPm+HEr+F3e8A5rbwp7eur4JSZMOdAaFor2RGKiFSOkjThwQd9eTPWkiXw1796M1uA2bOhXTvYe29o0KD0OjNvdCuSDEUlMPBryC+GbzfAyTOhW314sRcc2RSez/XB6yIi1ZGSNCErZsttSYkPWZ80CU46Cb79tvS1Cy+ExYvhjTcSH6NIee5cDCsK4fJ2cFcXKA5w2Ry4ap7P6cxRg2URqcaUpMkWXn4ZzjwTjjzS+6FFK2lR0bFRIqlg3HL4awcfnn5rZ580cGtn6DgZ9qgD/+qe7AhFRCpPSZr8rKQEbrwRBg3y/WdTpiQ7IpHtW1sE/ZvChDVwzHS4qr1X0zaWQK/6cGiTZEcoIlJ5Ot0pP3v5ZT8w8OSTsHChz/WsjPz8+MYlsi3HNYdxK+DpfeCCNvDoMhi9EPaq6/vSzJIdoYhI5SlJE6C0ijZqlJ/gHDnSH++sd9+FDh18eLtIVRu1ly95/m6OD1bvWR+WFcJT+/iEARGR6ky/xgQoraINGOCPhwzZ+WpaCN4Qt3lzteSQxOhYF6b9ArrX84HpddJg6i/gIC1zikgNoD1pAvjEga5dvVFtVJcu/vz771fsPd59F9auhbffhkMOgT/+EZo2rZp4RaJa1oa/dUx2FCIi8adKmgBw1lnQqZPP5ox+7Left+GoiBB8qfSGGzzZO+kkVdNERER2hSppAvgetFhTp8K++/oSaEVEq2hnnumPR4yAgw5SNU1ERKSyVEmTreTmep+0f/6zYtdHq2iDB8PcuT6dYPNmOOAAVdNEREQqS5U02cqYMXDEEZ6kDRvmw9e3Jz8fCgq8dceTT275WuvWVReniIhITaYkTbaQmwuPPQbTp8O118J998Hw4dv/moYN4csvExOfiIjI7kLLnbKFMWPgnHOgfXu4/nqvpq1bl+yoREREdj9K0uRn0Sra3/7mj3v0gGOP9WqaiIiIJJaSNPnZmDE+LeCdd+DRR/2jbVvvnZaXV3rdxo1w111+YEBERESqhvakyc+6d4fVq+Hzz7d8ftAgKC4uffzII3DNNX79wIGJjVFERGR3oSRNfnbhhf6xPRs3wu23ex+0UaPghBM0xFoqbk0RrNkMzWolOxIRkdSn5U7ZKY88Ar/8pY+L2rTJR0CJVNSqzdB5CvxrWbIjERFJfUrSpMKiVbQbboC0NB+mPmqU9qZJxXWpB1P6wbXz4dv8rV8vDvDMChg0CwZ/A2+s1P0lIrsvJWlSYdEqWt++/vi001RNk53XrT78tg08tWLL50PwxOzuxTCgORzRFK6bD1fPS06cIiLJpj1pUiEhwG23wT77wMUXlz5fuzbceuv2DxCsX+8TCVq2rPo4pXpolgErNm/53P9+ghn58GUW1In883FQK+jxOVzaFvZpkPg4RUSSSUmaVNjdd
2/ZigN8iHq7dtv/usxMn+VZVFR1sUn1sbEYnlgB93TZ8vn31nhSViemvt8kA05p4a8pSROR3Y2SNKkQM2/FsbOeeMKraOBtO+68M75xSfWSUwgHTYNfNIL+zbZ8rVkG/FCw9dcsL4RDmyQmPhGRVKI9aVKlLr0UGjSAVq28Eie7t/XFMLAF/GXPrVu3nNsKns+BKTFjyP67Cj5dB6dqqVxEdkNK0qTKRKtoTz0FH3wAJSVeTZPdV14xzMqHgV/DCTMgL2YJvH1dGNcDTvoaDp0GWdlw8Xfwci9opJq/iOyGLOh8e0rKysoK2dnZyQ5jl9SrB+npfnAAoHVrWLlyy+kFkjhm9kUIISuZMfwiKyt8kZ1NUYknYPXS4cFuW15TUAyT1kKtNDiksf8psi2pcF+LVBX9+pMqEa2i9ejhp0LHjoWhQ1VN293N3gBnz4LP8+D2veE/K7w3Wqy66dA/0oJDCZqI7M60iJAAZnYzcApQAuQAQ0MIS5MbVdUaN87//OIL/4j11ls6QFDdVfae7lAHftUUzpgFY7vAphIoCpCu0WIiIlvRv1MT444QQu8QQh/gTeCGZAdU1T74AI44wqcStGgBq1Z5r7UQYPbsZEcncVCpe7pBOlzeDp7eB4bN9eXMOvotJCJSLv16TIAQQsx5NRoANX4j4IcfwpIlMHIknHqqTnbWNJW9pxcWwNMr4NVcyNkMIztWTXwiIjWBkrQEMbNbzGwRMJhtVB3M7BIzyzaz7Nzc3MQGGGejRsH110NGBgwfDg88AKtXJzsqiaeK3NOR636+rzf/lMvrK332a6N0OEz9z0REtklJWpyY2QQzm1nOxykAIYQRIYQ9gaeAYeW9RwjhkRBCVgghKzMzM5Hhx1W0inbuuf64c2dV06qjeNzTket+vq+7tMnk4e7wYwGc38YPCYiISPnUgiPBzKwj8FYIYd/tXVedW3AMGAA//gi9e3v1LDMTli2DqVMhJwfq1k12hLunqmpVUNF7GqD+PlmhzqPZnN4S7u+qJE12nVpwSE2m050JYGZdQwhzIw9PBmr01vnhw2HhQnjxRXj/fbjrLmjeHC6/HGrVSnZ0Eg+Vvaf3qgcfHQiZtasuNhGRmkJJWmLcZmbd8XYFC4HfJzmeKnX44dCvH1x0EdSvD3PmwH33JTsqibNK3dP105SgiYhUlJK0BAghnJHsGBLtb3/zQwOdOnnPtOuug3btkh2VxMvueE+LiCSaDg5I3OXnwyOP+GSBW27xatqttyY7KhERkepFSZrEXbSKNnIkDBwIbdt6NW3JkmRHJiIiUn0oSZO42rjRq2i//CU8/jg8/DAceqjP7FQ1TUREpOK0J03i6rPPoKjIh6v/5z+lzxcXe6ImIiIiFaMkTeIqMxMuu2zr5/v0gXPOSXw8IiIi1ZWSNImrffdVuw0REZF40J40ERERkRSkJE1EREQkBSlJExEREUlBGrCeoswsFx+3s6taAivj8D5VqTrECNU/zo4hhMxEBxMrTvd1df/fIZVUhxhh+3Em/b4WqSpK0mo4M8sOIWQlO47tqQ4xguJMFdXl56sOcVaHGKH6xCkSb1ruFBEREUlBStJEREREUpCStJrvkWQHUAHVIUZQnKmiuvx81SHO6hAjVJ84ReJKe9JEREREUpAqaSIiIiIpSElaDWdmd5jZbDObYWavmFnTZMdUHjM708xmmVmJmaXUKS4zO97MvjOzeWb2t2THUx4ze9zMcsxsZrJjSQTd17tO97VI6lOSVvO9B+wbQugNzAGuS3I82zITOB2YmOxAYplZOnA/MADoCZxjZj2TG1W5xgHHJzuIBNJ9vQt0X4tUD0rSargQwrshhKLIw8+A9smMZ1tCCN+GEL5LdhzlOBCYF0KYH0IoBJ4FTklyTFsJIUwEVic7jkTRfb3LdF+LVANK0nYvFwLvJDuIaqYdsCjm8eLIc5I6dF/vPN3XItVARrIDkF1nZhOANuW8NCKE8FrkmhFAEfBUImOLVZE4U5CV85yORCeA7usqpftapBpQklYDhBD6b+91M7sAOBE4OiSx58qO4kxRi4E9Yx63B5YmKZbdiu7rKqX7WqQa0HJnDWdmxwPXAieHEDYkO55qaCrQ1cw6mVlt4Gzg9STHtNvTfb3LdF+LVANK0mq++4BGwHtm9pWZPZTsgMpjZqeZ2WLgYOAtMxuf7JgAIpvThwHjgW+B50MIs5Ib1dbM7BlgMtDdzBab2UXJjqmK6b7eBbqvRaoHTRwQERERSUGqpImIiIikICVpIiIiIilISZqIiIhIClKSJiIiIpKClKSJiIiIpCAlaZKyzOxDMzuuzHNXmdkDZvZfM/vJzN4s8/rHkZYMX5nZUjN7NbFRi2xfZe7rmOvGmtn6xEQqIsmmiQOSyp7Bm2zG9pY6G/gLUBuoD1wa+wUhhMOjn5vZS0CqjuWR3ddO39cAZpYFNE1EgCKSGlRJk1T2InCimdUBMLO9gLbApBDC+0Detr7QzBoBRwGqpEmq2en72szSgTuAvyYuTBFJNiVpkrJCCKuAz4HjI0+dDTxXwTmNpwHvhxDWVVV8IpVRyft6GPB6CGFZVccnIqlDSZqkuujSEJE/n6ng152zE9eKJFqF72szawucCYxNQFwikkKUpEmqexU42sz6AfVCCNN29AVm1gI4EHirqoMTqaSdua/7Al2AeWa2AKhvZvMSEKOIJJkODkhKCyGsN7MPgcepeGXsTODNEEJBlQUmsgt25r4OIbwFtIk+NrP1IYQuVRuhiKQCVdKkOngG2B94NvqEmX0MvIBXIxaXaWmwM8uiIsmys/e1iOxmrGJ7sEVEREQkkVRJExEREUlBStJEREREUpCSNBEREZEUpCRNREREJAUpSRMRERFJQUrSRERERFKQkjQRERGRFKQkTURERCQF/X8e8zmViRvzFQAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 1008x648 with 6 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Setup parameters visualization parameters\n",
    "seed = 17\n",
    "test_size = 492 # number of fraud cases\n",
    "noise_dim = 32\n",
    "\n",
    "np.random.seed(seed)\n",
    "z = np.random.normal(size=(test_size, noise_dim))\n",
    "real = synthesizer.get_data_batch(train=train_sample, batch_size=test_size, seed=seed)\n",
    "real_samples = pd.DataFrame(real, columns=data_cols+label_cols)\n",
    "labels = fraud_w_classes['Class']\n",
    "\n",
    "model_names = ['GAN']\n",
    "colors = ['deepskyblue','blue']\n",
    "markers = ['o','^']\n",
    "class_labels = ['Class 1','Class 2']\n",
    "\n",
    "col1, col2 = 'V17', 'V10'\n",
    "\n",
    "base_dir = 'cache/'\n",
    "\n",
    "#Actual fraud data visualization\n",
    "model_steps = [ 0, 200, 500]\n",
    "rows = len(model_steps)\n",
    "columns = 5\n",
    "\n",
    "axarr = [[]]*len(model_steps)\n",
    "\n",
    "fig = plt.figure(figsize=(14,rows*3))\n",
    "\n",
    "for model_step_ix, model_step in enumerate(model_steps):        \n",
    "    axarr[model_step_ix] = plt.subplot(rows, columns, model_step_ix*columns + 1)\n",
    "    \n",
    "    for group, color, marker, label in zip(real_samples.groupby('Class_1'), colors, markers, class_labels ):\n",
    "        plt.scatter( group[1][[col1]], group[1][[col2]], \n",
    "                         label=label, marker=marker, edgecolors=color, facecolors='none' )\n",
    "    \n",
    "    plt.title('Actual Fraud Data')\n",
    "    plt.ylabel(col2) # Only add y label to left plot\n",
    "    plt.xlabel(col1)\n",
    "    xlims, ylims = axarr[model_step_ix].get_xlim(), axarr[model_step_ix].get_ylim()\n",
    "    \n",
    "    if model_step_ix == 0: \n",
    "        legend = plt.legend()\n",
    "        legend.get_frame().set_facecolor('white')\n",
    "    \n",
    "    for i, model_name in enumerate( model_names[:] ):\n",
    "\n",
    "        [model_name, with_class, generator_model] = models[model_name]\n",
    "\n",
    "        generator_model.load_weights( base_dir + '_generator_model_weights_step_'+str(model_step)+'.h5')\n",
    "\n",
    "        ax = plt.subplot(rows, columns, model_step_ix*columns + 1 + (i+1) )\n",
    "\n",
    "        if with_class:\n",
    "            g_z = generator_model.predict([z, labels])\n",
    "            gen_samples = pd.DataFrame(g_z, columns=data_cols+label_cols)\n",
    "            for group, color, marker, label in zip( gen_samples.groupby('Class_1'), colors, markers, class_labels ):\n",
    "                plt.scatter( group[1][[col1]], group[1][[col2]], \n",
    "                                 label=label, marker=marker, edgecolors=color, facecolors='none' )\n",
    "        else:\n",
    "            g_z = generator_model.predict(z)\n",
    "            gen_samples = pd.DataFrame(g_z, columns=data_cols+['label'])\n",
    "            gen_samples.to_csv('Generated_sample.csv')\n",
    "            plt.scatter( gen_samples[[col1]], gen_samples[[col2]], \n",
    "                             label=class_labels[0], marker=markers[0], edgecolors=colors[0], facecolors='none' )\n",
    "        plt.title(model_name)   \n",
    "        plt.xlabel(data_cols[0])\n",
    "        ax.set_xlim(xlims), ax.set_ylim(ylims)\n",
    "\n",
    "plt.suptitle('Comparison of GAN outputs', size=16, fontweight='bold')\n",
    "plt.tight_layout(rect=[0.075,0,1,0.95])\n",
    "\n",
    "# Adding text labels for traning steps\n",
    "vpositions = np.array([ i._position.bounds[1] for i in axarr ])\n",
    "vpositions += ((vpositions[0] - vpositions[1]) * 0.35 )\n",
    "for model_step_ix, model_step in enumerate( model_steps ):\n",
    "    fig.text( 0.05, vpositions[model_step_ix], 'training\\nstep\\n'+str(model_step), ha='center', va='center', size=12)\n",
    "\n",
    "plt.savefig('Comparison_of_GAN_outputs.png')"
   ]
  },
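  {
   "cell_type": "markdown",
   "metadata": {
    "pycharm": {
     "name": "#%% md\n"
    }
   },
   "source": [
    "As a quick sanity check on the synthesized records, the cell below compares the per-column mean and standard deviation of the real fraud data with the last generated batch. This is a minimal sketch, assuming `real_samples`, `gen_samples` and `data_cols` from the previous cell are still in scope; columns with a large mean gap are the ones the generator reproduces worst."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "outputs": [],
   "source": [
    "# Sanity check (sketch): summary statistics of real vs. generated data\n",
    "# Assumes real_samples and gen_samples were created by the cell above\n",
    "stats = pd.DataFrame({\n",
    "    'real_mean': real_samples[data_cols].mean(),\n",
    "    'gen_mean': gen_samples[data_cols].mean(),\n",
    "    'real_std': real_samples[data_cols].std(),\n",
    "    'gen_std': gen_samples[data_cols].std()\n",
    "})\n",
    "\n",
    "# Rank columns by how far the generated mean drifts from the real one\n",
    "stats['mean_abs_diff'] = (stats['real_mean'] - stats['gen_mean']).abs()\n",
    "print(stats.sort_values('mean_abs_diff', ascending=False).head(10))"
   ]
  },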
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "pycharm": {
     "name": "#%%\n"
    }
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 2",
   "language": "python",
   "name": "python2"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}