{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-08-08T23:15:22.414470Z",
     "start_time": "2019-08-08T23:15:17.857221Z"
    }
   },
   "outputs": [],
   "source": [
     "# Vanilla GAN on MNIST (TensorFlow 1.x graph mode).\n",
     "import tensorflow as tf\n",
     "import numpy as np\n",
     "import matplotlib.pyplot as plt\n",
     "import matplotlib.gridspec as gridspec\n",
     "import os\n",
     "from tensorflow.examples.tutorials.mnist import input_data\n",
     "\n",
     "# Interactive session: lets later cells run ops without an explicit context.\n",
     "sess = tf.InteractiveSession()\n",
     "\n",
     "mb_size = 128  # minibatch size used by the training loops below\n",
     "Z_dim = 100    # dimensionality of the generator's latent noise vector"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-08-08T07:28:52.183097Z",
     "start_time": "2019-08-08T07:28:38.608442Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From <ipython-input-1-e627224ef189>:13: read_data_sets (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n",
      "WARNING:tensorflow:From /usr/local/lib/python3.7/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:260: maybe_download (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please write your own downloading logic.\n",
      "WARNING:tensorflow:From /usr/local/lib/python3.7/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:262: extract_images (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use tf.data to implement this functionality.\n",
      "Extracting ./data/MNIST_data/train-images-idx3-ubyte.gz\n",
      "WARNING:tensorflow:From /usr/local/lib/python3.7/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:267: extract_labels (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use tf.data to implement this functionality.\n",
      "Extracting ./data/MNIST_data/train-labels-idx1-ubyte.gz\n",
      "WARNING:tensorflow:From /usr/local/lib/python3.7/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:110: dense_to_one_hot (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use tf.one_hot on tensors.\n",
      "Extracting ./data/MNIST_data/t10k-images-idx3-ubyte.gz\n",
      "Extracting ./data/MNIST_data/t10k-labels-idx1-ubyte.gz\n",
      "WARNING:tensorflow:From /usr/local/lib/python3.7/site-packages/tensorflow/contrib/learn/python/learn/datasets/mnist.py:290: DataSet.__init__ (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n"
     ]
    }
   ],
   "source": [
     "# Download (if needed) and load MNIST into ./data/MNIST_data, labels one-hot.\n",
     "mnist = input_data.read_data_sets('./data/MNIST_data', one_hot=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-08-08T07:28:52.850101Z",
     "start_time": "2019-08-08T07:28:52.190346Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From /usr/local/lib/python3.7/site-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Colocations handled automatically by placer.\n"
     ]
    }
   ],
   "source": [
    "def weight_var(shape, name):\n",
    "    return tf.get_variable(name=name, shape=shape, initializer=tf.contrib.layers.xavier_initializer())\n",
    "\n",
    "\n",
    "def bias_var(shape, name):\n",
    "    return tf.get_variable(name=name, shape=shape, initializer=tf.constant_initializer(0))\n",
    "\n",
    "\n",
    "# discriminater net\n",
    "\n",
    "X = tf.placeholder(tf.float32, shape=[None, 784], name='X')\n",
    "\n",
    "D_W1 = weight_var([784, 128], 'D_W1')\n",
    "D_b1 = bias_var([128], 'D_b1')\n",
    "\n",
    "D_W2 = weight_var([128, 1], 'D_W2')\n",
    "D_b2 = bias_var([1], 'D_b2')\n",
    "\n",
    "\n",
    "theta_D = [D_W1, D_W2, D_b1, D_b2]\n",
    "\n",
    "\n",
    "# generator net\n",
    "\n",
    "Z = tf.placeholder(tf.float32, shape=[None, 100], name='Z')\n",
    "\n",
    "G_W1 = weight_var([100, 128], 'G_W1')\n",
    "G_b1 = bias_var([128], 'G_B1')\n",
    "\n",
    "G_W2 = weight_var([128, 784], 'G_W2')\n",
    "G_b2 = bias_var([784], 'G_B2')\n",
    "\n",
    "theta_G = [G_W1, G_W2, G_b1, G_b2]\n",
    "def generator(z):\n",
    "    G_h1 = tf.nn.relu(tf.matmul(z, G_W1) + G_b1)\n",
    "    G_log_prob = tf.matmul(G_h1, G_W2) + G_b2\n",
    "    G_prob = tf.nn.sigmoid(G_log_prob)\n",
    "\n",
    "    return G_prob\n",
    "\n",
    "\n",
    "def discriminator(x):\n",
    "    D_h1 = tf.nn.relu(tf.matmul(x, D_W1) + D_b1)\n",
    "    D_logit = tf.matmul(D_h1, D_W2) + D_b2\n",
    "    D_prob = tf.nn.sigmoid(D_logit)\n",
    "    return D_prob, D_logit\n",
    "\n",
    "G_sample = generator(Z)\n",
    "D_real, D_logit_real = discriminator(X)\n",
    "D_fake, D_logit_fake = discriminator(G_sample)\n",
    "D_loss = -tf.reduce_mean(tf.log(D_real) + tf.log(1. - D_fake))\n",
    "G_loss = -tf.reduce_mean(tf.log(D_fake))\n",
    "\n",
    "D_loss_real = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(\n",
    "    logits=D_logit_real, labels=tf.ones_like(D_logit_real)))\n",
    "D_loss_fake = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(\n",
    "    logits=D_logit_fake, labels=tf.zeros_like(D_logit_fake)))\n",
    "D_loss = D_loss_real + D_loss_fake\n",
    "G_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(\n",
    "    logits=D_logit_fake, labels=tf.ones_like(D_logit_fake)))\n",
    "D_optimizer = tf.train.AdamOptimizer().minimize(D_loss, var_list=theta_D)\n",
    "G_optimizer = tf.train.AdamOptimizer().minimize(G_loss, var_list=theta_G)\n",
    "def sample_Z(m, n):\n",
    "    '''Uniform prior for G(Z)'''\n",
    "    return np.random.uniform(-1., 1., size=[m, n])\n",
    "\n",
    "sess.run(tf.global_variables_initializer())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-08-08T07:30:39.628853Z",
     "start_time": "2019-08-08T07:28:52.853135Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iter: 0 D loss: 1.501 G_loss: 2.398\n",
      "Iter: 1000 D loss: 0.02053 G_loss: 6.749\n",
      "Iter: 2000 D loss: 0.07285 G_loss: 5.144\n",
      "Iter: 3000 D loss: 0.1927 G_loss: 5.546\n",
      "Iter: 4000 D loss: 0.1536 G_loss: 4.8\n",
      "Iter: 5000 D loss: 0.2518 G_loss: 4.739\n",
      "Iter: 6000 D loss: 0.2418 G_loss: 3.371\n",
      "Iter: 7000 D loss: 0.4397 G_loss: 3.515\n",
      "Iter: 8000 D loss: 0.3669 G_loss: 2.932\n",
      "Iter: 9000 D loss: 0.4245 G_loss: 3.447\n"
     ]
    }
   ],
   "source": [
     "# Alternate one discriminator update and one generator update per iteration.\n",
     "for it in range(10000):\n",
     "    X_mb, _ = mnist.train.next_batch(mb_size)\n",
     "\n",
     "    # Discriminator step: feeds both real images and fresh noise.\n",
     "    _, D_loss_curr = sess.run([D_optimizer, D_loss], feed_dict={\n",
     "                              X: X_mb, Z: sample_Z(mb_size, Z_dim)})\n",
     "    # Generator step: only the noise placeholder is required.\n",
     "    _, G_loss_curr = sess.run([G_optimizer, G_loss], feed_dict={\n",
     "                              Z: sample_Z(mb_size, Z_dim)})\n",
     "\n",
     "    if it % 1000 == 0:\n",
     "        print('Iter: {}'.format(it),\n",
     "              'D loss: {:.4}'.format(D_loss_curr), \n",
     "              'G_loss: {:.4}'.format(G_loss_curr))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-08-08T16:15:55.268637Z",
     "start_time": "2019-08-08T16:15:55.122431Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From <ipython-input-8-c0d77d66cdab>:15: calling reduce_sum_v1 (from tensorflow.python.ops.math_ops) with keep_dims is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "keep_dims is deprecated, use keepdims instead\n",
      "[[5 4 3 0 0]\n",
      " [2 3 0 0 0]\n",
      " [2 3 5 4 2]]\n"
     ]
    }
   ],
   "source": [
    "def get_shape(tensor):\n",
    "    static_shape = tensor.shape.as_list()\n",
    "    dynamic_shape = tf.unstack(tf.shape(tensor))\n",
    "    dims = [s[1] if s[0] is None else s[0]\n",
    "          for s in zip(static_shape, dynamic_shape)]\n",
    "    return dims\n",
    "\n",
    "choose = tf.placeholder(tf.int64,[None,5])\n",
    "minValue = tf.placeholder(tf.int64,[None,1])\n",
    "ex=6\n",
    "feed_dict = {\n",
    "    choose:[[5,4,3,0,1],[2,3,0,4,2],[2,3,5,4,2]],\n",
    "    minValue:[[3],[2],[2]]}\n",
    "\n",
    "x = tf.tile(tf.reshape(tf.arg_max(tf.cast(choose<minValue,tf.int64),1),(-1,1)),[1,5])\n",
    "y = tf.tile(tf.reduce_sum(tf.cast(choose<minValue,tf.int64),axis=1,keep_dims=True),[1,5])\n",
    "index = tf.tile(tf.expand_dims(tf.range(5,dtype=tf.int64),0),[get_shape(choose)[0],1])\n",
    "result = tf.where(index<x,choose,tf.zeros_like(choose))\n",
    "result = tf.where(index<x,choose,tf.zeros_like(choose)) + tf.where(tf.equal(y,0),choose,tf.zeros_like(choose))\n",
    "sess =tf.Session()\n",
    "def see(x):\n",
    "    print(sess.run(x, feed_dict))\n",
    "\n",
    "see(result)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-08-09T00:30:29.541458Z",
     "start_time": "2019-08-09T00:30:29.445382Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(?, 6, 5)\n",
      "[[[5 4 3 0 1]\n",
      "  [2 3 0 4 2]\n",
      "  [2 3 5 4 2]\n",
      "  [0 0 0 0 0]\n",
      "  [0 0 0 0 0]\n",
      "  [0 0 0 0 0]]]\n"
     ]
    }
   ],
   "source": [
     "# Pad a [batch, n, 5] tensor with zero rows up to a fixed length of 6.\n",
     "a = tf.placeholder(tf.int32,[None, None, 5])\n",
     "\n",
     "\n",
     "batch = tf.shape(a)[0]\n",
     "size = tf.shape(a)[1]  # assumes size <= 6 -- TODO confirm upstream\n",
     "\n",
     "b = tf.zeros([batch, 6, 5], dtype=tf.int32)\n",
     "\n",
     "d = tf.slice(a, [0, 0, 0], [batch, size, 5])  # the original rows\n",
     "e = tf.slice(b, [0, 0, 0], [batch, 6-size, 5])  # zero rows for padding\n",
     "f = tf.concat([d,e],1)\n",
     "g = tf.reshape(f, [-1, 6, 5])  # pins the static shape to (?, 6, 5)\n",
     "print(g.shape)\n",
     "# NOTE(review): relies on `sess` created in a previous cell (hidden state).\n",
     "print(sess.run(g, {a:[[[5,4,3,0,1],[2,3,0,4,2],[2,3,5,4,2]]]}))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-08-08T23:28:53.748474Z",
     "start_time": "2019-08-08T23:28:53.378116Z"
    },
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Tensor(\"concat:0\", shape=(2, 5), dtype=float32)\n",
      "WARNING:tensorflow:From /usr/local/lib/python3.7/site-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Colocations handled automatically by placer.\n",
      "(array([[-0.8113182 ,  1.4845988 ,  0.06532937],\n",
      "       [-2.4427042 ,  0.0992484 ,  0.5912243 ]], dtype=float32), array([[ 0.7,  0.9,  0. ,  0. ,  0. ],\n",
      "       [80. , 90. ,  0. ,  0. ,  0. ]], dtype=float32))\n",
      "<tf.Variable 'Variable_1:0' shape=(2, 5) dtype=float32_ref>\n",
      "[array([], shape=(2, 0), dtype=float32), array([[1.],\n",
      "       [1.]], dtype=float32), array([[ 0.9,  0. ,  0. ,  0. ],\n",
      "       [90. ,  0. ,  0. ,  0. ]], dtype=float32), array([[ 1. ,  0.9,  0. ,  0. ,  0. ],\n",
      "       [ 1. , 90. ,  0. ,  0. ,  0. ]], dtype=float32)]\n",
      "[array([[ 0.7],\n",
      "       [80. ]], dtype=float32), array([[1.],\n",
      "       [1.]], dtype=float32), array([[0., 0., 0.],\n",
      "       [0., 0., 0.]], dtype=float32), array([[ 0.7,  1. ,  0. ,  0. ,  0. ],\n",
      "       [80. ,  1. ,  0. ,  0. ,  0. ]], dtype=float32)]\n",
      "[array([[ 0.7,  0.9],\n",
      "       [80. , 90. ]], dtype=float32), array([[1.],\n",
      "       [1.]], dtype=float32), array([[0., 0.],\n",
      "       [0., 0.]], dtype=float32), array([[ 0.7,  0.9,  1. ,  0. ,  0. ],\n",
      "       [80. , 90. ,  1. ,  0. ,  0. ]], dtype=float32)]\n",
      "[array([[ 0.7,  0.9,  0. ],\n",
      "       [80. , 90. ,  0. ]], dtype=float32), array([[1.],\n",
      "       [1.]], dtype=float32), array([[0.],\n",
      "       [0.]], dtype=float32), array([[ 0.7,  0.9,  0. ,  1. ,  0. ],\n",
      "       [80. , 90. ,  0. ,  1. ,  0. ]], dtype=float32)]\n",
      "[array([[ 0.7,  0.9,  0. ,  0. ],\n",
      "       [80. , 90. ,  0. ,  0. ]], dtype=float32), array([[1.],\n",
      "       [1.]], dtype=float32), array([], shape=(2, 0), dtype=float32), array([[ 0.7,  0.9,  0. ,  0. ,  1. ],\n",
      "       [80. , 90. ,  0. ,  0. ,  1. ]], dtype=float32)]\n",
      "[array([], shape=(2, 0), dtype=float32), array([[1.],\n",
      "       [1.]], dtype=float32), array([[ 0.9,  0. ,  0. ,  0. ],\n",
      "       [90. ,  0. ,  0. ,  0. ]], dtype=float32), array([[ 1. ,  0.9,  0. ,  0. ,  0. ],\n",
      "       [ 1. , 90. ,  0. ,  0. ,  0. ]], dtype=float32)]\n",
      "[array([[1.],\n",
      "       [1.]], dtype=float32), array([[1.],\n",
      "       [1.]], dtype=float32), array([[0., 0., 0.],\n",
      "       [0., 0., 0.]], dtype=float32), array([[1., 1., 0., 0., 0.],\n",
      "       [1., 1., 0., 0., 0.]], dtype=float32)]\n",
      "[array([[1., 1.],\n",
      "       [1., 1.]], dtype=float32), array([[1.],\n",
      "       [1.]], dtype=float32), array([[0., 0.],\n",
      "       [0., 0.]], dtype=float32), array([[1., 1., 1., 0., 0.],\n",
      "       [1., 1., 1., 0., 0.]], dtype=float32)]\n",
      "[array([[1., 1., 1.],\n",
      "       [1., 1., 1.]], dtype=float32), array([[1.],\n",
      "       [1.]], dtype=float32), array([[0.],\n",
      "       [0.]], dtype=float32), array([[1., 1., 1., 1., 0.],\n",
      "       [1., 1., 1., 1., 0.]], dtype=float32)]\n",
      "[array([[1., 1., 1., 1.],\n",
      "       [1., 1., 1., 1.]], dtype=float32), array([[1.],\n",
      "       [1.]], dtype=float32), array([], shape=(2, 0), dtype=float32), array([[1., 1., 1., 1., 1.],\n",
      "       [1., 1., 1., 1., 1.]], dtype=float32)]\n"
     ]
    }
   ],
   "source": [
     "# Demo: replace one column of a 2-D tensor by slicing around it and concatenating.\n",
     "x = tf.placeholder(tf.float32, shape=(2, 2), name=\"input\")\n",
     "xx=tf.zeros([2,3],tf.float32)\n",
     "xx2=tf.concat([x,xx],axis=1)  # pad x with three zero columns -> shape (2, 5)\n",
     "columnTensor=tf.ones([2,1],tf.float32)\n",
     "print (xx2)\n",
     "w1= tf.Variable(tf.random_normal([2, 3], stddev=1, seed=1),trainable=False)\n",
     "w2= tf.Variable(xx2,trainable=False)\n",
     "# Write a routine that modifies a single column of a tensor matrix.\n",
     "embed_size=300\n",
     "max_sentence_length=50\n",
     "max_node_size=max_sentence_length*2-1  # max number of sentence *nodes* in the corpus (nodes, not words)\n",
     "def modify_one_column(tensor,columnTensor,index):  # index is itself a tensor\n",
     "    # tensor: a 2-D matrix\n",
     "    # columnTensor: shaped like one column of tensor\n",
     "    numlines=tensor.shape[0].value  # number of rows\n",
     "    numcolunms=tensor.shape[1].value  # number of columns\n",
     "    new_tensor_left=tf.slice(tensor, [0, 0], [numlines, index])\n",
     "    new_tensor_right=tf.slice(tensor, [0, index+1], [numlines, numcolunms-(index+1)])\n",
     "    new_tensor=tf.concat([new_tensor_left,columnTensor,new_tensor_right],1)\n",
     "    return new_tensor_left,new_tensor_right,new_tensor\n",
     "\n",
     "sess = tf.Session()\n",
     "init_op = tf.global_variables_initializer()  \n",
     "#print(sess.run(x, feed_dict={x: [[0.7,0.9]]}))\n",
     "sess.run(init_op,feed_dict={x: [[0.7,0.9],[80.0,90.0]]})\n",
     "print (sess.run((w1,w2)))\n",
     "print (w2)\n",
     "# First loop: always column-replaces the ORIGINAL w2 (w22 is not fed back in).\n",
     "for index in range(5):\n",
     "    index_tensor=tf.constant(index,tf.int32)\n",
     "    new_tensor_left,new_tensor_right,w22=modify_one_column(w2,columnTensor,index_tensor)\n",
     "    print (sess.run([new_tensor_left,columnTensor,new_tensor_right,w22]))\n",
     "# Second loop: rebinds w2 each iteration, so the replacements accumulate.\n",
     "for index in range(5):\n",
     "    index_tensor=tf.constant(index,tf.int32)\n",
     "    new_tensor_left,new_tensor_right,w2=modify_one_column(w2,columnTensor,index_tensor)\n",
     "    print (sess.run([new_tensor_left,columnTensor,new_tensor_right,w2]))\n",
     "#print (sess.run(w2,feed_dict={x: [[0.7,0.9]]}))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-08-08T16:26:39.843324Z",
     "start_time": "2019-08-08T16:26:39.809008Z"
    }
   },
   "outputs": [
    {
     "ename": "ValueError",
     "evalue": "could not broadcast input array from shape (1,3,5) into shape (1,6,5)",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mValueError\u001b[0m                                Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-17-f589b0147a8a>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m      2\u001b[0m \u001b[0ma\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0marray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m4\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m3\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m3\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m4\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m3\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m4\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      3\u001b[0m \u001b[0mb\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mzeros\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m6\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m5\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mb\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 
     5\u001b[0m \u001b[0mb\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mValueError\u001b[0m: could not broadcast input array from shape (1,3,5) into shape (1,6,5)"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "a = np.array([[[5,4,3,0,1],[2,3,0,4,2],[2,3,5,4,2]]])\n",
    "b = np.zeros([1, 6, 5])\n",
    "b[:,:,:]=a[:,:,:]\n",
    "b"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-08-08T07:30:39.638169Z",
     "start_time": "2019-08-08T07:30:39.631192Z"
    }
   },
   "outputs": [],
   "source": [
     "def plot(samples):\n",
     "    \"\"\"Render up to 16 flattened 28x28 samples in a 4x4 grid and return the figure.\n",
     "\n",
     "    samples: iterable of arrays reshapeable to (28, 28); extra items would\n",
     "    overflow the 4x4 GridSpec, so callers pass at most 16.\n",
     "    \"\"\"\n",
     "    fig = plt.figure(figsize=(4, 4))\n",
     "    gs = gridspec.GridSpec(4, 4)\n",
     "    gs.update(wspace=0.05, hspace=0.05)\n",
     "\n",
     "    for i, sample in enumerate(samples):  # [i,samples[i]] imax=16\n",
     "        ax = plt.subplot(gs[i])\n",
     "        plt.axis('off')  # hide axes; only the image matters\n",
     "        ax.set_xticklabels([])\n",
     "        ax.set_aspect('equal')\n",
     "        plt.imshow(sample.reshape(28, 28), cmap='Greys_r')\n",
     "\n",
     "    return fig"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-08-08T07:30:39.681779Z",
     "start_time": "2019-08-08T07:30:39.644289Z"
    }
   },
   "outputs": [],
   "source": [
    "if not os.path.exists('out/'):\n",
    "    os.makedirs('out/')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2019-08-08T09:26:24.571056Z",
     "start_time": "2019-08-08T07:30:39.689127Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iter: 0 D loss: 1.544 G_loss: 2.342\n",
      "Iter: 1000 D loss: 0.02131 G_loss: 5.716\n",
      "Iter: 2000 D loss: 0.04296 G_loss: 5.928\n",
      "Iter: 3000 D loss: 0.1877 G_loss: 4.682\n",
      "Iter: 4000 D loss: 0.1888 G_loss: 4.941\n",
      "Iter: 5000 D loss: 0.31 G_loss: 5.372\n",
      "Iter: 6000 D loss: 0.2874 G_loss: 4.947\n",
      "Iter: 7000 D loss: 0.4094 G_loss: 4.192\n",
      "Iter: 8000 D loss: 0.2166 G_loss: 4.171\n",
      "Iter: 9000 D loss: 0.4293 G_loss: 4.104\n",
      "Iter: 10000 D loss: 0.4423 G_loss: 3.256\n",
      "Iter: 11000 D loss: 0.495 G_loss: 2.984\n",
      "Iter: 12000 D loss: 0.4791 G_loss: 4.471\n",
      "Iter: 13000 D loss: 0.8266 G_loss: 3.148\n",
      "Iter: 14000 D loss: 0.7278 G_loss: 3.156\n",
      "Iter: 15000 D loss: 0.6981 G_loss: 1.802\n",
      "Iter: 16000 D loss: 0.8532 G_loss: 2.315\n",
      "Iter: 17000 D loss: 0.726 G_loss: 1.827\n",
      "Iter: 18000 D loss: 0.8994 G_loss: 1.944\n",
      "Iter: 19000 D loss: 0.7799 G_loss: 1.877\n",
      "Iter: 20000 D loss: 0.713 G_loss: 2.182\n",
      "Iter: 21000 D loss: 0.7728 G_loss: 2.119\n",
      "Iter: 22000 D loss: 0.9354 G_loss: 1.81\n",
      "Iter: 23000 D loss: 0.8594 G_loss: 1.974\n",
      "Iter: 24000 D loss: 0.9692 G_loss: 2.055\n",
      "Iter: 25000 D loss: 0.6639 G_loss: 2.166\n",
      "Iter: 26000 D loss: 0.7305 G_loss: 1.98\n",
      "Iter: 27000 D loss: 0.879 G_loss: 1.891\n",
      "Iter: 28000 D loss: 1.072 G_loss: 2.212\n",
      "Iter: 29000 D loss: 0.863 G_loss: 1.962\n",
      "Iter: 30000 D loss: 1.032 G_loss: 1.824\n",
      "Iter: 31000 D loss: 0.7438 G_loss: 2.081\n",
      "Iter: 32000 D loss: 0.8968 G_loss: 1.782\n",
      "Iter: 33000 D loss: 0.9869 G_loss: 1.965\n",
      "Iter: 34000 D loss: 0.7359 G_loss: 2.125\n",
      "Iter: 35000 D loss: 0.6755 G_loss: 2.335\n",
      "Iter: 36000 D loss: 0.7148 G_loss: 1.799\n",
      "Iter: 37000 D loss: 1.003 G_loss: 2.025\n",
      "Iter: 38000 D loss: 0.7293 G_loss: 2.01\n",
      "Iter: 39000 D loss: 0.7603 G_loss: 2.262\n",
      "Iter: 40000 D loss: 0.8418 G_loss: 1.989\n",
      "Iter: 41000 D loss: 0.8385 G_loss: 1.978\n",
      "Iter: 42000 D loss: 1.025 G_loss: 2.072\n",
      "Iter: 43000 D loss: 0.6358 G_loss: 2.199\n",
      "Iter: 44000 D loss: 0.6881 G_loss: 2.305\n",
      "Iter: 45000 D loss: 0.821 G_loss: 2.025\n",
      "Iter: 46000 D loss: 0.8257 G_loss: 2.141\n",
      "Iter: 47000 D loss: 0.7728 G_loss: 2.101\n",
      "Iter: 48000 D loss: 0.7759 G_loss: 1.95\n",
      "Iter: 49000 D loss: 0.7331 G_loss: 2.119\n",
      "Iter: 50000 D loss: 0.7401 G_loss: 2.075\n",
      "Iter: 51000 D loss: 0.8186 G_loss: 2.245\n",
      "Iter: 52000 D loss: 0.6612 G_loss: 1.904\n",
      "Iter: 53000 D loss: 0.8658 G_loss: 1.819\n",
      "Iter: 54000 D loss: 0.8385 G_loss: 1.801\n",
      "Iter: 55000 D loss: 0.7422 G_loss: 2.165\n",
      "Iter: 56000 D loss: 0.6863 G_loss: 1.908\n",
      "Iter: 57000 D loss: 0.8586 G_loss: 1.659\n",
      "Iter: 58000 D loss: 0.7537 G_loss: 1.976\n",
      "Iter: 59000 D loss: 0.8374 G_loss: 2.131\n",
      "Iter: 60000 D loss: 0.7122 G_loss: 2.151\n",
      "Iter: 61000 D loss: 0.8032 G_loss: 1.983\n",
      "Iter: 62000 D loss: 0.6886 G_loss: 2.169\n",
      "Iter: 63000 D loss: 0.7122 G_loss: 2.224\n",
      "Iter: 64000 D loss: 0.7872 G_loss: 1.865\n",
      "Iter: 65000 D loss: 0.7498 G_loss: 2.198\n",
      "Iter: 66000 D loss: 0.6331 G_loss: 2.059\n",
      "Iter: 67000 D loss: 0.7091 G_loss: 2.222\n",
      "Iter: 68000 D loss: 0.7004 G_loss: 2.543\n",
      "Iter: 69000 D loss: 0.5373 G_loss: 1.667\n",
      "Iter: 70000 D loss: 0.6126 G_loss: 2.054\n",
      "Iter: 71000 D loss: 0.798 G_loss: 2.342\n",
      "Iter: 72000 D loss: 0.5602 G_loss: 2.275\n",
      "Iter: 73000 D loss: 0.5723 G_loss: 1.876\n",
      "Iter: 74000 D loss: 0.6179 G_loss: 2.098\n",
      "Iter: 75000 D loss: 0.5568 G_loss: 2.35\n",
      "Iter: 76000 D loss: 0.7194 G_loss: 1.896\n",
      "Iter: 77000 D loss: 0.8059 G_loss: 2.053\n",
      "Iter: 78000 D loss: 0.6278 G_loss: 2.1\n",
      "Iter: 79000 D loss: 0.6576 G_loss: 1.903\n",
      "Iter: 80000 D loss: 0.6774 G_loss: 2.101\n",
      "Iter: 81000 D loss: 0.5631 G_loss: 1.998\n",
      "Iter: 82000 D loss: 0.6186 G_loss: 1.983\n",
      "Iter: 83000 D loss: 0.678 G_loss: 2.251\n",
      "Iter: 84000 D loss: 0.7291 G_loss: 2.072\n",
      "Iter: 85000 D loss: 0.5071 G_loss: 2.365\n",
      "Iter: 86000 D loss: 0.6213 G_loss: 1.776\n",
      "Iter: 87000 D loss: 0.4952 G_loss: 2.173\n",
      "Iter: 88000 D loss: 0.6121 G_loss: 2.339\n",
      "Iter: 89000 D loss: 0.5809 G_loss: 2.287\n",
      "Iter: 90000 D loss: 0.7233 G_loss: 2.178\n",
      "Iter: 91000 D loss: 0.6082 G_loss: 2.009\n",
      "Iter: 92000 D loss: 0.6611 G_loss: 2.225\n",
      "Iter: 93000 D loss: 0.6127 G_loss: 2.025\n",
      "Iter: 94000 D loss: 0.7054 G_loss: 2.122\n",
      "Iter: 95000 D loss: 0.4732 G_loss: 2.117\n",
      "Iter: 96000 D loss: 0.6713 G_loss: 2.452\n",
      "Iter: 97000 D loss: 0.504 G_loss: 2.198\n",
      "Iter: 98000 D loss: 0.6812 G_loss: 2.126\n",
      "Iter: 99000 D loss: 0.5724 G_loss: 2.351\n",
      "Iter: 100000 D loss: 0.5772 G_loss: 1.954\n",
      "Iter: 101000 D loss: 0.6564 G_loss: 2.414\n",
      "Iter: 102000 D loss: 0.6183 G_loss: 2.195\n",
      "Iter: 103000 D loss: 0.6516 G_loss: 1.977\n",
      "Iter: 104000 D loss: 0.5772 G_loss: 2.04\n",
      "Iter: 105000 D loss: 0.6606 G_loss: 2.354\n",
      "Iter: 106000 D loss: 0.5806 G_loss: 2.276\n",
      "Iter: 107000 D loss: 0.6999 G_loss: 1.911\n",
      "Iter: 108000 D loss: 0.5647 G_loss: 2.316\n",
      "Iter: 109000 D loss: 0.4875 G_loss: 2.337\n",
      "Iter: 110000 D loss: 0.6329 G_loss: 2.118\n",
      "Iter: 111000 D loss: 0.6095 G_loss: 2.388\n",
      "Iter: 112000 D loss: 0.5724 G_loss: 2.32\n",
      "Iter: 113000 D loss: 0.5568 G_loss: 2.16\n",
      "Iter: 114000 D loss: 0.6402 G_loss: 2.098\n",
      "Iter: 115000 D loss: 0.643 G_loss: 2.365\n",
      "Iter: 116000 D loss: 0.47 G_loss: 2.248\n",
      "Iter: 117000 D loss: 0.6509 G_loss: 2.229\n",
      "Iter: 118000 D loss: 0.5308 G_loss: 2.378\n",
      "Iter: 119000 D loss: 0.5384 G_loss: 1.927\n",
      "Iter: 120000 D loss: 0.5127 G_loss: 2.314\n",
      "Iter: 121000 D loss: 0.5999 G_loss: 2.246\n",
      "Iter: 122000 D loss: 0.5455 G_loss: 2.379\n",
      "Iter: 123000 D loss: 0.6088 G_loss: 2.419\n",
      "Iter: 124000 D loss: 0.5564 G_loss: 2.515\n",
      "Iter: 125000 D loss: 0.7592 G_loss: 2.306\n",
      "Iter: 126000 D loss: 0.6268 G_loss: 2.303\n",
      "Iter: 127000 D loss: 0.6308 G_loss: 2.573\n",
      "Iter: 128000 D loss: 0.528 G_loss: 2.3\n",
      "Iter: 129000 D loss: 0.6127 G_loss: 2.556\n",
      "Iter: 130000 D loss: 0.448 G_loss: 2.256\n",
      "Iter: 131000 D loss: 0.5726 G_loss: 2.385\n",
      "Iter: 132000 D loss: 0.514 G_loss: 2.356\n",
      "Iter: 133000 D loss: 0.6269 G_loss: 2.266\n",
      "Iter: 134000 D loss: 0.5572 G_loss: 2.704\n",
      "Iter: 135000 D loss: 0.6338 G_loss: 2.416\n",
      "Iter: 136000 D loss: 0.5207 G_loss: 2.359\n",
      "Iter: 137000 D loss: 0.452 G_loss: 2.222\n",
      "Iter: 138000 D loss: 0.5775 G_loss: 2.183\n",
      "Iter: 139000 D loss: 0.569 G_loss: 2.311\n",
      "Iter: 140000 D loss: 0.6732 G_loss: 2.593\n",
      "Iter: 141000 D loss: 0.4624 G_loss: 2.423\n",
      "Iter: 142000 D loss: 0.5659 G_loss: 2.496\n",
      "Iter: 143000 D loss: 0.536 G_loss: 2.562\n",
      "Iter: 144000 D loss: 0.5208 G_loss: 2.509\n",
      "Iter: 145000 D loss: 0.5322 G_loss: 2.222\n",
      "Iter: 146000 D loss: 0.6085 G_loss: 2.339\n",
      "Iter: 147000 D loss: 0.4957 G_loss: 2.56\n",
      "Iter: 148000 D loss: 0.5176 G_loss: 2.338\n",
      "Iter: 149000 D loss: 0.4099 G_loss: 2.857\n",
      "Iter: 150000 D loss: 0.5383 G_loss: 2.28\n",
      "Iter: 151000 D loss: 0.5748 G_loss: 2.408\n",
      "Iter: 152000 D loss: 0.5137 G_loss: 2.463\n",
      "Iter: 153000 D loss: 0.4844 G_loss: 2.314\n",
      "Iter: 154000 D loss: 0.615 G_loss: 2.462\n",
      "Iter: 155000 D loss: 0.5696 G_loss: 2.657\n",
      "Iter: 156000 D loss: 0.6722 G_loss: 2.244\n",
      "Iter: 157000 D loss: 0.5455 G_loss: 2.35\n",
      "Iter: 158000 D loss: 0.4547 G_loss: 2.461\n",
      "Iter: 159000 D loss: 0.49 G_loss: 2.475\n",
      "Iter: 160000 D loss: 0.5544 G_loss: 2.422\n",
      "Iter: 161000 D loss: 0.5966 G_loss: 2.296\n",
      "Iter: 162000 D loss: 0.5913 G_loss: 2.126\n",
      "Iter: 163000 D loss: 0.4783 G_loss: 2.267\n",
      "Iter: 164000 D loss: 0.5445 G_loss: 2.321\n",
      "Iter: 165000 D loss: 0.5651 G_loss: 1.914\n",
      "Iter: 166000 D loss: 0.5543 G_loss: 2.233\n",
      "Iter: 167000 D loss: 0.4719 G_loss: 2.366\n",
      "Iter: 168000 D loss: 0.5682 G_loss: 2.423\n",
      "Iter: 169000 D loss: 0.6161 G_loss: 2.579\n",
      "Iter: 170000 D loss: 0.4376 G_loss: 2.554\n",
      "Iter: 171000 D loss: 0.5315 G_loss: 2.699\n",
      "Iter: 172000 D loss: 0.4931 G_loss: 2.63\n",
      "Iter: 173000 D loss: 0.4865 G_loss: 2.614\n",
      "Iter: 174000 D loss: 0.5092 G_loss: 2.531\n",
      "Iter: 175000 D loss: 0.4979 G_loss: 2.521\n",
      "Iter: 176000 D loss: 0.6738 G_loss: 2.679\n",
      "Iter: 177000 D loss: 0.5674 G_loss: 2.532\n",
      "Iter: 178000 D loss: 0.5493 G_loss: 2.315\n",
      "Iter: 179000 D loss: 0.5525 G_loss: 2.6\n",
      "Iter: 180000 D loss: 0.6501 G_loss: 2.539\n",
      "Iter: 181000 D loss: 0.4999 G_loss: 2.332\n",
      "Iter: 182000 D loss: 0.5159 G_loss: 2.392\n",
      "Iter: 183000 D loss: 0.4909 G_loss: 2.404\n",
      "Iter: 184000 D loss: 0.4687 G_loss: 2.877\n",
      "Iter: 185000 D loss: 0.5294 G_loss: 2.77\n",
      "Iter: 186000 D loss: 0.5107 G_loss: 2.532\n",
      "Iter: 187000 D loss: 0.5236 G_loss: 2.596\n",
      "Iter: 188000 D loss: 0.6538 G_loss: 2.484\n",
      "Iter: 189000 D loss: 0.5173 G_loss: 2.173\n",
      "Iter: 190000 D loss: 0.5091 G_loss: 2.593\n",
      "Iter: 191000 D loss: 0.6051 G_loss: 2.294\n",
      "Iter: 192000 D loss: 0.6698 G_loss: 2.54\n",
      "Iter: 193000 D loss: 0.4413 G_loss: 2.374\n",
      "Iter: 194000 D loss: 0.5118 G_loss: 2.555\n",
      "Iter: 195000 D loss: 0.6215 G_loss: 2.361\n",
      "Iter: 196000 D loss: 0.5334 G_loss: 2.513\n",
      "Iter: 197000 D loss: 0.7206 G_loss: 2.536\n",
      "Iter: 198000 D loss: 0.5581 G_loss: 2.92\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iter: 199000 D loss: 0.6608 G_loss: 2.704\n",
      "Iter: 200000 D loss: 0.592 G_loss: 2.709\n",
      "Iter: 201000 D loss: 0.5032 G_loss: 2.96\n",
      "Iter: 202000 D loss: 0.5221 G_loss: 2.42\n",
      "Iter: 203000 D loss: 0.4931 G_loss: 2.512\n",
      "Iter: 204000 D loss: 0.4904 G_loss: 2.255\n",
      "Iter: 205000 D loss: 0.5094 G_loss: 2.958\n",
      "Iter: 206000 D loss: 0.5405 G_loss: 2.373\n",
      "Iter: 207000 D loss: 0.5446 G_loss: 2.452\n",
      "Iter: 208000 D loss: 0.58 G_loss: 2.404\n",
      "Iter: 209000 D loss: 0.458 G_loss: 2.535\n",
      "Iter: 210000 D loss: 0.5119 G_loss: 2.618\n",
      "Iter: 211000 D loss: 0.4609 G_loss: 2.385\n",
      "Iter: 212000 D loss: 0.6054 G_loss: 2.331\n",
      "Iter: 213000 D loss: 0.4997 G_loss: 2.756\n",
      "Iter: 214000 D loss: 0.6896 G_loss: 2.615\n",
      "Iter: 215000 D loss: 0.5326 G_loss: 2.124\n",
      "Iter: 216000 D loss: 0.6012 G_loss: 2.353\n",
      "Iter: 217000 D loss: 0.532 G_loss: 2.781\n",
      "Iter: 218000 D loss: 0.6329 G_loss: 2.646\n",
      "Iter: 219000 D loss: 0.5466 G_loss: 2.641\n",
      "Iter: 220000 D loss: 0.4219 G_loss: 2.57\n",
      "Iter: 221000 D loss: 0.5124 G_loss: 2.584\n",
      "Iter: 222000 D loss: 0.5833 G_loss: 2.384\n",
      "Iter: 223000 D loss: 0.5177 G_loss: 2.654\n",
      "Iter: 224000 D loss: 0.5443 G_loss: 2.826\n",
      "Iter: 225000 D loss: 0.4671 G_loss: 2.715\n",
      "Iter: 226000 D loss: 0.59 G_loss: 2.743\n",
      "Iter: 227000 D loss: 0.583 G_loss: 2.734\n",
      "Iter: 228000 D loss: 0.535 G_loss: 2.397\n",
      "Iter: 229000 D loss: 0.5938 G_loss: 2.331\n",
      "Iter: 230000 D loss: 0.5406 G_loss: 2.515\n",
      "Iter: 231000 D loss: 0.4685 G_loss: 2.594\n",
      "Iter: 232000 D loss: 0.4642 G_loss: 2.775\n",
      "Iter: 233000 D loss: 0.4571 G_loss: 2.843\n",
      "Iter: 234000 D loss: 0.5629 G_loss: 2.611\n",
      "Iter: 235000 D loss: 0.5112 G_loss: 2.43\n",
      "Iter: 236000 D loss: 0.6542 G_loss: 2.669\n",
      "Iter: 237000 D loss: 0.4327 G_loss: 2.729\n",
      "Iter: 238000 D loss: 0.5662 G_loss: 2.854\n",
      "Iter: 239000 D loss: 0.5148 G_loss: 2.446\n",
      "Iter: 240000 D loss: 0.4913 G_loss: 2.616\n",
      "Iter: 241000 D loss: 0.6209 G_loss: 2.868\n",
      "Iter: 242000 D loss: 0.7584 G_loss: 2.347\n",
      "Iter: 243000 D loss: 0.6322 G_loss: 1.945\n",
      "Iter: 244000 D loss: 0.4912 G_loss: 2.122\n",
      "Iter: 245000 D loss: 0.4953 G_loss: 2.438\n",
      "Iter: 246000 D loss: 0.53 G_loss: 2.153\n",
      "Iter: 247000 D loss: 0.5021 G_loss: 2.24\n",
      "Iter: 248000 D loss: 0.5478 G_loss: 2.641\n",
      "Iter: 249000 D loss: 0.5881 G_loss: 2.535\n",
      "Iter: 250000 D loss: 0.5393 G_loss: 2.547\n",
      "Iter: 251000 D loss: 0.507 G_loss: 2.144\n",
      "Iter: 252000 D loss: 0.4752 G_loss: 2.688\n",
      "Iter: 253000 D loss: 0.4851 G_loss: 2.608\n",
      "Iter: 254000 D loss: 0.5197 G_loss: 2.472\n",
      "Iter: 255000 D loss: 0.5413 G_loss: 2.608\n",
      "Iter: 256000 D loss: 0.5317 G_loss: 2.74\n",
      "Iter: 257000 D loss: 0.5305 G_loss: 2.641\n",
      "Iter: 258000 D loss: 0.5269 G_loss: 2.676\n",
      "Iter: 259000 D loss: 0.4718 G_loss: 2.586\n",
      "Iter: 260000 D loss: 0.4331 G_loss: 2.796\n",
      "Iter: 261000 D loss: 0.4545 G_loss: 2.976\n",
      "Iter: 262000 D loss: 0.3833 G_loss: 2.822\n",
      "Iter: 263000 D loss: 0.5635 G_loss: 2.808\n",
      "Iter: 264000 D loss: 0.4397 G_loss: 2.645\n",
      "Iter: 265000 D loss: 0.497 G_loss: 2.8\n",
      "Iter: 266000 D loss: 0.4993 G_loss: 2.456\n",
      "Iter: 267000 D loss: 0.4998 G_loss: 2.865\n",
      "Iter: 268000 D loss: 0.4585 G_loss: 2.664\n",
      "Iter: 269000 D loss: 0.493 G_loss: 2.747\n",
      "Iter: 270000 D loss: 0.4219 G_loss: 2.678\n",
      "Iter: 271000 D loss: 0.4326 G_loss: 3.143\n",
      "Iter: 272000 D loss: 0.4327 G_loss: 2.66\n",
      "Iter: 273000 D loss: 0.5578 G_loss: 2.738\n",
      "Iter: 274000 D loss: 0.536 G_loss: 2.555\n",
      "Iter: 275000 D loss: 0.402 G_loss: 2.427\n",
      "Iter: 276000 D loss: 0.5035 G_loss: 2.584\n",
      "Iter: 277000 D loss: 0.5368 G_loss: 2.464\n",
      "Iter: 278000 D loss: 0.4182 G_loss: 2.883\n",
      "Iter: 279000 D loss: 0.5486 G_loss: 2.654\n",
      "Iter: 280000 D loss: 0.4729 G_loss: 3.175\n",
      "Iter: 281000 D loss: 0.565 G_loss: 3.03\n",
      "Iter: 282000 D loss: 0.6523 G_loss: 2.737\n",
      "Iter: 283000 D loss: 0.4613 G_loss: 2.436\n",
      "Iter: 284000 D loss: 0.4652 G_loss: 2.525\n",
      "Iter: 285000 D loss: 0.4712 G_loss: 2.73\n",
      "Iter: 286000 D loss: 0.5108 G_loss: 2.829\n",
      "Iter: 287000 D loss: 0.4388 G_loss: 2.629\n",
      "Iter: 288000 D loss: 0.486 G_loss: 2.723\n",
      "Iter: 289000 D loss: 0.5141 G_loss: 2.717\n",
      "Iter: 290000 D loss: 0.4675 G_loss: 3.104\n",
      "Iter: 291000 D loss: 0.4731 G_loss: 3.524\n",
      "Iter: 292000 D loss: 0.5009 G_loss: 2.952\n",
      "Iter: 293000 D loss: 0.5081 G_loss: 3.023\n",
      "Iter: 294000 D loss: 0.478 G_loss: 2.59\n",
      "Iter: 295000 D loss: 0.4926 G_loss: 2.425\n",
      "Iter: 296000 D loss: 0.4771 G_loss: 2.923\n",
      "Iter: 297000 D loss: 0.4696 G_loss: 2.604\n",
      "Iter: 298000 D loss: 0.5209 G_loss: 3.267\n",
      "Iter: 299000 D loss: 0.5565 G_loss: 2.771\n",
      "Iter: 300000 D loss: 0.4953 G_loss: 2.717\n",
      "Iter: 301000 D loss: 0.5321 G_loss: 2.725\n",
      "Iter: 302000 D loss: 0.4949 G_loss: 2.717\n",
      "Iter: 303000 D loss: 0.4629 G_loss: 2.878\n",
      "Iter: 304000 D loss: 0.4631 G_loss: 2.507\n",
      "Iter: 305000 D loss: 0.5106 G_loss: 2.842\n",
      "Iter: 306000 D loss: 0.4799 G_loss: 2.686\n",
      "Iter: 307000 D loss: 0.6016 G_loss: 2.691\n",
      "Iter: 308000 D loss: 0.4069 G_loss: 2.758\n",
      "Iter: 309000 D loss: 0.446 G_loss: 2.742\n",
      "Iter: 310000 D loss: 0.4158 G_loss: 3.004\n",
      "Iter: 311000 D loss: 0.6416 G_loss: 2.385\n",
      "Iter: 312000 D loss: 0.5404 G_loss: 2.999\n",
      "Iter: 313000 D loss: 0.4038 G_loss: 2.529\n",
      "Iter: 314000 D loss: 0.4186 G_loss: 2.798\n",
      "Iter: 315000 D loss: 0.5571 G_loss: 2.523\n",
      "Iter: 316000 D loss: 0.4219 G_loss: 2.805\n",
      "Iter: 317000 D loss: 0.3931 G_loss: 2.581\n",
      "Iter: 318000 D loss: 0.4044 G_loss: 2.573\n",
      "Iter: 319000 D loss: 0.431 G_loss: 3.018\n",
      "Iter: 320000 D loss: 0.5181 G_loss: 2.963\n",
      "Iter: 321000 D loss: 0.375 G_loss: 2.85\n",
      "Iter: 322000 D loss: 0.5657 G_loss: 2.644\n",
      "Iter: 323000 D loss: 0.4385 G_loss: 2.829\n",
      "Iter: 324000 D loss: 0.4893 G_loss: 2.486\n",
      "Iter: 325000 D loss: 0.6235 G_loss: 2.944\n",
      "Iter: 326000 D loss: 0.5565 G_loss: 2.932\n",
      "Iter: 327000 D loss: 0.435 G_loss: 2.814\n",
      "Iter: 328000 D loss: 0.5504 G_loss: 3.202\n",
      "Iter: 329000 D loss: 0.4775 G_loss: 2.47\n",
      "Iter: 330000 D loss: 0.4327 G_loss: 2.881\n",
      "Iter: 331000 D loss: 0.5546 G_loss: 2.528\n",
      "Iter: 332000 D loss: 0.5679 G_loss: 2.901\n",
      "Iter: 333000 D loss: 0.5986 G_loss: 2.985\n",
      "Iter: 334000 D loss: 0.4512 G_loss: 3.118\n",
      "Iter: 335000 D loss: 0.4795 G_loss: 2.685\n",
      "Iter: 336000 D loss: 0.5221 G_loss: 2.972\n",
      "Iter: 337000 D loss: 0.4789 G_loss: 2.771\n",
      "Iter: 338000 D loss: 0.4361 G_loss: 2.454\n",
      "Iter: 339000 D loss: 0.4547 G_loss: 3.004\n",
      "Iter: 340000 D loss: 0.4649 G_loss: 2.548\n",
      "Iter: 341000 D loss: 0.3969 G_loss: 2.832\n",
      "Iter: 342000 D loss: 0.3838 G_loss: 2.741\n",
      "Iter: 343000 D loss: 0.3864 G_loss: 2.64\n",
      "Iter: 344000 D loss: 0.3342 G_loss: 2.668\n",
      "Iter: 345000 D loss: 0.5555 G_loss: 2.519\n",
      "Iter: 346000 D loss: 0.4594 G_loss: 2.925\n",
      "Iter: 347000 D loss: 0.4972 G_loss: 2.923\n",
      "Iter: 348000 D loss: 0.4071 G_loss: 3.289\n",
      "Iter: 349000 D loss: 0.3796 G_loss: 3.12\n",
      "Iter: 350000 D loss: 0.4084 G_loss: 3.144\n",
      "Iter: 351000 D loss: 0.3977 G_loss: 3.378\n",
      "Iter: 352000 D loss: 0.4749 G_loss: 2.86\n",
      "Iter: 353000 D loss: 0.3589 G_loss: 3.191\n",
      "Iter: 354000 D loss: 0.459 G_loss: 2.915\n",
      "Iter: 355000 D loss: 0.4588 G_loss: 2.701\n",
      "Iter: 356000 D loss: 0.4059 G_loss: 3.09\n",
      "Iter: 357000 D loss: 0.4736 G_loss: 2.83\n",
      "Iter: 358000 D loss: 0.4102 G_loss: 2.9\n",
      "Iter: 359000 D loss: 0.3222 G_loss: 2.691\n",
      "Iter: 360000 D loss: 0.366 G_loss: 2.872\n",
      "Iter: 361000 D loss: 0.3332 G_loss: 3.227\n",
      "Iter: 362000 D loss: 0.5037 G_loss: 2.994\n",
      "Iter: 363000 D loss: 0.3386 G_loss: 3.017\n",
      "Iter: 364000 D loss: 0.4011 G_loss: 2.813\n",
      "Iter: 365000 D loss: 0.4003 G_loss: 2.68\n",
      "Iter: 366000 D loss: 0.3979 G_loss: 2.382\n",
      "Iter: 367000 D loss: 0.406 G_loss: 2.977\n",
      "Iter: 368000 D loss: 0.4542 G_loss: 3.01\n",
      "Iter: 369000 D loss: 0.4929 G_loss: 2.827\n",
      "Iter: 370000 D loss: 0.4452 G_loss: 3.094\n",
      "Iter: 371000 D loss: 0.421 G_loss: 3.147\n",
      "Iter: 372000 D loss: 0.5163 G_loss: 2.882\n",
      "Iter: 373000 D loss: 0.4369 G_loss: 3.42\n",
      "Iter: 374000 D loss: 0.3729 G_loss: 3.273\n",
      "Iter: 375000 D loss: 0.3766 G_loss: 2.785\n",
      "Iter: 376000 D loss: 0.4069 G_loss: 2.95\n",
      "Iter: 377000 D loss: 0.3254 G_loss: 3.146\n",
      "Iter: 378000 D loss: 0.4078 G_loss: 2.82\n",
      "Iter: 379000 D loss: 0.4244 G_loss: 3.344\n",
      "Iter: 380000 D loss: 0.3997 G_loss: 3.032\n",
      "Iter: 381000 D loss: 0.3073 G_loss: 2.85\n",
      "Iter: 382000 D loss: 0.42 G_loss: 3.19\n",
      "Iter: 383000 D loss: 0.3432 G_loss: 2.867\n",
      "Iter: 384000 D loss: 0.3337 G_loss: 2.764\n",
      "Iter: 385000 D loss: 0.4196 G_loss: 2.611\n",
      "Iter: 386000 D loss: 0.491 G_loss: 3.126\n",
      "Iter: 387000 D loss: 0.4816 G_loss: 3.238\n",
      "Iter: 388000 D loss: 0.3741 G_loss: 3.249\n",
      "Iter: 389000 D loss: 0.3709 G_loss: 3.513\n",
      "Iter: 390000 D loss: 0.413 G_loss: 2.778\n",
      "Iter: 391000 D loss: 0.4226 G_loss: 3.179\n",
      "Iter: 392000 D loss: 0.3366 G_loss: 2.954\n",
      "Iter: 393000 D loss: 0.4988 G_loss: 3.864\n",
      "Iter: 394000 D loss: 0.3716 G_loss: 2.964\n",
      "Iter: 395000 D loss: 0.3728 G_loss: 3.112\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Iter: 396000 D loss: 0.5249 G_loss: 3.176\n",
      "Iter: 397000 D loss: 0.4017 G_loss: 3.373\n",
      "Iter: 398000 D loss: 0.3934 G_loss: 2.77\n",
      "Iter: 399000 D loss: 0.5815 G_loss: 3.391\n",
      "Iter: 400000 D loss: 0.3384 G_loss: 2.761\n",
      "Iter: 401000 D loss: 0.4027 G_loss: 2.875\n",
      "Iter: 402000 D loss: 0.5267 G_loss: 2.944\n",
      "Iter: 403000 D loss: 0.4381 G_loss: 2.985\n",
      "Iter: 404000 D loss: 0.3429 G_loss: 3.268\n",
      "Iter: 405000 D loss: 0.4041 G_loss: 2.813\n",
      "Iter: 406000 D loss: 0.4559 G_loss: 2.761\n",
      "Iter: 407000 D loss: 0.4998 G_loss: 2.847\n",
      "Iter: 408000 D loss: 0.3892 G_loss: 3.48\n",
      "Iter: 409000 D loss: 0.3632 G_loss: 3.003\n",
      "Iter: 410000 D loss: 0.3065 G_loss: 2.731\n",
      "Iter: 411000 D loss: 0.4013 G_loss: 2.858\n",
      "Iter: 412000 D loss: 0.382 G_loss: 3.048\n",
      "Iter: 413000 D loss: 0.5001 G_loss: 2.688\n",
      "Iter: 414000 D loss: 0.4133 G_loss: 3.136\n",
      "Iter: 415000 D loss: 0.4601 G_loss: 2.818\n",
      "Iter: 416000 D loss: 0.4819 G_loss: 2.873\n",
      "Iter: 417000 D loss: 0.3763 G_loss: 3.038\n",
      "Iter: 418000 D loss: 0.3783 G_loss: 3.192\n",
      "Iter: 419000 D loss: 0.3531 G_loss: 2.922\n",
      "Iter: 420000 D loss: 0.2943 G_loss: 3.093\n",
      "Iter: 421000 D loss: 0.3819 G_loss: 3.072\n",
      "Iter: 422000 D loss: 0.3827 G_loss: 3.251\n",
      "Iter: 423000 D loss: 0.4067 G_loss: 3.329\n",
      "Iter: 424000 D loss: 0.3002 G_loss: 3.046\n",
      "Iter: 425000 D loss: 0.3515 G_loss: 2.97\n",
      "Iter: 426000 D loss: 0.3975 G_loss: 2.893\n",
      "Iter: 427000 D loss: 0.3591 G_loss: 3.077\n",
      "Iter: 428000 D loss: 0.4053 G_loss: 3.163\n",
      "Iter: 429000 D loss: 0.4275 G_loss: 2.585\n",
      "Iter: 430000 D loss: 0.3994 G_loss: 2.681\n",
      "Iter: 431000 D loss: 0.3205 G_loss: 2.92\n",
      "Iter: 432000 D loss: 0.3592 G_loss: 2.942\n",
      "Iter: 433000 D loss: 0.3994 G_loss: 2.673\n",
      "Iter: 434000 D loss: 0.3747 G_loss: 2.884\n",
      "Iter: 435000 D loss: 0.4615 G_loss: 2.876\n",
      "Iter: 436000 D loss: 0.3105 G_loss: 3.111\n",
      "Iter: 437000 D loss: 0.5573 G_loss: 3.415\n",
      "Iter: 438000 D loss: 0.3893 G_loss: 3.005\n",
      "Iter: 439000 D loss: 0.3843 G_loss: 3.282\n",
      "Iter: 440000 D loss: 0.2562 G_loss: 2.753\n",
      "Iter: 441000 D loss: 0.3806 G_loss: 2.808\n",
      "Iter: 442000 D loss: 0.3239 G_loss: 2.767\n",
      "Iter: 443000 D loss: 0.329 G_loss: 3.276\n",
      "Iter: 444000 D loss: 0.2938 G_loss: 3.008\n",
      "Iter: 445000 D loss: 0.3759 G_loss: 3.139\n",
      "Iter: 446000 D loss: 0.4065 G_loss: 3.18\n",
      "Iter: 447000 D loss: 0.397 G_loss: 3.242\n",
      "Iter: 448000 D loss: 0.4298 G_loss: 2.955\n",
      "Iter: 449000 D loss: 0.3894 G_loss: 3.133\n",
      "Iter: 450000 D loss: 0.433 G_loss: 2.998\n",
      "Iter: 451000 D loss: 0.3848 G_loss: 2.953\n",
      "Iter: 452000 D loss: 0.3142 G_loss: 3.455\n",
      "Iter: 453000 D loss: 0.3392 G_loss: 2.865\n",
      "Iter: 454000 D loss: 0.387 G_loss: 3.028\n",
      "Iter: 455000 D loss: 0.3636 G_loss: 2.83\n",
      "Iter: 456000 D loss: 0.3801 G_loss: 3.365\n",
      "Iter: 457000 D loss: 0.4666 G_loss: 3.878\n",
      "Iter: 458000 D loss: 0.4572 G_loss: 2.89\n",
      "Iter: 459000 D loss: 0.3481 G_loss: 3.273\n",
      "Iter: 460000 D loss: 0.36 G_loss: 3.113\n",
      "Iter: 461000 D loss: 0.2395 G_loss: 2.837\n",
      "Iter: 462000 D loss: 0.3684 G_loss: 3.069\n",
      "Iter: 463000 D loss: 0.3241 G_loss: 2.749\n",
      "Iter: 464000 D loss: 0.3581 G_loss: 3.279\n",
      "Iter: 465000 D loss: 0.345 G_loss: 3.156\n",
      "Iter: 466000 D loss: 0.2691 G_loss: 2.731\n",
      "Iter: 467000 D loss: 0.4467 G_loss: 2.857\n",
      "Iter: 468000 D loss: 0.3955 G_loss: 2.923\n",
      "Iter: 469000 D loss: 0.4826 G_loss: 2.923\n",
      "Iter: 470000 D loss: 0.3616 G_loss: 3.058\n",
      "Iter: 471000 D loss: 0.3767 G_loss: 3.204\n",
      "Iter: 472000 D loss: 0.4204 G_loss: 2.821\n",
      "Iter: 473000 D loss: 0.2705 G_loss: 2.689\n",
      "Iter: 474000 D loss: 0.3467 G_loss: 2.902\n",
      "Iter: 475000 D loss: 0.2545 G_loss: 3.173\n",
      "Iter: 476000 D loss: 0.2456 G_loss: 3.145\n",
      "Iter: 477000 D loss: 0.3983 G_loss: 3.055\n",
      "Iter: 478000 D loss: 0.3226 G_loss: 3.016\n",
      "Iter: 479000 D loss: 0.3816 G_loss: 3.439\n",
      "Iter: 480000 D loss: 0.4354 G_loss: 2.887\n",
      "Iter: 481000 D loss: 0.3223 G_loss: 3.1\n",
      "Iter: 482000 D loss: 0.3601 G_loss: 2.939\n",
      "Iter: 483000 D loss: 0.4424 G_loss: 3.246\n",
      "Iter: 484000 D loss: 0.3591 G_loss: 3.138\n",
      "Iter: 485000 D loss: 0.2881 G_loss: 3.326\n",
      "Iter: 486000 D loss: 0.2443 G_loss: 3.59\n",
      "Iter: 487000 D loss: 0.332 G_loss: 2.769\n",
      "Iter: 488000 D loss: 0.3254 G_loss: 2.998\n",
      "Iter: 489000 D loss: 0.3844 G_loss: 2.596\n",
      "Iter: 490000 D loss: 0.2988 G_loss: 3.46\n",
      "Iter: 491000 D loss: 0.3297 G_loss: 2.951\n",
      "Iter: 492000 D loss: 0.3933 G_loss: 2.496\n",
      "Iter: 493000 D loss: 0.4207 G_loss: 2.879\n",
      "Iter: 494000 D loss: 0.317 G_loss: 2.941\n",
      "Iter: 495000 D loss: 0.3441 G_loss: 3.368\n",
      "Iter: 496000 D loss: 0.3539 G_loss: 3.033\n",
      "Iter: 497000 D loss: 0.2951 G_loss: 3.221\n",
      "Iter: 498000 D loss: 0.5497 G_loss: 2.841\n",
      "Iter: 499000 D loss: 0.3257 G_loss: 3.177\n",
      "Iter: 500000 D loss: 0.2869 G_loss: 2.615\n",
      "Iter: 501000 D loss: 0.2636 G_loss: 2.92\n",
      "Iter: 502000 D loss: 0.2773 G_loss: 3.102\n",
      "Iter: 503000 D loss: 0.3203 G_loss: 2.715\n",
      "Iter: 504000 D loss: 0.2582 G_loss: 2.97\n",
      "Iter: 505000 D loss: 0.282 G_loss: 3.143\n",
      "Iter: 506000 D loss: 0.3398 G_loss: 3.262\n",
      "Iter: 507000 D loss: 0.3498 G_loss: 3.312\n",
      "Iter: 508000 D loss: 0.3279 G_loss: 2.883\n",
      "Iter: 509000 D loss: 0.354 G_loss: 3.497\n",
      "Iter: 510000 D loss: 0.2947 G_loss: 3.115\n",
      "Iter: 511000 D loss: 0.3156 G_loss: 2.995\n",
      "Iter: 512000 D loss: 0.359 G_loss: 3.095\n",
      "Iter: 513000 D loss: 0.32 G_loss: 2.978\n",
      "Iter: 514000 D loss: 0.3898 G_loss: 3.112\n",
      "Iter: 515000 D loss: 0.3581 G_loss: 3.066\n",
      "Iter: 516000 D loss: 0.4762 G_loss: 2.96\n",
      "Iter: 517000 D loss: 0.3496 G_loss: 3.186\n",
      "Iter: 518000 D loss: 0.3416 G_loss: 2.918\n",
      "Iter: 519000 D loss: 0.2582 G_loss: 3.089\n",
      "Iter: 520000 D loss: 0.2926 G_loss: 3.429\n",
      "Iter: 521000 D loss: 0.2787 G_loss: 2.833\n",
      "Iter: 522000 D loss: 0.3665 G_loss: 3.204\n",
      "Iter: 523000 D loss: 0.3495 G_loss: 3.0\n",
      "Iter: 524000 D loss: 0.2842 G_loss: 3.007\n",
      "Iter: 525000 D loss: 0.3337 G_loss: 3.194\n",
      "Iter: 526000 D loss: 0.4483 G_loss: 2.86\n",
      "Iter: 527000 D loss: 0.3798 G_loss: 2.793\n",
      "Iter: 528000 D loss: 0.2592 G_loss: 3.468\n",
      "Iter: 529000 D loss: 0.304 G_loss: 3.055\n",
      "Iter: 530000 D loss: 0.336 G_loss: 3.231\n",
      "Iter: 531000 D loss: 0.2161 G_loss: 3.338\n",
      "Iter: 532000 D loss: 0.4098 G_loss: 3.282\n",
      "Iter: 533000 D loss: 0.3467 G_loss: 3.606\n",
      "Iter: 534000 D loss: 0.3236 G_loss: 2.813\n",
      "Iter: 535000 D loss: 0.3701 G_loss: 2.993\n"
     ]
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-6-f903891985af>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m      6\u001b[0m         samples = sess.run(G_sample, feed_dict={\n\u001b[1;32m      7\u001b[0m                            Z: sample_Z(16, Z_dim)})  # 16*784\n\u001b[0;32m----> 8\u001b[0;31m         \u001b[0mfig\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mplot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msamples\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      9\u001b[0m         \u001b[0mplt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msavefig\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'out/{}.png'\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mzfill\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m3\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbbox_inches\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'tight'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     10\u001b[0m         \u001b[0mi\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m<ipython-input-4-709c49e001c1>\u001b[0m in \u001b[0;36mplot\u001b[0;34m(samples)\u001b[0m\n\u001b[1;32m      5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      6\u001b[0m     \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msample\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msamples\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m  \u001b[0;31m# [i,samples[i]] imax=16\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 7\u001b[0;31m         \u001b[0max\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mplt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msubplot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mgs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      8\u001b[0m         \u001b[0mplt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0maxis\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'off'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      9\u001b[0m         \u001b[0max\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_xticklabels\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.7/site-packages/matplotlib/pyplot.py\u001b[0m in \u001b[0;36msubplot\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m   1082\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1083\u001b[0m     \u001b[0mfig\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgcf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1084\u001b[0;31m     \u001b[0ma\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd_subplot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1085\u001b[0m     \u001b[0mbbox\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0ma\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbbox\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1086\u001b[0m     \u001b[0mbyebye\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.7/site-packages/matplotlib/figure.py\u001b[0m in \u001b[0;36madd_subplot\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m   1365\u001b[0m                     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_axstack\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mremove\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0max\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1366\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1367\u001b[0;31m             \u001b[0ma\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msubplot_class_factory\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprojection_class\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1368\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_axstack\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0ma\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1369\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msca\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.7/site-packages/matplotlib/axes/_subplots.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, fig, *args, **kwargs)\u001b[0m\n\u001b[1;32m     68\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     69\u001b[0m         \u001b[0;31m# _axes_class is set in the subplot_class_factory\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 70\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_axes_class\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfig\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfigbox\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     71\u001b[0m         \u001b[0;31m# add a layout box to this, for both the full axis, and the poss\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     72\u001b[0m         \u001b[0;31m# of the axis.  We need both because the axes may become smaller\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.7/site-packages/matplotlib/axes/_base.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, fig, rect, facecolor, frameon, sharex, sharey, label, xscale, yscale, **kwargs)\u001b[0m\n\u001b[1;32m    489\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_rasterization_zorder\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    490\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_connected\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0;34m}\u001b[0m  \u001b[0;31m# a dict from events to (id, func)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 491\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcla\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    492\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    493\u001b[0m         \u001b[0;31m# funcs used to format x and y - fall back on major formatters\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.7/site-packages/matplotlib/axes/_base.py\u001b[0m in \u001b[0;36mcla\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1094\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_axis_on\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1095\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1096\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mxaxis\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_clip_path\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpatch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1097\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0myaxis\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_clip_path\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpatch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1098\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.7/site-packages/matplotlib/axis.py\u001b[0m in \u001b[0;36mset_clip_path\u001b[0;34m(self, clippath, transform)\u001b[0m\n\u001b[1;32m    923\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mset_clip_path\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mclippath\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtransform\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    924\u001b[0m         \u001b[0martist\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mArtist\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_clip_path\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mclippath\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtransform\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 925\u001b[0;31m         \u001b[0;32mfor\u001b[0m \u001b[0mchild\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmajorTicks\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mminorTicks\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    926\u001b[0m             \u001b[0mchild\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_clip_path\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mclippath\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtransform\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    927\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstale\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.7/site-packages/matplotlib/axis.py\u001b[0m in \u001b[0;36m__get__\u001b[0;34m(self, instance, cls)\u001b[0m\n\u001b[1;32m    680\u001b[0m             \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    681\u001b[0m                 \u001b[0minstance\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mminorTicks\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 682\u001b[0;31m                 \u001b[0mtick\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0minstance\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_get_tick\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmajor\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    683\u001b[0m                 \u001b[0minstance\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mminorTicks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtick\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    684\u001b[0m                 \u001b[0;32mreturn\u001b[0m \u001b[0minstance\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mminorTicks\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.7/site-packages/matplotlib/axis.py\u001b[0m in \u001b[0;36m_get_tick\u001b[0;34m(self, major)\u001b[0m\n\u001b[1;32m   1816\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1817\u001b[0m             \u001b[0mtick_kw\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_minor_tick_kw\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1818\u001b[0;31m         \u001b[0;32mreturn\u001b[0m \u001b[0mXTick\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0maxes\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m''\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmajor\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mmajor\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mtick_kw\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1819\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1820\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0m_get_label\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.7/site-packages/matplotlib/axis.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, axes, loc, label, size, width, color, tickdir, pad, labelsize, labelcolor, zorder, gridOn, tick1On, tick2On, label1On, label2On, major, labelrotation, grid_color, grid_linestyle, grid_linewidth, grid_alpha, **kw)\u001b[0m\n\u001b[1;32m    174\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtick1line\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_get_tick1line\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    175\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtick2line\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_get_tick2line\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 176\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgridline\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_get_gridline\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    177\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    178\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlabel1\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_get_text1\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.7/site-packages/matplotlib/axis.py\u001b[0m in \u001b[0;36m_get_gridline\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    497\u001b[0m                           \u001b[0malpha\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_grid_alpha\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    498\u001b[0m                           \u001b[0mmarkersize\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 499\u001b[0;31m                           **self._grid_kw)\n\u001b[0m\u001b[1;32m    500\u001b[0m         \u001b[0ml\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_transform\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0maxes\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_xaxis_transform\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mwhich\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'grid'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    501\u001b[0m         \u001b[0ml\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_path\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_interpolation_steps\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mGRIDLINE_INTERPOLATION_STEPS\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.7/site-packages/matplotlib/lines.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, xdata, ydata, linewidth, linestyle, color, marker, markersize, markeredgewidth, markeredgecolor, markerfacecolor, markerfacecoloralt, fillstyle, antialiased, dash_capstyle, solid_capstyle, dash_joinstyle, solid_joinstyle, pickradius, drawstyle, markevery, **kwargs)\u001b[0m\n\u001b[1;32m    384\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_color\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    385\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_color\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcolor\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 386\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_marker\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mMarkerStyle\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmarker\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfillstyle\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    387\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    388\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_markevery\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.7/site-packages/matplotlib/markers.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, marker, fillstyle)\u001b[0m\n\u001b[1;32m    243\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_marker_function\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    244\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_fillstyle\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfillstyle\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 245\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_marker\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmarker\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    246\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    247\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0m_recache\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.7/site-packages/matplotlib/markers.py\u001b[0m in \u001b[0;36mset_marker\u001b[0;34m(self, marker)\u001b[0m\n\u001b[1;32m    316\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    317\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_marker\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmarker\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 318\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_recache\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    319\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    320\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mget_path\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.7/site-packages/matplotlib/markers.py\u001b[0m in \u001b[0;36m_recache\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    249\u001b[0m             \u001b[0;32mreturn\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    250\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_path\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_empty_path\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 251\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_transform\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mIdentityTransform\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    252\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_alt_path\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    253\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_alt_transform\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAACzCAYAAABGksrdAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAFkNJREFUeJztnXmU1eMfx1+3mZIWWpFGU+pERCllrSzJsVdS6OAQp2MpIg7Zl0624iAVpd2JFssJh2PJXiQkgyJRSA3aiJaZ3x9z3t/n3jv9aJn5fu/3+3xe/8zc79w78zz3O899f57P81lSpaWlGIbhH1WiHoBhGNFgi98wPMUWv2F4ii1+w/AUW/yG4Sm2+A3DU2zxG4an2OI3DE/JD/OPpVKpxEUUlZaWpvR90ucHyZ9j0ueXjim/YXiKLX7D8BRb/IbhKbb4DcNTbPEbhqfY4jcMT7HFbxieEuo5f1Tst99+ADz11FMA7L777gB8/PHHAFx77bUAWGGT+FG9enUA/v7774hHsmOcf/75AEydOhWAv/76C4DatWsDUFJSUuljSPTir1q1KgCXXXYZAJ07dwZg1apVAMyaNQuAKlXKDKCtW7eGPcRyaCyFhYUAFBQUANCvXz8AVq5cyVdffQXAlClTMp77xx9/ALB27VognH+gsNlnn30A+PzzzwGoX78+AHfeeScAd999dzQD20F++OGHjMc1atQAID+/bElu2rSp0sdgZr9heEqilX/JkiUA7L333oD7VG3YsCEAW7ZsASCV2mb0YyRUq1YNgEMPPRSARx99FHAWwObNm3nllVcAeOuttwBYt24dAP/880+oYw2D7Hvz5ZdfAlCnTh3AWTdHHnlkuAPbCTSX0tLS4H9PaMs5YMAAAEaMGJFxvTIw5TcMT0mk8suhJ4XX3l97+m+//RaAUaNGAZT7FI6Sk08+GYAHH3wQcA6tlStXArBmzRqGDRsGQMuWLQH47LPPAOc0koUjkrD3f+CBBwCoV68e4BTxjjvuAGDatGmRjGtHSFdx7el1TV///PPPcs+tLEz5DcNTEqn8Bx98MAC77bYb4PZaUvj7778fKNs/5xra8+l4UmPUXvftt98OTivGjRsHwLvvvgvAkCFDAMjLywNg48aNgFOTOCKrTe+LkDLK76H3JC7Iuy/0Pzpo0CAARo8eXeljMOU3DE9JnPKnUin69u2bcU2K/+OPPwLufD+X0Pm+9vFSguXLlwPwzDPPANCoUSNOPPFEwO1/9XjkyJEANGnSBIBjjz02jKFXKldeeSXgLAAhq0a+kDhZN9WqVQt8FUKWjGI4wsCU3zA8JXHK3759+yCiT0gl3nvvPQA2bNgQ+rj+i0svvRRw5/nFxcUADBw4EHAxC82bN6djx46AU/66desCzk+QBHRio8i99DNygFtvvRVwyh+n0OwtW7YEVpnmJe//p59+Gto4TPkNw1MSo/zpXmF5+cWCBQsAuOWWW0If1/aivb1iEaR477zzTsbztmzZwvXXXw84tdNrFi1aBMAhhxxS+QOuZJYuXQq4RBf5bXT6MXv2bMBZdXFS/pKSknI+DCn/fffdF9o4TPkNw1MSo/zK7urcuXMQ4fbrr78CMHPmzIzHucRee+0FwOLFiwGnAEOHDgVg2bJlgIvi27p1Kz/99BPg/ALaN/bv3x+gnOUTNxo0aBBEZ4r58+cDMHbsWABWr14NxEvxRX5+frlxa35h5meY8huGp8Re+RUppSyogoKC4FNVMfA6I89FlVBGnrLU5s2bB8AxxxwDuJgE7YFXrVpFhw4dAOfn0CmGcv7lL1DsQHb8eK6ie/nBBx8EY1degnwfKsiS63P5N9q0aRNEYYoGDRqEPo7YL36ZvKeeeipQFtoqB5hMqN9//z2awW0HqkAjR5YWtL5qfjoCbNmyZbAg5ARTOLO2Ac2aNQNccY+4LBQlMzVr1qzcEdiECROA+Mzl
3xg4cGC5VOWrr74aCHd+ZvYbhqfEXvlPP/10wClEKpWiqKgIcEd8cUhp/f777wHo0aMHAOeccw4Av/32G0BwvHfAAQcEx19SiYULFwIunFf14eKCTF5ZbyUlJUGZK81fDtE4I2uuV69ewTXdQ9WTDBNTfsPwlNgqvxwmjRo1AmDPPfcEyj5JpSS5GMb7X2jfrkIj2hsqqKVBgwaBlSAlkT/gxhtvBGCPPfYAnGrmOhMnTgTcsWeVKlWCcmaXXHIJkBvFVXcVzUmhy+B8PlGUYDPlNwxPia3y16pVC4Bu3boBrnTV5s2bmTt3LuBSeJOASnQdddRR5X6mYzFZQXEp5KnjzU6dOgGuZFlJSUlw7bvvvtvma6WiYZS4rijkq0lH/7dRlJIz5TcMT4mt8ussu0uXLhnXS0pKgrRIqWWckTdYPo7bb789UAntF6UoKlGeS6XIt4XG9/rrrwPlFXH16tVBWPP/I06KLyZNmgRk3h8lJqVbrmFhym8YnhJb5W/fvj3gklhUxmnNmjW89NJLQDzO97cXebsLCwsD5VdosE46unfvDrjw3lylT58+ALRr1y7juk5nWrVqlQjvfjYffvghAF27dg3UX3O2Pb9hGKERO+XX3qht27aA2z9pT1xUVBR4kZPIkCFDuO222wDXyEIFIN544w0ALrjgAsC1+soV0v0W4PwZuodqvrlmzZoIRld5KB5j3333BcosUr0XN998M1DeT2NNOwzDqDRip/w6C1bkl9AnZ+3atfn6669DH1dYzJkzJ8hdULtqKcuBBx4IOAXNNR5//HHAndQoPkH7XrUqSxryS7Vq1Qoo899o7vLTqN2YvP1h+DxM+Q3DU2Kn/Irs0ienFF++gCVLluRkua4dJTvyS/Ncv34911xzDeBiHLQ/VDGMZ599NtSx/heySI477jjA3UOpm9qMxanxxs7w9NNPA2XtxHXP5AeQD8DKeBmGUenETvkV2aV9bfZZ8aOPPpqIai/Ziq+vhx9+eNDSq7CwMOM1imtQ5F+uIIVXPILGp7oDL7zwQjQDCwlFmso3s3nz5sD6eeKJJwCXqxKm9ZMKc6GkUqkK/2Nyeil9NezOu6WlpcEZTWXMb1s899xzgOvRp6Il33zzDQBXXHEFUDFOo/T5wc7NUSmsOqI877zzAIKeiu+///6uDXIXieIehkn2PRRm9huGp8Re+aPGN9VI+hyTPr90TPkNw1Ns8RuGp9jiNwxPscVvGJ5ii98wPCVUb79hGLmDKb9heEqo4b1JP0NN+vwg+XNM+vzSMeU3DE+xxW8YnmKL3zA8xRa/YXiKLX7D8BRb/IbhKbb4DcNTYlfGa3to3LgxQFDCe/LkyQAMHDgQiKY1kmFsizPPPBOAs88+G4DFixcDMHTo0Er/24ks5qEyVo899hjgatvtscceQMV2763sABFVdVUn29q1awe96wcMGADAEUcckfEalTIbO3YsQFDtVx96O3LPowjyycvL48ILLwTc2FX/T4vjpptuAuCLL74Adq1rbxRBPqrn9/HHHwOul4E6+WrelVGKTZjZbxiekjjlz8vLo7i4GCDo2bd+/XoAjjnmGMCpRUVQ0apx2mmnAfDHH38Aru+einU2bNgwqPDaqFEjwPUwyGbixIkAfP/99wDcddddGvN2jycM5a9bty7gujENHjw4UH5dUx8DKeFbb70FuKq3p556KuD63e8IUSi/eizoPqtrkbaqJ510ElAxVqopv2EYGSTO4Ve1atVAJYTqxOdiDz/V42/Tpg0ANWvWzPgq5V+5ciVQVgZb+0XNS6WxtafX/Pv06QPARx99BMDDDz8MwNq1aytrOtuF5qyy68OHDwfgrLPOAtx8wPlrsjs0SenfeecdADp06JDxONeRxaL5yKchK21nLJgdxZTfMDwlccpfo0aN4NM0e2+bi0d8GuPChQsB14FI/eu0N5S3f+jQoYwZMwaAnj17AvDQQw8BMHv27IzfMX78eACuu+46wPk+okIW
Sdu2bYGy7koArVu3Blw321QqFSi+3peDDjoo4zmyBDRX+UF0Xa/PVbSXz+7IJB9AGJjyG4anJE75+/XrF6iDkFLmcskyKdWsWbMA18te3m11Hn7xxRdZvXo14E4t9JyRI0cGzwE44YQTAGc11KtXDyA4DQkb7dMfeeQRAAoKCrb5vJKSEtasWQPABx98ALjArYYNGwLuXmqPX1RUlHE9l0mlUvTq1QtwloosgWnTpgHhzMOU3zA8JXHK37Rp0yAqTvsoRU3FgWyPvZRByldUVBQo5kUXXZTxWnnzX3rpJcB5z6W0UXXvlfd+2LBhAOy9996Am5v26+vWrQPKGo5eddVVALRo0QJwpx5CUYyrVq0C3NzioPz5+fkMHjwYcP+jS5YsAZxlEwam/IbhKYlT/oKCguDTVPvoqM+1dwTFqGdH7UnZunbtyl577QXA/vvvD7j4BXnRDz74YMCpyj///AO4KLKw0Zn1zz//DLg4dqm0FF8/v+mmm4JzcOVn6LFec8899wDw5JNPZlyPA6lUisLCwoxrP/74I+CstjAw5TcMT0mc8qfHQkv5KyIzKiyk/MuXLwdgv/32A9zedtasWYGSKwZAPg5ZC/odep5i+qPm3XffBeDYY48F3H3RuOXnaNy4Mc2bNwdcFKB8IVLIJ554AnDvS5yoUqVKRhQjwOjRo4FwY1FM+Q3DUxKj/NrfnnLKKcE17ZOjjmzbGbSfnzlzJuAUrkmTJoE6KB48m7lz5wIuM2xXct0rAin6Cy+8ALj9+Q033AC4OATNZ+rUqUGshp47b948AC655BKAINYhjnTo0CGwdjS/FStWZDwOg8Qs/n333RdwCTHgHEm5GNb7X8gklmPrnHPOAcrSX/VBl80PP/wAuKowUS96ofd/0aJFgNvSKDBHhTnkBKtfv365OX755ZeAS3CKk4Mvm4ceeiiYn+7Rt99+G/o4zOw3DE9JjPKr7JGcXwDXX389kPtJHv/GggULAJe0k66IUlQlg1x66aVAdOG7/4WsGY1X2wAl9lx88cVAmfJn07VrV8CVLHvttdeAeFkA2tboKBbKb1/+X1JaZWDKbxiekhjlb9q0KZCZDvr/9sZxQqGw6b4MqYLm+dxzzwHR7Bt3Bo1fe/xzzz0XcIk+6fdN3yu8eerUqYBL8Y3TUV+TJk2AsvnrPXj++ecBdywbpiVjym8YnpIY5Z8/fz4AvXr1Cjyo29o7xoVq1aoBcNtttwEuoCcdHWUq6eX2228PaXS7huam8UoR5a/ZunVrcPQly0c/U1qyUp579+4N5LZfR8d6hx12GFBWkET3Tgk9UfguTPkNw1Nir/zaE6arvJRlypQpkYypIlCQzxlnnAE4Zdi0aVMwZ4WI6nGPHj0AGDVqVKhj3V50X5SApCAkBfQoTXfGjBlBwo6Sgu6++27AlTDv3r07AA0aNABye++vIKcHHngAKLtfOvkYN25cZOMy5TcMT4m98ksRDz/88OCaPlVVCEKlq+OEzuqz/RYrV64M0lvV7ELKr16Euar8StbR6UR2SS6lsw4ePDjwfmtPf+CBBwJurrrHFdl6rbLQPNPTtOXTiBJTfsPwlNgrv5RAqZ7pXlPFkscRJb2oQcenn34KlJXrlho++OCDGa959tlnQxzh9iPF07l+dmGOOXPmAC5pp7S0NNjTq4utCpjIqy/LIKrSZDuCrFKVK6tXr14Qual5RIEpv2F4SuyVP7tMdzpSC2WRxamoh5qMyl9x6KGHAmXFPZS1J6SgM2bMAJy1IO951OhsXi3ShTL0tP9VFN+IESPo0qUL4Oagc37lM6hseRwyNnXKoXuaSqW49957gWjHb8pvGJ4Se+XXnklnqSUlJYFK3HnnnUD5EtdxYOzYsYCL6X/11VcBePPNNwMlEdpLqtFFtoWTHjkXBcrie/PNNwF3kiGLQGW9FOOfn58f3E/NVXt7RXJ26tQpjKFXCBqzLLKNGzeWu4dRYMpvGJ4Se+XXflfK2KNHj0Dp
sr3hceKTTz4BXIUinWpUrVo1+F77YamI2nzr7Pu3334Dovd16O9rPLLIFJ+vaMb0ppXyB6iCjyLhpk+fHtKoK47sugsAixcvjmo4AakwEwpSqVR8Ki9sJ6WlpUH+aWXM7/jjjwdg2bJlAFx11VVceeWVgFtU6mfXt29foGIXffr8oGLmWL16dcAd7Q0aNAhw9fz//vtvhg8fDoSTrFTZ9zBqsu+hMLPfMDzFlH8X8U01kj7HpM8vHVN+w/AUW/yG4Sm2+A3DU2zxG4an2OI3DE8J1dtvGEbuYMpvGJ4Sanhv0s9Qkz4/I1mY8huGp9jiNwxPscVvGJ5ii98wPMUWv2F4ii1+w/AUW/yG4SmxL+PlM+eddx4AkyZNAlxhzDFjxgCubdevv/4aweiMXCdxxTxSqRTDhg0DoFu3boDrcf/KK68AFVvTLoogH1WBXbhwIQAtW7YEXDcbdaxVmSzVN9wZLMgnuZjZbxiekjizv1atWkH994KCAsCZv61btwZg7dq10QxuF1BlW3BdilSld9OmTYCblyrDzps3L+O1lsRlpGPKbxiekjjlr1u3btAFVoonpVy3bl1k49pV0lVbnW40T3XsmTBhAgDTpk0DYMOGDeVeaxjClN8wPCVxyt+yZcugY4/6wqtJhB5H3cFmV+nZsycA++yzD+B68XXs2BFw3W3i0MHWiA5TfsPwlMQpf3FxMbvvvjvg9rrffPNNlEOqUPLy8ujduzdAME/1s/vll18AWL58eTSDM2KFKb9heErilL9OnTrlep8vWbIEcBFwcaZbt2506NABcKcZjRs3BuC6664DXC97w/g3TPkNw1MSp/yKegPn7b7nnnuAZJx3t2/fPvDuK27hk08+AeDtt9+ObFxG/DDlNwxPSYzy5+eXTaVdu3bBXlhWQBK83/JjXHjhhcH8NOfx48cD8Y9fMMLFlN8wPCUxyi/uuuuu4Hvltcd5r1+7dm0AJk+eDECzZs2Cn0nxX3755fAHZsSexCx+HePtueeewbUVK1YAmU7AuFGjRg0AOnXqBJSZ+jLv9YFgR3vGzmBmv2F4SmKUX2m7eXl5gZm/ceNGIN7BPZdffjlARuDSlClTABe2bAk8xs5gym8YnpIY5a9bty5Qdtyl1F2V74ojOsZr2rQpADVr1gx+dvTRRwPOsjGMncGU3zA8JTHKf+aZZwJlZa3lDY9joU6h4qMtWrQAXBLPpk2bWLBgAeBSelXA0zB2BFN+w/CUxCh/586dgTKFlPKrdHUcmTp1KgBt2rQBXKBScXExs2fPBmDz5s3RDM5IBKb8huEpiVF+hcGmE8fzb51UFBYWAq41l9J3+/fvT1FRUcZzDWNnsP8ew/CU2Cu/vOAqYw2wbNkyIJ6RffLg66sKdyh+f9GiRXTp0gWA6dOnAy7677+8/rIU4vi+GBWPKb9heEpilF/n4QBLly6Naji7jCL7dGIhL78sgXPPPTco4FlcXAzA3LlzAfj9998zXpONKb6Rjim/YXhKYpRf+97S0lJee+21jJ/FqZiHFF9n+Nklu/r16xdk9dWpUwdw7bt0Xf6BOM3bCB9TfsPwlMQof3oTzkWLFgHxVD7FJvTt2xeAp59+GnB7/u+++46ZM2cCrgW3KhbpPYjjvI3wSYX5j5JKpSr8j+kobP78+QA0bNiQVq1aAbB+/fqK/nPlKC0tTen7yphf1KTPz0gWZvYbhqfEXvmjxpTfiCum/IbhKbb4DcNTbPEbhqfY4jcMT7HFbxieEqq33zCM3MGU3zA8xRa/YXiKLX7D8BRb/IbhKbb4DcNTbPEbhqf8D0cqCqZlX1s1AAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 288x288 with 10 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "sess.run(tf.global_variables_initializer())\n",
    "\n",
    "# Fix: matplotlib's savefig does not create parent directories, so the\n",
    "# 'out/{}.png' call below crashes on a fresh checkout unless 'out' exists.\n",
    "os.makedirs('out', exist_ok=True)\n",
    "\n",
    "i = 0  # running index for saved sample-image files (000.png, 001.png, ...)\n",
    "for it in range(1000000):\n",
    "    # Every 1000 iterations, sample the generator (before this step's\n",
    "    # updates) and save a figure of the generated digits.\n",
    "    if it % 1000 == 0:\n",
    "        samples = sess.run(G_sample, feed_dict={\n",
    "                           Z: sample_Z(16, Z_dim)})  # 16*784\n",
    "        fig = plot(samples)\n",
    "        plt.savefig('out/{}.png'.format(str(i).zfill(3)), bbox_inches='tight')\n",
    "        i += 1\n",
    "        plt.close(fig)  # release the figure; avoids memory growth in a long loop\n",
    "\n",
    "    X_mb, _ = mnist.train.next_batch(mb_size)\n",
    "\n",
    "    # One discriminator update followed by one generator update per iteration,\n",
    "    # each fed a fresh noise batch.\n",
    "    _, D_loss_curr = sess.run([D_optimizer, D_loss], feed_dict={\n",
    "                              X: X_mb, Z: sample_Z(mb_size, Z_dim)})\n",
    "    _, G_loss_curr = sess.run([G_optimizer, G_loss], feed_dict={\n",
    "                              Z: sample_Z(mb_size, Z_dim)})\n",
    "\n",
    "    if it % 1000 == 0:\n",
    "        print('Iter: {}'.format(it),\n",
    "              'D loss: {:.4}'.format(D_loss_curr), \n",
    "              'G_loss: {:.4}'.format(G_loss_curr))"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.0"
  },
  "latex_envs": {
   "LaTeX_envs_menu_present": true,
   "autoclose": false,
   "autocomplete": true,
   "bibliofile": "biblio.bib",
   "cite_by": "apalike",
   "current_citInitial": 1,
   "eqLabelWithNumbers": true,
   "eqNumInitial": 1,
   "hotkeys": {
    "equation": "Ctrl-E",
    "itemize": "Ctrl-I"
   },
   "labels_anchors": false,
   "latex_user_defs": false,
   "report_style_numbering": false,
   "user_envs_cfg": false
  },
  "toc": {
   "base_numbering": 1,
   "nav_menu": {},
   "number_sections": true,
   "sideBar": true,
   "skip_h1_title": false,
   "title_cell": "Table of Contents",
   "title_sidebar": "Contents",
   "toc_cell": false,
   "toc_position": {},
   "toc_section_display": true,
   "toc_window_display": false
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
