{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Assignment 6"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 1.复习上课内容以及复现课程代码"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "在本部分，你需要复习上课内容和课程代码后，自己复现课程代码。"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "from __future__ import absolute_import, division, print_function, unicode_literals"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "try:\n",
    "    %tensorflow_version 2.x\n",
    "    \n",
    "except Exception:\n",
    "    pass  "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Requirement already satisfied: tensorflow in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (2.1.0)\n",
      "Requirement already satisfied: google-pasta>=0.1.6 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (0.1.8)\n",
      "Requirement already satisfied: keras-preprocessing>=1.1.0 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (1.1.0)\n",
      "Requirement already satisfied: wrapt>=1.11.1 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (1.12.0)\n",
      "Requirement already satisfied: opt-einsum>=2.3.2 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (3.1.0)\n",
      "Requirement already satisfied: numpy<2.0,>=1.16.0 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (1.18.1)\n",
      "Requirement already satisfied: tensorflow-estimator<2.2.0,>=2.1.0rc0 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (2.1.0)\n",
      "Requirement already satisfied: protobuf>=3.8.0 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (3.11.3)\n",
      "Requirement already satisfied: wheel>=0.26; python_version >= \"3\" in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (0.33.4)\n",
      "Requirement already satisfied: astor>=0.6.0 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (0.8.1)\n",
      "Requirement already satisfied: termcolor>=1.1.0 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (1.1.0)\n",
      "Requirement already satisfied: tensorboard<2.2.0,>=2.1.0 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (2.1.0)\n",
      "Requirement already satisfied: gast==0.2.2 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (0.2.2)\n",
      "Requirement already satisfied: grpcio>=1.8.6 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (1.27.2)\n",
      "Requirement already satisfied: scipy==1.4.1; python_version >= \"3\" in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (1.4.1)\n",
      "Requirement already satisfied: six>=1.12.0 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (1.14.0)\n",
      "Requirement already satisfied: keras-applications>=1.0.8 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (1.0.8)\n",
      "Requirement already satisfied: absl-py>=0.7.0 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorflow) (0.9.0)\n",
      "Requirement already satisfied: setuptools in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from protobuf>=3.8.0->tensorflow) (41.0.1)\n",
      "Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorboard<2.2.0,>=2.1.0->tensorflow) (0.4.1)\n",
      "Requirement already satisfied: werkzeug>=0.11.15 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorboard<2.2.0,>=2.1.0->tensorflow) (1.0.0)\n",
      "Requirement already satisfied: google-auth<2,>=1.6.3 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorboard<2.2.0,>=2.1.0->tensorflow) (1.11.2)\n",
      "Requirement already satisfied: markdown>=2.6.8 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorboard<2.2.0,>=2.1.0->tensorflow) (3.2.1)\n",
      "Requirement already satisfied: requests<3,>=2.21.0 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from tensorboard<2.2.0,>=2.1.0->tensorflow) (2.23.0)\n",
      "Requirement already satisfied: h5py in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from keras-applications>=1.0.8->tensorflow) (2.10.0)\n",
      "Requirement already satisfied: requests-oauthlib>=0.7.0 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.2.0,>=2.1.0->tensorflow) (1.3.0)\n",
      "Requirement already satisfied: rsa<4.1,>=3.1.4 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from google-auth<2,>=1.6.3->tensorboard<2.2.0,>=2.1.0->tensorflow) (4.0)\n",
      "Requirement already satisfied: pyasn1-modules>=0.2.1 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from google-auth<2,>=1.6.3->tensorboard<2.2.0,>=2.1.0->tensorflow) (0.2.8)\n",
      "Requirement already satisfied: cachetools<5.0,>=2.0.0 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from google-auth<2,>=1.6.3->tensorboard<2.2.0,>=2.1.0->tensorflow) (4.0.0)\n",
      "Requirement already satisfied: chardet<4,>=3.0.2 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from requests<3,>=2.21.0->tensorboard<2.2.0,>=2.1.0->tensorflow) (3.0.4)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from requests<3,>=2.21.0->tensorboard<2.2.0,>=2.1.0->tensorflow) (2019.11.28)\n",
      "Requirement already satisfied: idna<3,>=2.5 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from requests<3,>=2.21.0->tensorboard<2.2.0,>=2.1.0->tensorflow) (2.9)\n",
      "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from requests<3,>=2.21.0->tensorboard<2.2.0,>=2.1.0->tensorflow) (1.25.8)\n",
      "Requirement already satisfied: oauthlib>=3.0.0 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.2.0,>=2.1.0->tensorflow) (3.1.0)\n",
      "Requirement already satisfied: pyasn1>=0.1.3 in /Users/stone/anaconda3/envs/tensorflow_37/lib/python3.7/site-packages (from rsa<4.1,>=3.1.4->google-auth<2,>=1.6.3->tensorboard<2.2.0,>=2.1.0->tensorflow) (0.4.8)\n"
     ]
    }
   ],
   "source": [
    "%pip install tensorflow"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "import tensorflow as tf\n",
    "# tf.enable_eager_execution()\n",
    "import cProfile"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2.1.0\n"
     ]
    }
   ],
   "source": [
    "print(tf.__version__)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## tensorflow 2.0 相关基本用法"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* 出现问题：tensorflow2.0未安装成功\n",
    "* 问题解决：重新安装了tf2.0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "True"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "tf.executing_eagerly()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "x matmul x = [[4.]]\n"
     ]
    }
   ],
   "source": [
    "x = [[2.]]\n",
    "m = tf.matmul(x, x)\n",
    "print('x matmul x = {}'.format(m))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tf.Tensor(\n",
      "[[1 2]\n",
      " [3 4]], shape=(2, 2), dtype=int32)\n"
     ]
    }
   ],
   "source": [
    "a = tf.constant([[1, 2],\n",
    "               [3, 4]])\n",
    "print(a)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tf.Tensor(\n",
      "[[2 3]\n",
      " [4 5]], shape=(2, 2), dtype=int32)\n"
     ]
    }
   ],
   "source": [
    "b = tf.add(a, 1)\n",
    "print(b)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tf.Tensor(\n",
      "[[ 2  6]\n",
      " [12 20]], shape=(2, 2), dtype=int32)\n"
     ]
    }
   ],
   "source": [
    "print(a*b)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tf.Tensor(\n",
      "[[10 13]\n",
      " [22 29]], shape=(2, 2), dtype=int32)\n"
     ]
    }
   ],
   "source": [
    "print(tf.matmul(a, b))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[[ 2  6]\n",
      " [12 20]]\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "c = np.multiply(a, b)\n",
    "print(c)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[[1 2]\n",
      " [3 4]]\n"
     ]
    }
   ],
   "source": [
    "print(a.numpy())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 梯度计算"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tf.Tensor([[2.]], shape=(1, 1), dtype=float32)\n"
     ]
    }
   ],
   "source": [
    "w = tf.Variable([[1.0]])\n",
    "with tf.GradientTape() as tape:\n",
    "    loss = w * w\n",
    "grad = tape.gradient(loss,w)\n",
    "print(grad)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 模型数据准备"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [],
   "source": [
    "(x_train,y_train),(x_test,y_test) = tf.keras.datasets.mnist.load_data()\n",
    "x_train = x_train[:10000,:,:]\n",
    "y_train = y_train[:10000]\n",
    "x_test = x_test[:1000,:,:]\n",
    "y_test = y_test[:1000]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 对每一个维度进行归一化\n",
    "x_train = tf.cast(x_train[...,tf.newaxis]/255, tf.float32)\n",
    "x_test = tf.cast(x_test[...,tf.newaxis]/255, tf.float32)\n",
    "\n",
    "#y_train = y_train.astype('float32')\n",
    "#y_test = y_test.astype('float32')\n",
    "y_train = tf.keras.utils.to_categorical(y_train,10)\n",
    "y_test = tf.keras.utils.to_categorical(y_test,10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "#使用keras构建模型\n",
    "\n",
    "mnist_model = tf.keras.Sequential([\n",
    "    tf.keras.layers.Conv2D(32,[3,3],activation='relu',\n",
    "                          input_shape=(28,28,1)),\n",
    "    tf.keras.layers.Conv2D(64,[3,3],activation='relu'),\n",
    "    tf.keras.layers.MaxPooling2D(pool_size=(2,2)),\n",
    "    tf.keras.layers.Dropout(0.25),\n",
    "    tf.keras.layers.Flatten(),\n",
    "    tf.keras.layers.Dense(128,activation=\"relu\"),\n",
    "    tf.keras.layers.Dropout(0.5),\n",
    "    tf.keras.layers.Dense(10,activation=\"softmax\")\n",
    "])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"sequential\"\n",
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "conv2d (Conv2D)              (None, 26, 26, 32)        320       \n",
      "_________________________________________________________________\n",
      "conv2d_1 (Conv2D)            (None, 24, 24, 64)        18496     \n",
      "_________________________________________________________________\n",
      "max_pooling2d (MaxPooling2D) (None, 12, 12, 64)        0         \n",
      "_________________________________________________________________\n",
      "dropout (Dropout)            (None, 12, 12, 64)        0         \n",
      "_________________________________________________________________\n",
      "flatten (Flatten)            (None, 9216)              0         \n",
      "_________________________________________________________________\n",
      "dense (Dense)                (None, 128)               1179776   \n",
      "_________________________________________________________________\n",
      "dropout_1 (Dropout)          (None, 128)               0         \n",
      "_________________________________________________________________\n",
      "dense_1 (Dense)              (None, 10)                1290      \n",
      "=================================================================\n",
      "Total params: 1,199,882\n",
      "Trainable params: 1,199,882\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "# 查看网络结构\n",
    "mnist_model.summary()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Build the model using Model\n",
    "inputs = tf.keras.Input(shape=(None,None,1),name=\"digits\")\n",
    "conv_1 = tf.keras.layers.Conv2D(16,[3,3],activation=\"relu\")(inputs)\n",
    "conv_2 = tf.keras.layers.Conv2D(16,[3,3],activation=\"relu\")(conv_1)\n",
    "ave_pool = tf.keras.layers.GlobalAveragePooling2D()(conv_2)\n",
    "outputs = tf.keras.layers.Dense(10)(ave_pool)\n",
    "mnist_model_2 = tf.keras.Model(inputs=inputs,outputs=outputs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"model\"\n",
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "digits (InputLayer)          [(None, None, None, 1)]   0         \n",
      "_________________________________________________________________\n",
      "conv2d_2 (Conv2D)            (None, None, None, 16)    160       \n",
      "_________________________________________________________________\n",
      "conv2d_3 (Conv2D)            (None, None, None, 16)    2320      \n",
      "_________________________________________________________________\n",
      "global_average_pooling2d (Gl (None, 16)                0         \n",
      "_________________________________________________________________\n",
      "dense_2 (Dense)              (None, 10)                170       \n",
      "=================================================================\n",
      "Total params: 2,650\n",
      "Trainable params: 2,650\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "mnist_model_2.summary()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {},
   "outputs": [],
   "source": [
    "mnist_model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['acc'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 10000 samples\n",
      "Epoch 1/3\n",
      "10000/10000 [==============================] - 8s 804us/sample - loss: 0.1042 - acc: 0.9631\n",
      "Epoch 2/3\n",
      "10000/10000 [==============================] - 8s 790us/sample - loss: 0.0380 - acc: 0.9877\n",
      "Epoch 3/3\n",
      "10000/10000 [==============================] - 8s 810us/sample - loss: 0.0256 - acc: 0.9919\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "<tensorflow.python.keras.callbacks.History at 0x1465be320>"
      ]
     },
     "execution_count": 38,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "mnist_model.fit(x_train,y_train,batch_size=128,epochs=3)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1000/1000 [==============================] - 0s 293us/sample - loss: 0.0179 - acc: 0.9928\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "[0.017922543494030833, 0.9927999]"
      ]
     },
     "execution_count": 39,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "mnist_model.evaluate(x_test, y_test)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<tf.Tensor: shape=(28, 28, 1), dtype=float32, numpy=\n",
       "array([[[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.45490196],\n",
       "        [0.49019608],\n",
       "        [0.67058825],\n",
       "        [1.        ],\n",
       "        [1.        ],\n",
       "        [0.5882353 ],\n",
       "        [0.3647059 ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.6627451 ],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.85490197],\n",
       "        [0.11764706],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.6627451 ],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.8352941 ],\n",
       "        [0.5568628 ],\n",
       "        [0.6901961 ],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.47843137],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.20392157],\n",
       "        [0.98039216],\n",
       "        [0.99215686],\n",
       "        [0.8235294 ],\n",
       "        [0.1254902 ],\n",
       "        [0.04705882],\n",
       "        [0.        ],\n",
       "        [0.02352941],\n",
       "        [0.80784315],\n",
       "        [0.99215686],\n",
       "        [0.54901963],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.3019608 ],\n",
       "        [0.9843137 ],\n",
       "        [0.8235294 ],\n",
       "        [0.09803922],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.47843137],\n",
       "        [0.972549  ],\n",
       "        [0.99215686],\n",
       "        [0.25490198],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.12156863],\n",
       "        [0.07058824],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.81960785],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.25490198],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.45882353],\n",
       "        [0.96862745],\n",
       "        [0.99215686],\n",
       "        [0.7764706 ],\n",
       "        [0.03921569],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.29803923],\n",
       "        [0.96862745],\n",
       "        [0.99215686],\n",
       "        [0.90588236],\n",
       "        [0.24705882],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.5019608 ],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.5647059 ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.6901961 ],\n",
       "        [0.9647059 ],\n",
       "        [0.99215686],\n",
       "        [0.62352943],\n",
       "        [0.04705882],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.09803922],\n",
       "        [0.91764706],\n",
       "        [0.99215686],\n",
       "        [0.9137255 ],\n",
       "        [0.13725491],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.7764706 ],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.5529412 ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.30588236],\n",
       "        [0.972549  ],\n",
       "        [0.99215686],\n",
       "        [0.7411765 ],\n",
       "        [0.04705882],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.07450981],\n",
       "        [0.78431374],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.5529412 ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.5254902 ],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.6784314 ],\n",
       "        [0.04705882],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.972549  ],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.09803922],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.972549  ],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.16862746],\n",
       "        [0.07843138],\n",
       "        [0.07843138],\n",
       "        [0.07843138],\n",
       "        [0.07843138],\n",
       "        [0.01960784],\n",
       "        [0.        ],\n",
       "        [0.01960784],\n",
       "        [0.07843138],\n",
       "        [0.07843138],\n",
       "        [0.14509805],\n",
       "        [0.5882353 ],\n",
       "        [0.5882353 ],\n",
       "        [0.5882353 ],\n",
       "        [0.5764706 ],\n",
       "        [0.03921569],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.972549  ],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.65882355],\n",
       "        [0.56078434],\n",
       "        [0.6509804 ],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.48235294],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.68235296],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.9764706 ],\n",
       "        [0.96862745],\n",
       "        [0.96862745],\n",
       "        [0.6627451 ],\n",
       "        [0.45882353],\n",
       "        [0.45882353],\n",
       "        [0.22352941],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.4627451 ],\n",
       "        [0.48235294],\n",
       "        [0.48235294],\n",
       "        [0.48235294],\n",
       "        [0.6509804 ],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.99215686],\n",
       "        [0.60784316],\n",
       "        [0.48235294],\n",
       "        [0.48235294],\n",
       "        [0.16078432],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]],\n",
       "\n",
       "       [[0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ],\n",
       "        [0.        ]]], dtype=float32)>"
      ]
     },
     "execution_count": 40,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_test[0][1]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<tf.Tensor: shape=(1000, 28, 28, 1), dtype=float32, numpy=\n",
       "array([[[[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]]],\n",
       "\n",
       "\n",
       "       [[[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]]],\n",
       "\n",
       "\n",
       "       [[[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]]],\n",
       "\n",
       "\n",
       "       ...,\n",
       "\n",
       "\n",
       "       [[[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]]],\n",
       "\n",
       "\n",
       "       [[[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]]],\n",
       "\n",
       "\n",
       "       [[[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        ...,\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]],\n",
       "\n",
       "        [[0.],\n",
       "         [0.],\n",
       "         [0.],\n",
       "         ...,\n",
       "         [0.],\n",
       "         [0.],\n",
       "         [0.]]]], dtype=float32)>"
      ]
     },
     "execution_count": 43,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_test[0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 46,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[1.25720206e-07, 5.91425930e-08, 6.32607779e-08, ...,\n",
       "        9.99935031e-01, 1.01975006e-08, 6.13120210e-05],\n",
       "       [4.98389636e-06, 2.51755177e-04, 9.99448478e-01, ...,\n",
       "        1.46468028e-06, 1.98647213e-05, 4.11493275e-08],\n",
       "       [3.98561269e-06, 9.98225749e-01, 1.62139928e-04, ...,\n",
       "        6.10260140e-05, 3.88690358e-04, 5.85076077e-06],\n",
       "       ...,\n",
       "       [9.99999762e-01, 5.94773730e-10, 1.33712668e-08, ...,\n",
       "        5.01138508e-09, 1.09457105e-07, 1.08453095e-08],\n",
       "       [3.18507227e-04, 2.99286592e-04, 7.39790499e-04, ...,\n",
       "        5.33797429e-04, 9.92702723e-01, 1.43682712e-03],\n",
       "       [8.94516143e-06, 7.49030278e-06, 2.03893178e-05, ...,\n",
       "        3.30411904e-02, 2.44121984e-05, 9.57345426e-01]], dtype=float32)"
      ]
     },
     "execution_count": 46,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "mnist_model.predict(x_test[0])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {},
   "outputs": [],
   "source": [
    "mnist_model_2.compile(loss = tf.keras.losses.categorical_crossentropy,\n",
    "                      optimizer=tf.keras.optimizers.Adam(),\n",
    "                     metrics=[\"accuracy\"])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 使用tf2.0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "metadata": {},
   "outputs": [],
   "source": [
    "(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 54,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       ...,\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]]], dtype=uint8)"
      ]
     },
     "execution_count": 54,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_train"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "60000"
      ]
     },
     "execution_count": 49,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "len(x_train)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 55,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       ...,\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]]], dtype=uint8)"
      ]
     },
     "execution_count": 55,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_train[:10000]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 56,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       ...,\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]],\n",
       "\n",
       "       [[0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        ...,\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0],\n",
       "        [0, 0, 0, ..., 0, 0, 0]]], dtype=uint8)"
      ]
     },
     "execution_count": 56,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_train[:10000, :, :]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 58,
   "metadata": {},
   "outputs": [],
   "source": [
    "x_train = x_train[:10000,:,:]\n",
    "y_train = y_train[:10000]\n",
    "x_test = x_test[:1000,:,:]\n",
    "y_test = y_test[:1000]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([5, 0, 4, ..., 6, 9, 7], dtype=uint8)"
      ]
     },
     "execution_count": 59,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "y_train"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 60,
   "metadata": {},
   "outputs": [],
   "source": [
    "datasets = tf.data.Dataset.from_tensor_slices(\n",
    "    (tf.cast(x_train[..., tf.newaxis] / 255, tf.float32),\n",
    "    tf.cast(y_train, tf.int64)\n",
    "    )\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 61,
   "metadata": {},
   "outputs": [],
   "source": [
    "datasets = datasets.shuffle(1000).batch(32)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 62,
   "metadata": {},
   "outputs": [],
   "source": [
    "optimizer = tf.keras.optimizers.SGD(\n",
    "    learning_rate=1e-3\n",
    ")\n",
    "loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)\n",
    "loss_history = []"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 63,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 0 finished\n",
      "Epoch 1 finished\n",
      "Epoch 2 finished\n",
      "Epoch 3 finished\n",
      "Epoch 4 finished\n"
     ]
    }
   ],
   "source": [
    "# 使用tensorflow需要自己定义迭代过程中涉及相关参数\n",
    "for epoch in range(5):\n",
    "    for (batch, (images, labels)) in enumerate (datasets):\n",
    "        with tf.GradientTape() as tape:\n",
    "            logits = mnist_model(images, training=True)\n",
    "            loss_value = loss(labels, logits)\n",
    "        grads = tape.gradient(loss_value, mnist_model.trainable_variables)\n",
    "        optimizer.apply_gradients(zip(grads, mnist_model.trainable_variables))\n",
    "    print(\"Epoch {} finished\".format(epoch))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 2.回答以下理论题目"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 1. Compared to FNN, what is the biggest advantage of CNN?"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "卷积核的设计，可以共享权重参数，降低了待训练参数数量，简化计算量"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 2. Suppose your input is a 100 by 100 gray image, and you use a convolutional layer with 50 filters that are each 5x5. How many parameters does this hidden layer have (including the bias parameters)? "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`50 * (5*5+1) = 1300`，即每个 5x5 卷积核有 25 个权重参数加 1 个偏置参数，共 50 个卷积核（输入为单通道灰度图）。\n",
    "参考网址：<https://blog.csdn.net/yanzi6969/article/details/78019683>"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 3. What are \"local invariant\" and \"parameter sharing\" ?"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "**local invariant:** 局部不变性，一般认为人对外界的认知是从局部到全局的，而图像的空间联系也是局部的像素联系较为紧密，而距离较远的像素相关性则较弱。因而，每个神经元其实没有必要对全局图像进行感知，只需要对局部进行感知，然后在更高层将局部的信息综合起来就得到了全局的信息。\n",
    "参考：<https://www.zhihu.com/question/36980971/answer/94840350>\n",
    "\n",
    "**parameter sharing:** 参数共享，不同于全连接神经网络中，每个参数只对对应的上一层输入使用一次，所以参数非常多。而CNN中的卷积核，在计算时会被使用多次，所以参数是共享的。"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 4. Why we use batch normalization ?"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "BN可以防止梯度爆炸或弥散、可以提高训练时模型对于不同超参（学习率、初始化）的鲁棒性、可以让大部分的激活函数能够远离其饱和区域。\n",
    "参考网址：<https://zhuanlan.zhihu.com/p/52749286>"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 5. What problem does dropout try to solve ?"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "减少过拟合"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 6.  Is the following statement correct and why ? \"Because pooling layers do not have parameters, they do not affect  the backpropagation(derivatives) calculation\""
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "错误。梯度反向传播时，仍然需要经过池化层传递到前一层；不同的池化操作（如最大池化、平均池化），对应的反向传播方式也是不一样的，但总的原则是保持传递的 loss（梯度总量）不变。"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 3. 实践题"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### 3.1 In the first session of the practical part, you will implement an image classification model using any deep learning libraries that you are familiar with,  which means, except for tensorflow and keras, you can also use pytorch/caffe/... .  The dataset used in this session is the cifar10 which contains 50000 color (RGB) images, each with size 32x32x3.  All 50000 images are classified into ten categories. "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 153,
   "metadata": {},
   "outputs": [],
   "source": [
    "import tensorflow as tf\n",
    "import keras\n",
    "import matplotlib.pyplot as plt\n",
    "from keras.models import Sequential\n",
    "from keras.layers import Dense, Dropout, Activation, Flatten\n",
    "from keras.layers import Conv2D, MaxPooling2D\n",
    "import os"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 141,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbAAAADCCAYAAAAo05dUAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nOy9d7hl91Xf/dnl9HZ7vzN3ujTqXZblKhk32ZiAKTHYSQwkYBIg4QkhkAQcSCBP3gCvsfHzYhJMiw0YF4wlWW7C6prRSJqq6TN3bm/nnn722Xv/3j/W2ufOaGau5k4s8+rlrOeZ58w9Z5ffb/3aKt+1lmWMoUMd6lCHOtSh1xrZf98N6FCHOtShDnXoaqhzgHWoQx3qUIdek9Q5wDrUoQ51qEOvSeocYB3qUIc61KHXJHUOsA51qEMd6tBrkjoHWIc61KEOdeg1Se5GLo7FYiaRTBIEAQA2BseS3+KunIUx/XQdBwDLsrAsPSf1Wt+X+yMAvxNdawyhCeW3UH61bOuCNoRh0L6+/Z3eY+kLLCxsvcax7XY75Fqj7157rml/B8vFMpVa48KXrkO5fMH0DgziNWrSN6+BMXJ7LJ4EIJ6QTycWB8C2LRr1CgBesy7vVp5GfYjab9k2mWwOgIQ+xwQ+APV6LepBmweNujwv0GvafTPg+3JNGEbfyd+u6+qno08LiKIrQrmE1WJp0RjTfyU86evrMxMTE1dy6XeMQm2o70u/XddpzyE7mgPtuSSfpv2/q6O9e/deMU+kHZZxXRtb5yKWdd77o/9dGNbiBwG2rp9I2ozmMNqf6Hm2beM4MpbR+EdjHZEJTftVbW7o/Y4u5pjr0mq15Dn6rui5a/NC5ms8ZrfvtyyLas2j6flXzNZ8ocv0Dwy3+21ZNrYdjY99AUfa69ucP24m6sQF17bvMWujHLW9zZHzW/kK4USX+nXtlpfx+Pxv9KIzJw5taK506MpoQwdYIpnk5ltvo1hclr/tkJ64DNCm3jQA/T0ZAPq6sgDEnRhuIiUP0EWwvFIEwPPl3u6uAgB20KLZbALQaDQASKZk0w6QBVOrVyh05eV5Rr7zmp48nph8Og65rLw/k5H2xGLynLpea6JD1Xbb9/vG4rf+8HMbYQm9A4P88v/4BOeO7AVg4dRhgkD6ObjpGuHNtmuln0ObtE8uRw8+AcCZ4y8C0CrLgebovflu4YmbTHPn698IwPad8rzGqvD/4IF9AIShh9cSfh06uB+AUnERgKYn/Gx5DstLcuBVanKtH8hv/f090r4e4Vlgyviyf9Goyxh98fNfPXOlPJmYmGDPnj3tQ+VVofa+JbtQvSp9W1qWfvf0dBN40s9UWuamE0/IrTr2IRYXikIbI8dxrpgnAK5rM9iXJJVKtdvu2tKC6JD19WCINuTiaomkLYJPxpa5UVahx05Lf1IJ/T2ToVDoAmBlReaIV5UxjjbUltdqb9yOCizxmLy7kJE1MtzfzdTcHABVT9qTz3dL+1rypGp1FYCx0TyxWCQAuXzt20c3whL6B4b5zd/5n+25kkokiCelHaEj/fONCsU6Wk4AsWhqRQKaCs4ta00YBbADAyZ2QdsDO+LxWjvWBD1zwW+RABBgrR2Oek3U5kigbz8L8NvPk2s+/L3Xb2iudOjKaEMHWKPR4OChgxQXdZNIgtUrk60vEC3BSg0AUA1lAVUCg7FkgdUaclDU6rqpBjK4iyr5JV3T1hIcXayJRELvrQLghx5WoxcAXfu09NBLudKWStNjWSXQdFoOMMuWSWw58oluGLVGC1+lTcdN0GrUN8ISgiCgtLJMb5ccAqZ/EOPKATu8aatcE8rz7VA22bDm01hZkuvrssmO9gnfNo1vB2B8+2YARkbHGBgYBCAW0wXdJRvy+NiQ/O17NLTdxRU5CBcXhf+uaoFYDt29cn8yI9eullYASCSF16ERnsXcBKVVFTKaVx/oHm3K
3w1q1mRDXT53EoDJw6uslmTOvP6t9wGQV2Eo0mUsrO+qDd0CYo5DoNJBGIRYcVkbTdUco0MlOsC6cmnyKoR5ZelPWJd1lI7JQVhIy2c6lSQbl/m9qGssNPKZTMrY9/f3sbIi4x4JhyPDMvcc3aIHBnqI6W+nJqcBiMe0PV3Slqx80FsotDWjaq16aVXlFSi0wE1Iu70woLpaBiCWUc1Q+4laNkIsfD2ogobwsrEqczqu/QyQfaRSr2BbCW2zCIVGfwsjq4d18eFktTVN037nmvYZXnBtcN5z5FrT1pJfVSGuQx0fWIc61KEOdei1SRvSwGwg5VogAg2be5NMDIpUM6BmqFSk8USmnWaDRkvNGPpdXE0oqAnRhPJ7oSfdVvPjKnVF2nlk/ml6DVpqYk/rd25Grk3q375VxVbV3VfpMPLVZTOivVTU5NTyW5ErgXJptS2VXTEZA60WXlMkwVrNY2LnqL5DJObIvNfTp2bBmM2OHTsBuOfu2wEYHRwTHhTETN5ypR3pZAI3smqolF6vipbVVM0xnUrT3SVS9LatuwE4fPglvUmuaTZrFNQMpK44VktiJjKIRB9JmysrVeq1Zrt7V0uvZpqy6Nm2isqzk6cAePHJvwOgVa8Ry0p/6yXRzvI9MkfDyCdi2VejMFw1WZZF3LXbPuHuvl6q6seMBaJ5RT48S/s3PDTAUL9YHE4dPwFAnyvzaGhENHDbVx+ZZbW1zN6CWESMo1paQe5JZ9I4tryjf7APgKRqbWXlk29aFNSsP6prVK3/uDH5O6HmvdALyOfE4mBa4Zp/7wopCANK1Urb57a4sMS5qXl5Z1K1vZyMY8KOTMDgRVpsS/pSUxN8Sq0U2LL+y14Zz5M2bd2yA4Dt28S6kYpMlWG4pilp8yMfeRipYuYSZsaXUbTn2ViEdDSv7wZt6ACzLEPS8snl5Lado930pmThxULZpCvLshkGoSyqes1HTfjk1S/m6kFTVFOBYgjoyaUpq9nHU5NhXU0E0YTKZjK0PPUBqL8opmbGIJBrXceiqQdKPAJOhDLRmxUxnxBECxF8nbyr1SZBuLEtzYQhfqOOpcCURDzFqppYe4fkUNp0nZgFB8ZHpL2xOJGTqeUL347MiEmxdnJBvreFjy/tf4E7rpVD6Y133iHv1AVU0g3n7Jlp4urji8dlM+nrl0P07OQx+T6ZplKv6n3SPlfNQvm8HOoRKCTw1wAfCfWvXA1ZG9zMNkKRGajVlDZPT4qLIR+Z07pyzK/I/FqamQJgcFx8kJHt2XAxSOjVJMexKeRzbdPdwMAA80sy7kmdw6vqHx7sE0EmkXBIpeSAGR2XAyvy67Y8mdNxZIwS8QQ1BfGMj4hAY9RZFNdx9DyPvl4VpHSTbzZlXuSiedCsU15d0d9kXvf2yYGYyqi/y5LvXS9Ooyrv9JutDQstlWqVJ556kooKZTYx6mq2bgTCm1hcPh3dUwILGmruDvSAyaipPGVJ+5IJGePA9qhWZa3teVF8xvOLYhbdumULAH19fW0/aQT8icyCbYBYaL8y0CMyLVrWRX6yDr061DEhdqhDHepQh16TtCENzLUsuhMuKZUWC5kU/XmRDgNFT0UGuLYz2rZpKoihDddWqSaIIOSOnKPz80WCljyhXBPJuhaIJpJNKfKwGeCo9B2ZjxyFl9eros2kY3lclYAaChypq6khVKNRsSLXFmstKjX5rdGy8fyNSUwmDGnWqmRVqs739HPrTTcDML5VTBZlNQu9dHISgFKtRqUokvZSUaTLmVmRePNqQsQWE96XP/s5Yj8o/HnT6+4FIBYTfg4NjWgjFimqtvHcPkE1umpKyah5xw8MXkXeqexuow8D5XGE4LNJt8eqS01J/18iY0x77BeWhX+nT58FoKl/55JxapUSAEdeEMl7aGIbAF1Do9GD2kL1q6ktRuS6Ln19vW2p3Gs0GBwSTSmdFM0xoeETw/1qSm7VWFoUk1ouL1qQq6jB0JPnxNwIRm+o16TP
kSnMTsrzmmq1aHrNNjCqUpI5k8mK9hFpHUvLKyRikStAnuMpmrVciTQl+cErBXiezMdsJtPWYK6UgiCkWKm3Q08sDK6aNNOqTUWArkjTbBDgq+xdrqmlRs31CUv6mzXSR8eFmKKgG7rmT0yKRn5mZhaArnyB8TGxlvT3ibm2q1vMlhFK1DHhRdplEIE62jD9NeThGoijU+3j1aSOBtahDnWoQx16TdLGNDDHor8rSS4mUkky6WA7ImFEsS0t9QWtSSVeO94rUEktNOrXUsnfuCJZlb0qgTqzawqx9/WzrHbsqeUqMbXd5yvyjtasaA71VdHaNvVtZ2BAJCorJ36ipsLWKxWR1FbLIo0trtY5PSnXBI5LU/0KV0qWbZFIxGg5Ih3XU1lOlUTaff6xZwBYXhKpdWpaQBMxx2r3oekLDyJNcbhfhmR+Vn06iTjlokjVR08JUGF4WJzvUfzN8PgQI+ofOTsrWt5L++VzYFgk+dNnF6Gl8GGV3AMFikTgl4Qrkm+9EZDPi+bmuokN8eO7QwajMYBT584BcOqsfE4eFxh9Xy7LWJ9oETNnhZf79zwLwO1vllipdL7wfxbJvEGyAJsQrylzL/Ca+NE80EB4V9XjksZaWgTtIPepmRkAChrYntZ1U2rK/DXGENeQiFbkY1XNydKQhtAPCJ3IX6shJaokROEt8USauGrw6aQwKPKFrqrlYLUo78wmC1iqNabzhXYA/pVSaAx1L2zPZbAwQeT3lk9L2xvhKbxWg5ZenkuLX71cEv6VIk1Ttdx4PE4uHgVjSx+qvvQz8qk1F1cpFmWNZrKyjw0Pi3Vj2xYJhcnGEyQ05CECnOhywmh8WjsJw3mafdBRwF5V6mhgHepQhzrUodckbSyVlOsw0p8hHxctJZuOY6k21U4Fo1JIUxFtNha9OfGjZDTSv7QqGlNBpfyyIg3PTC1SaWp2AJVuRtPqN4uJZHV6qUjTKPJRRbKC+gbu2S2Q9NJMgKnpb30iZTZr8pxKRc7sREy+Hx/KtQOF50oNlo7OboQl2LZLOj3IfFF4cnxykkMHD8hvKlUGioisayCqY4fUm6JVFcvyWVYU1ulzh4VXKenTrm27QLW0x7/9LQA2K3pq5y6B4vf2FtrByIW8SM62LxJytRmhQZvUi+LzCIIoy4nwIPKFRHDoRNJp+zVqtShd1dVQ5E88X825jMpjwESqgIkgzZFf5OVylkWoqNJI0yhrdpFzc6K5zM0tEwTiXxobkPuPPCsa8cDQMAA777iTaAnYkQ+mncJBmxKlXTLfCTSZwcIQj0dpmQy+ahtNDUTvTmnmGEVHunaMhqdrQn29UeYYTxG7cdUa4vE4llpHAtUyUupba+l45vJdJBU+bimSMPJrtTTrhhVLtK9BtY2mhlUEnjAm7ormk+/poaX+5VK1RrBB1F1oDPVmg2ZrLeVb9O52cHGUFUMHJ7QMVV0vyZRqiFG/W/J3Q/3rvhVi9L54lPmgPZ3ke9d12teUa/Lc1WOyDheXZK/KJQuMjYpVp1v9Y/Eow1AEuVdftx/S9tEFZoNhOR3aEG3oALscrVRbfOyrx+jPJfjIfRMAnFio88ihFUIDt21tcu+ugXWfYYzhxckGM6s+jmVx03CaQmr95tVbAYdnKgSU6ErHuXNX2DbBXI4OT5fYc2qF0BhuHM+zYyS57vWWZSWAPwZuA5aAHzLGnF73JqBaKvLwZz5FrruX+7//nwAwP32GQ3u/jTEhI1u2suWa69d9hjGGfc88xiOz53BjMT70oX/K+Pj4uvfMzs7y3/7rf2dhbpaR4T7e9q77Lsod+XI6fXqWQ4fFzHb99VsZvfQ7rrcs6+3GmIcvwZMZ4I3ATxpj/mr9d53m2muvZdeuXTz//PMAPPTQQ/zsz/4sQRDw4Q9/mH/3i/923fYaY/i5n/sFHnzwqwD8h19e/3qA5VKZj//1g1TrdW57ai+/+av/4RXv+fSnP82v/8avA/Arv/zv+dCHPvTySwbP
/+NlfOlC1tePvxJP6nWPx588TjoV5x1vuA6Aqfkizx46gzGGTcMFdk30rdtWYwzPHZxiZqGM69jcsnuI7nxq3XtKlQZfe/wItbpHT1eaG68dfsXsKVPzJU5OLWOMYfNwF30DF6f3syzrF4wx/13//0bgd4AbgR8Gvg94J68wV2rlIt/+qz8gU+jhre//KQBmzr7Evie+TBAG9Gy9jpHdd63b1kZpialvPkRteZbNt7+NLTe9ad3rAbxGjee//hnq5WVS2W7e8gM/TULj0S5HB557lM/+wUcJw4B77ns/973vn1/qsvPXjwX8LvAuoAacA+5mHZ5YlrUF+AzQC+wFfswY463XLsuyPgT8iv7568aYT7/C9Rve6yzLug34IyAFfAX4WfMKsRSWZf0S8GEE8/evjDEPX+KaP+MK5smGfWA9uRSuJ3bwRMwlnUjToElPNsHPvOMavGaD0BgeObzKT9y3nXzS5f9++Di7R7tQEzJpzVM4vSBS3YkzqyxUPBZLPpsLGRp+wIHpGq8bzvO+Nwiib2xY7vmrvSd58rhoSS/MlhjrTnLteC/7Ti/z2P793DYxQq3SJJdT+36gaaqSMWpNnz2nV/jHb9hM02vxl09PsW0oy47NsmHnlsu8eGrh5d3+MLBijNluWdYPA78F/FD0o+O4dPX0cXxScsDNnD5FOtakVi0TT6XZfdvrOfri4xhjeGnfHrZecx01z+PkS4eYXV4hkZN+9Q3KAZ9SbXV04iamjh0kDALe/rZ3sbS8yJ//2R/zoQ9+kJb6RBYWxa93ww3Xsn2H2Or/4s8/wz/7pz/K3df283994s+ZOnGWN7zuVpqNJE2NCQoRTSs0Ps2mx/4Dp7jndbuJJxI8+ug+1fBkDtbr7dRaR4FPWJa18xI8+T7gSxfPmIvnsQG2bdvGvuefxwCBH/CRj3yErz78VUZHR7nr7rt44IF3snv3tWuaV1sTk8+HHnyQY8dOcOTIizz04MP8x//0Ud7+1rcCUNKYJCybA5OC3ku5Cb7w2FO8Ydc2bt02wVcOvsTv/c5v8xsTY3SPCd8sP7IgyDuWllf4tY/+Gk8/+yyWZXHn7bfznve8ty19X4Zezpf/euHPFrZtt5F6qUyKhhUSBJDNJHnbW28gqDYxxvD0gdO87c5d7JzYxP/z+W+wbdMgac19mImC+jV4tzDUw9mpZSp1j/e+/XqWlqs8t3+K73vXrTQrmifUkvUQiyVIqubwyGMHuWHnKJtHe3j8uZPML3vs2jbMarVJS9HAjqZkazRaeC2fE+eWufv6CVw3xmPPH+O6a+122qaFxQVa/kU+5LPAPwF+AcAY8wHLsv6oPR+MwTMhVhAlmg4JbYtGyyOZK3DrAz9KM2xhwpA9j3+R1z3wQax4iie++If0bdqOo6nh4ppGLpvStHVeAyudYvSO+ylNHcc3PmVfkYqKanSI4gFtWqrRH933dQrDm7j5XT/K6ee/zbPf+jzb7/wepptLHD8jSNf+fhEmRkbGCcOAP/n4r/BjP/+79A9t4uO/+gEmbriXnhFBIJ+XJ/H89fN2YIf+uws5zC6xfi6g3wJ+2xjzGcuyPonMtd+/3MWWZfUA/wm4HVl2ey3L+pIxZmWdd6y7112Gfh/4CeBp5AB7B/DgOu3ajQgy1wEjwNcsy9ppzIWq6svnyeVoYweY6zLQ00t9Wcw1tuVSqbWoNlqEoaHU9HEth3PLNbozcVIJl8DANaM5XpxcYmxETF+eejZPnpOAwuVSwNnVJj2ZJG4yRpYYS5UGMddnwBXzVnJZDrsd+SFmemyMMTx6bIE7xws0ax7D2Th7TpzF9irYfkgro7D7ggrJtsuRmRW2j+QZ6s8RhoYdIxVWmiGbPTHjTfRnSMQukkC/F/hV/f9fAb9nWZYVSRnNZpUTJ57hyInjAEzPnCAoV2m1GpggoLqyyq4dEywtLFAb38SPvP8HmVmo841vfQOAu+97LwCbtwlvcr1ykM2tVHnxmw9y/c13
MXlWMjCsrpZ47KlnuPmWOwF4205JElyt1AkD2QyeeXov/+XXfpmnv/Rpdg5k+ea+k/yz972dwdEunnpGslTMzkl/Wy2fs5NzdHdlCIOQleUi2WyKs2enGFKgSFVhyoCHaFp3XoonwJcvnjEXS/ORiS76fObpp9m+bTtbJ7YQmIAffP/7+fznv8D27duIq8Rjte16crh84Ytf5Md+7EexbJf7738b//rf/CK7r7+Rnp4e9j9/BIDTp84QKKDomD3DyelZ3n/v3aw2PHYN9PPVp/ew/9HHues9okGkFAyg8g4PffUh7r//fno1e8d999/P3z70ED/8Iz+MdXnkx8v58kfn/9jyA6YWVttw60wzJFvIEAQ2YWho1AKyTpLFYpn+nizXbBshmXbYOtbD1NwKN18n8P+utGzWuSEZo6ZteHbvacb7U6Qcw1h/miebTVYW57FDzTDvyD2tkk+jKYfk1FyRt9yyA9ty2DUxyNMvnmRiKItfX1uj/Zp3syffzUunZugrpEi5ht7+AmOD3cwvrrBpRDOctHK4ztIFDIkkeMuyLmlblMS3az8FoU+jUsarVgnDkEqjTmBBaX6aRLZAkIhhLJ/BrdewOn2C3n4x67XLJuhcycZjEO8i19VFbfYUjgVRE3wFj0VZ/o0ftpOFL5w5wvXv+BHqrRrdW3Zx4KH/zcgt92BZMXxFbZSm5Qw4M3Oa8sI5iKfYf/wl0tOTFEZ38rUv/xE3vUHWdUxdFVy8fv5Y95CnLMvqQjSYS5JqbG8F/rF+9Wlknl32AEMOyUeMMcv6jEeQw+V/r3PPunvdJdo1DOSNMU/p338MvI91DjB9x2eMMU3glGVZxxGePLnOPZelVwXEUW60yKfbA0cu6VKut9a5A7wgbJdkAUjFLOrrxGQ1/YCYbbVT1yRjDo1XiOEqVT0K6bXMEvl07BXbBYwCkwDGGB9YRdT4DVG9Xmun8wFIprM0auV176kUl8gV1iT+RDJJs9G87PXLy8vkctl2DFd3Ps3SyuortKt5QbaNeNylrv6kS9A5hB+X4slVwRWnpqYYGx9r/z06Osr09PS690xPT190z9LS0mWv93yfmOu2S+t0ZTOsVtf37U1PT11grh0dG2V6emrde7iYLx6Qe6WbXk71hkc+s7afpVNxqo3152m52iCVXJNH06k4tXXuaXg+iZjbLl2SSSWoN9ZH4FbqTVLx896RTFCrr2vF+o6RV6uQyKyxMpHJ0nyF9XM11KrXiKswE0tlaNXXnyfNeolk5rx1nS1Q13jLS9BF6+e879PrvKYXKOqcOv8569Gl3nHF91zhXjeqz72qd2zgnsvSBjWwGN19/XSr09i2YxRLK3jUsWwbN5nCDgLN/G4RWi4m5pLM5IjVoKWmq8MnxdxW1RQ2yWQCx4ZCwmGkXybPcrFCPpNi73GBnvueNLVZGKK/O0nNa+E6NkNdeVp+g2rTx7Yhm4JqzeBFOeVausAssMIA2xIHubEdHNsm5tjtkhomWIO/XilVKyWe+rtHcAd3AbDt2htIeSHVaol6ZY53v/t97No5xrNPP8ELe59nfHQ3xq6Ty52iWa3ixkR6dRyBdrd8OQeq5WVJ59QK8FUabngtZldKnDorG2mU23DrtgkMNgabMAypF2scefp5yg0Pr1HnxPPPcf3b38ENN4q5rL5HNLATx0/TaDaIxeKk01kKXb0UV5tASKkUpRL6PwBxtBEQ5393YZkJS5OEBcbn2PFjzM3PsrKywuHDh7nmWtEwE5oW6Pw8e8bYhMYhxMVgceNNt3DLLbdw9pTw5lOf/BS+bq7nFlfxg5Bj83LIDSTShKHhpW/voV9NiNe8XrTaGgoOMT4t4xNons66V6NlPGZWptqa3YbZYQxNP2R5WYAm6VqDnpZHs9kiDEJWVyoksxmCVkDgezRqJQmytwAMjgJWmmUtg6Pm55eOnaJeq5GKD5LVcBbHski4DsOaad4KtKRIrUnSBVe1D8dYJBJJAh9MGFCv
liFMkdWyLA3dwP1Wi5bXxHVtUkmHXCZOIu7g+S2qFTlIctnshmH0xhiaLW8tk3toBNwSSuBwo1mn3qxRqZXx/CYrlSKOZeO3WgR+0E4lZimwq50pPoxSQUGrXids+VQ1BMBWMIen74wZC6PhDAZDrVG5YB9otOpgNaICFm3DeBja+KZBK2hSri9SqgaslhaolhZ5af83pV3fhQD5f8j0qmhg+VSM1dqa9FeqNSlk1s+pF3ftCzSoaisgHbv8YkjFXJr+WsR71fNJXWz+e1m74i9rl0fuPE3xMjQFjANYluUCBcTBuSHq7u5hWTNdAFQrRVLZrnXvyRR6KJXWpDm/5eHGLs/Hnp4eKpUqvtrdK80Whcx6gh0kEjEa52l1jXqTVPqywJYxhB+X4snlVcN1aHR0lMnJNYFsdnaO/v716/6NjIwweW5N8JuemmJ4ePiy17uOQxAG7c2tVK+TT60PcBgeGeHc5No7ZqZnGBoeWvceLuZLHNiwmpBKxCjX1jSbWt0jk1p//aQSLtXG2j2VV7gnFXfxWn47U0Sl1iCZWF+eTSfj1Jtr66dSa5JKXn2uzI2Qm0zjrZmzRSNLrw+uuBqKJdO0NGdoq17FTa4/T+KpDN55mqBXrxBLXbZdF62f875fT1JcArp0Tp3/nPXoUu+44nuucK+b0ude1Ts2cM9laYMoRAvsGNaaXZdEMkbc87Fsi3Qmg4vNjmyeLz47ScvOkErG2H/mKD/6jjuIrDa1RZHut/bIRtlsgBXGOV5s8YFrRji7VCGfWuSmHSP46qSNNALXWSUXzwAWu0YGaDgpfuAdb+X3/+Zb3Ht9nLt3D3Dk6BRxN8qmLo5u33fZ0p/g4edL1OoBbtzh+EyZd962GVtTXVmWfSnvxpeADyE22h8AvnG+Tbjl+cxPLnLLTe8WfiT66XGguLLA/n3fYGR8guVimULvGOfOneXxx58jnh7kzNE9vOGdH8JxNahY6zbhR9D7Opt23MSBJ/6GiYlrKJVWsN04drYPOy4LpF2ZFwOhgBzuvPU2nnxyLxMj4+yZ3M9tO8ZIOgabCjdcL362ri45OL9U/yp9fX184QuP0mj49PWPsLS0yr1vuh3LjhIGa2oiDscRp/Mzl+LJpWZL1D7rvIO9OIoAACAASURBVEzegd+SFE7qNL/99ls5dvw4Tzz1OI898QSf/exnec/3PsBff/EL3KMplN7yprcqb0U7feC9D/Dx3/sE7/+hH+LJJ58il8+zfYc4zR/43gcAOP7SUb724COApPJKJVM8deQYPYUCjdUyY919lOeWeeohQTK6vaLN2IPCm827JviVX/r3PL1XTPNffehh3vzeu3nqhW+1i61egl7Ol5nzf3Rdh4GeHH5D5mQum8D4HiaqBUZIKhVnJNHNc0fPUCw3MJbPibNLvP7mCWzdv67dJcmhZ2fFOtFsGga6s5yZrXHDNTZzS2XiMZdUKk1aC3d5Na2zl7JwtKDjQFeGQ6fPMT6U57lDU0yM9lLI56nUTLuGXRRu0vJ9hvpy7D08Se/gEJ6xmJxbYbB/jKpqul09/RuuAReGIbVGAze6L3QhDAmaAWHgU16eIx43OG5Iq1Ii1ixTiGc5evooN73uPrKaHd/Y0t5VTVhQ1zRim7fsYiaZxHHjJDVwOZEQoa6l1hmLoD1X88MTLJ94ib5dt7B08gi5ka20fIjTwnYUJq9Q/SC0SWa6aJRWqBUXcG1YPn2ETdt3MHvwMbRhUVfPXz89wM9YlvUZBMSxCly2EKExxliW9U1kTn0GmWNffAXWPgz8F8uyIh/E9wC/9Ar3rLvXXaJdM5ZllSzLuhsBcXwQ+NgVvOPPLcv6HwiII+LJVdF3BEb/cnJsm/e9fhd/8OA+wjDkruu3MtSbZ3G2zN/tP4Ff9hnOXfjqkZzLXAP+25f2EXdsfvC2Te3f/uvDB/jI6y42k777lmv408ee4+uHTrBlqJ/77xSn9umlBis1j7u2Xeh+SCdc3nzDMJ94
6AiWBfffPEY64eLVW3zh2UnuvnbsoncAfwj8iToblxEEDZZljQCfSr2CJN/miePwlrf+IJ/73McJQ9h23Z1094rWsOfRL7Lj+lvYddPdF9wzvvMW5g89yZNPPoJjO/RNXNv+7dGH/oqf/6kPX/Sen/+pn+Lf/up/YmbyDKM9ee69XvL/PfbUC5yYnOFf/OT3X3B9IhFn27ZxnnzqBdy9B7n11htJJON4XpMnH3+BkbE+urtzADuB7zPGBJZl/SHigH07sKA8+Y9At2VZX7ntttuuiCcgwKCP/e7v8KEP/jNK5RJ33HE7/QrN/pNP/yk7du5oH2ARvfNd7+QrX3mQ3buuIZVM8vHf/2T7t3e86x089JWHLnpPf08/s4uzTM/PM9bdy+4xEQJfOHaUMzPT3PuBf3TB9YWuAh/+6Z/gH7//gwB84Md/iGxeDrk//N0/5q3veiPAsGVZ54DdxpgS8B6gdd5c2QtrcyWXXT9kIyLbtnjLrVv5/KOHCMKQrWO9dOVknj176BzGTnLjjgvn6lBPlkrT8KdfehbXsXnTnVvav33qM9/mg+99/UXvuXXHMN/ef4bnj8/SnUuyfVzM2TMLq5yYXOLmazZdcH0i7nLHjVv47IOy39xx41Zijmi2B4/PkUi119uvWJb1c8BuYBfwd4jl7T2WZf0a8Kzy5flEvueKeGLZNttuv4cD33wQKzSMbr2GbEHuPbrvCfL9Awxu2nbBPa1mg4f+7BO0PDFRWgeeYvd9H4BUhsPf+ks23Xof8VT2gnsGd93K6aceZunUYWLpLOP3vAuA6vI8SycPsOWuN17UrrGb38CJx/4GEwb0jO8gmclSLc5TWlkhFk9GJabOXz9fAT4BnEI09H8K/EzEE2PMzZdgwS8Cn7Es69eBfci+hGVZ7wVuN8b8x/MvNsYsW5b1nyNeAx89D9DxKeCTxpg9L3vHunudMeZdl2jXT7MGo39Q/2FZ1r/Qdnzy/IuNMQcty/oL4BDgAx+JEIjKlx83xqzvBD+PrI2UP9g01Gt+4QNvx2pFZi2faqXESrnJ/3rkJX76PTdj21oVWQNgS7Uyo+pwry0KoKA+J0GC2zQGq9YQiWZ0553EjUi3K1rOIdUlm1l5Sd45PjRMURN3br1Gyozku0WiWlwQU9S3vvYMK6vyroktcvAFmtyzpbZxV53XQctrp66xYmk+/pX9TC1Vrthwnc3mzc033cn1b3qP9LdhGOzpYnVlka985mP8m1/9GA1tb1PTYYWhRVlTBxk1k6Z7RfJPanD3/DkJFcg1yhw9Kvw6plWWd22Wekb/8iffL3zc3EvKFkl78aSkUvr6p38TgN4hee7wLbvxAnlnKi08XS4JL/a8cBoQsxNAo7lAtSxmkSiR6qc//cW9xpjbL8cHhbx+2RjzV7fffrvZs2dPO9jYsmyWNcnuwf0v8uM/+VM8/LCAFmcXRMt6cs8zPL1H1lNpWcb6+t0yvj/zL38GgAGFLzuOy6qmDSuqX2NCk7FGFb4O7T/Af/7VXwPg6RckwbGjiV53ZmTju2loFKOBsCN3SEzettffCsCK+nVqmhy3aRXxtK5dZHb7d//kN66YJwBd+bR5813b22CSVDqN7bhUqg2++ugL3HPDpnYdr22bxHc1NbtAqElsMwlp62ZFH55WWPdUUcZtYstmSmVZN1G6tt4+mU+VkqyrfDpHZO2dn1XrkLYn361+2MDF1WTcuZysUce+0Md06qwmpq4H7TjDZDLJ1x/bz0px/fVzPl8ShR4zcvfb0Yx0dCcK5FNp6tVVnn/s89x8//eDJf1LKrQ/69sMDAh/GppEO6oPFlRWtb3ywPTwJqyMCIqtWjR+0oeG7n21MGR2QbTZVlXmU0xDrFxFJzqhR6ul5Z8c2W9CtJ6YrhFTk/VZPPU8lWVRvisVeWfDDzc0Vzp0ZTz5jvjALAuarYBP/u0L34nH/f+CbNuiUa/y2x/9ub/vprzqpEGHbwIua1uLyHYcyuUy7373
9776Dft7pI3wxLIs/CDkyQNnX/2G/T3TlfLFsmyClscLX//r707D/h5pI3PlHwpdKU82ZEI0GAIrwGhwozGGVDJFKpniN378fgCmF8SUe+qcBAS7MUN8TjTCxpx8t2NA7Or3vVn8FiemRHLJjfbT1yuO8nmViLq6RLOwQw1YtB3mF8Tn5yZFWlooirQzNSO+hVgsTVdepKx6XW3kCtGPChiGqonZltVOdHo1iTfj8QTDm7a0n9FolJgruWAl+Ocf/SMAVhQkEfkO65UKLbWNR8lyfa1wm1YNbKBX+maW63iaqsdSG35ktowy44TGbwdM2qrRRSVqKtWy3huS0DaWlLeptGgib3zdjQC8dEKycRw4NEslSlMUe2WzlzHmAy/7Bmi2eYwFq1pE88SpY/zmb32UM9Pn+JPP/hmLClJZqZaxFeiTbMqYz2san28/8W0AJibE7JdIJJjS+dXytFxOTZ5TKasE7cK1dwjC8Pnj+wHwyjLA5zQ5cjqeYKwg/Tu15zkAnIRWN9bYplVfND0HwEj7ms1XxqtczBOpCp6NO2QUfBCLxyh0yXv63ivjv6LhAAcPC1LXD20ScTFz9WTEnTE9JfN/SQunNvwoRVsZotgmxUMViyvKJ/nba3qk0zJHerSwZVQhuqnoShMa6prayqi2FlWKjvoelU9KnQekcGPxV0TdXcQXY8D3KKRF8+xKu0zNyEE+co8IOaeXZA+zNMH1lt4BBsbFsnJEQy6MxrulNZC9oGnr9k++QHZI5nI2Ievv1NFD0gflZ9eOG8mOiF+xekasHY760PLqQ69VitTKYi2Ix2Q8Sg3hY2Ql6lVtvkKrjby12j7By4f4XGqu/EOnK+VJJ5lvhzrUoQ516DVJG9LAHMemqyuL74o0Vqk0MGqXXi2L7fnM2Tn9TSSXVNJm5pRIM4MKuR0dFR9Ol2bmiJVVOknGGLtJ4nGSsyJlpnyRtAPVJKvVBsPqw/E0/YyVEYloLCMlEHJdQ5SXxIc0PycSbUtT6TS0vARqI88kknh11dzisQ3HbRgLjOW0E5rWymUSqiGVS6JZeqqB1TRpbsyCXEYk7v5ukcDzPSLJ9nfJvYEr0nE94bO8WfrVDBTU1hKtINAkv2FoEWgcS5TMtatHpMtQ/V5By6dQ0KSvmq2gqNqKaUn/b75WtN+uXIIvf1nQeQtza9D/K6V6o8bBwy/ganmWluexor6qovoozs5oLNuAxEn2FFL09sm4LpyQfh4+IJrTI18TNGFBc/s5rkPTkz5EpUkeelg+o0iKkbEB0prI+aabrwFg32MvAVBTafjo0hypQPje7YsGcPypvdLOfpHgl5WvMS+JH43xVSY4jsdijA0NtLWX7q7utl8u1ifjNdQv/Pj6Nx8FIAwdunIyJ2dnpI+D3dK2roLM++K8aB2L87N0dYsGn1FttqB/59TvlysUyGQ1JkzThJ08LpqNo+VZak0PTzVbr6k+INXoLeVdKinzN7Bia+VFmg3MBpP5Ygx20GJI08vNrczT0v66ORkTW3nkt0Sb3HzrdaxoOzz1fzuK0LTzwpuirrVyo06o2nlTA7ULes2k7lHVhSU2KzJ3ZJdYI4qHdL+ZEt6szJ2hVJW9JPCFF6t1aWeqW+Ztblw+/VqJhpamiWLOOvTq0IYOsDDwKReXcL1oI7aJooBddeTWdIPqzsnG0JVJUl+RA2xgRBbn6I2SVPPAOVkkR4/L5z3DPRTVIT247SYAbA2P8JpykHWZkJIGpKY0w/awpvspBrKoYjd2U1ez4uNfkRRj5yblfieqgaQ6ft1ASxVRu9W6qOrqK5KaQNxQ2l1IwnhBnn3NVlkU2WQUXCrvqZaKNGrCp1RG+rBrh/RhfLOCEWJyyFeKRcY1xmnXKTFh5DX8oKc7qtkVJyr8qvGcJDX+y9dFaxuIRWZONQv19smmEQFuqkU59Ef7+3nfe74HgC/87dc2xg+gWq3wxDNPUFczZCaZ4YEHxBzkK5hm735J+VTIycZd
DxuMaFWA1pxsrFG2jNoxOXi61byXKWTI6qaRzMhGVuiSjkcVDvL5LCmtNPzmt0rS11UFER04IECXoGVxtqgHn5p33VnhV3lFIdOK/rNTfUxNypwqldbikTZCBoMxYbsOl+PYtKJKwo7Wz4utQbRBkgW0zSQKbd+8WQS/Po2XG1PTeSIRI1/I6LPlOfPzIijcc5cIhkMjI/gKlCotyZpY0bCWpaK0xXUM/X0iQEWAlVBN1AU9aFZWZQ8wtoVX1/pmLb8doH6l5DoOPfkcfVrjrLg8R09S+JNQXkSCw8A2SRawdXicg2dlDLs0i4yvNtIBBS3ZOrerro2dk2tWFmR+b9ZagbW43LMSVFleEV7Yw4K8HNstiOCpczJPG/UasWiM1Nfg6Hg0i7IuFzTkz6/VsPXAD64u5r1DV0gdE2KHOtShDnXoNUkbjgNzLAjU5GawsDX1TqBq/ooG6pdKCp5oegyrVHjHW94CwNgukW7++n/9TwCG1AToeHWmTkri2qGtAqFO9opzNWMU1rw8TyoUqd3TNDeLZfns6hfJtHdognpFJHFbc/oGcXUEK4ijHcToB1iaCNn33Q1rYLlMmje97ja27haNcXpqilEFAOzcIXEpQ/0C+XU0tVK5XKSpZsCoPdmM8CirsUJOXCT/WOhRr4p0eOv1opVN7JyQPoRR5VobXyHrRqVER2uRtRoqQbd87AjIolV20b+bagJyNWg88Ir0qwR77xvuAOAvPvfIFfOk2fQ4efokq/Mi2e/YsoOUZieYnhZp9cyps9rvlLahhqWVrOtaWw3lzfZtAsbY1i9aQa47z/y8avo90ofhcXl+VJk3HkIylDmZ1/ve9g6Zf8tqEZg7N89iUzSG9KpaCaJK1GpmHc3JWGYGh5g6fRrggswLGyHPa3F28lx7rMvlaluD8LT6cKBm17Saz7y6z0C/zPeELfzZtlUADFEOSzumpuFEjJTWeItyHJq6tLVZ0ppfhTq9w8IP25fnbdYwl0RSeFCqFts1y1w1zfk6RxyF1wdak8xJZjBaeyyb6SERO7MhnsRjDpuHevhH75RYvzMnJyhroHdTM4v4TZkPEyOiHZnQYPrE3L2q67iqdbzG+mStRQmCK9UGRs2dWSN8dNSEO6gm9er8ApUpmTctnQ+ZQeHJyHVvACBsrTI/LXtTTUMs0OfkM8ITV2ORjQutmgJivpslv/8BUkcD61CHOtShDr0maUMamIWkBQpUGrNsOxLiMZrVPSqa0NMr/oehtM+tt0vl4GvvEc1rZV5t9lo1eKsGoYZWyJBmYYh8NzX1iUXJeVt1lwDRDk5MSa66/QckAPaeu+Xa3qFeSgp5jWkqwL4JTb8UQea1+qzf9FhdUCdvOU24QRt+Op3ithuv4bpbRAOrX7+NTEHrbek1RoEhtmo4PZmhdoaZNshWnd+Rvb9dCbdZZ9t2kTxTmkKqXhW+GQ2gxHLbFWWjlDjBeclRAbx6nSDUkARX26NvLy+J9HnmlASnvv7eW6hp0GY6uXEJMgwCqqur1BSKnUgn10A+k6cB6FIeBVXVjBtNZmalJM3MtABHLFsk+x/8fsmSEVYEFPONx77FmRfFt9NbEC1k9pgGw6uUvtqag5jMgZ5e8a3dsEuClb33Cd/+5x/+CfWyvH+6KHMSBTI0PZXgtebaSCFPXLWbvgHxs5w9vUG+hCG1epNQpXLPD+jp11Ik4VrdLaCdCf/QgZeI6XgND8na6FeNzNHFFmV2iydc0prHMvKBURdNpa4pwZYX5jG29DmlYxvdk89p+rDacju4P6robClfIstFPiULK3At8lrhIea0S7ddMTmWIe80eN2tMm53XjdKWQOOo1CTltZq82uqoTeabPFEC60pyKSi8PmYWh5WtL/JLXHqCv03XRIAPjUrvsxjagXY3T3A2QWZW6jWHiRFA85ulsD2N2ybYHlSNLCXnhOgz/ys+GYzlpbY0uTkjcDB0vXsKqiq4X93svb/Q6OOBtahDnWoQx16
TdLGApkNhH5AXe3E8Uy2DZV2bJEwtg+JdJhMydk4sXmcm+4V38OwQlSff/J/AbBpXKHD190gz+vfhpsW+3xN7eB1hcPOTYt2sDJ3jkD9RylNc9OncOnJ6X0ADA6P4qtN3Cic1aqKlBQYtVOrxpJKxIgPacLghIW9Qa+gbdukMhmyamfPpF1QP0GEDIyg+VE5kNCEhFocr53wVjVDX/U2dWFgLJusBrv6GjYQhFEEs/o5CNo+j6giY+RLMVHxB9/DUpt9Qu+PBYrq04BMo+i/hZNzjO0SrXjRrmyMIdo/r1mnphLp8VPH+fwXPgfAY48KPDyqfDynvpmFM5Noweh2uq/4kMyFx/9OApmbGgx96NhRqnOisRQX5NquXpkLC4oiLK1W6daQBC+QoOBvfUuClVN5QcN29w2w2BINq6Z+linVyIymbUqvSh+chXm6NPDXcWSSPPfM8xvii2VZ2E6s7dtJuHGaGtaRSEZIWB1jT8aivFKkpkG1WzTXX0rbltXg30K39LPltwi0WGMEe+/rk2vmFWo/s7DM3gOSWmu7avbzC/L86Rnxtfo06crLfTGdj4mE8NfXud3UhMahBeke0UhLlcqGPT6h71NZXuHcqQMAjI1uYXRYNGZX+xeqH66kgdvF4gq9PTKGVbX8RHXJqhUZr3JFxmrXtq1UFenZ0LCB/pSilTWz/m133cOyVqk4PSuWAk9T4gWKsKS7n5Ebxcfef+PbhBcrEjK0fPhpAE4dkLSDiyeOYsflnbYm66bZ0cBeDepoYB3qUIc61KHXJG3MB2ZZxByXFUX9BQ2LVFpjnDQweEB9X5Mz4lfadus7GLvhHfoE0bhaZZFOCjmRkvp3SvLlqtvDwX0ixTS1Jk9UD2txSuzVTuCR1Mqzo5qo98adglT0HU3R43QRiyuyTiXF2hnxmYSaLkdjEak4DuleuW9wpJfYK9QUezk5jkOu0INR/1at6WHU5t5sXigVeuo/aDZbUqwS1oJA9bcoSLamKaD8MCTXo+g7LTLYlRNbfjIuvocg9MDSeC9FhUZJWJfmNUFvvUKo6E1Lk8OGgbQznxOJdPMmkXzrtWq73Ekht/GaS47rUOgp0FJWliolDj0v2srcqVPaThnDtBulCItjNHg2KmozNizj26OxYivqA9k6sYszgWjURU0SHCSEN3PqU6vVAorLIiFbGqPYUF9FsSa+DDueInSEFyaucYyqcQQ6PhlFg2YL3W2tJjRXF9wTc2MM9Q2R0DmWTsRJpTXWSTWnmKrt+aTwf9voIF26xkbU95bVAp95TZfUsBWFGMYprcp9SUV3xrTe3eyCaLqTyzVe0iKxs/MaD7aqCEUNaN997TBZjcUK1B8V+YYilG5SY9kCP8BSjdQP/DWN/wrJsR26Upl24oGZMKRvSHhS0Odmclo3TxMdO1YLDc+joPFjxr4wHuzwIYnf6u/vJ50WTbOm6/CmCZlXb7pd/Ft131BT1/OOcRnbuSWZa9Oz4hubPTXJ2aiwrGqGqS6xUnRdL/vbzbteB8DoqRd58YmvALAwe0p7GpUl6tB3kjZmQgxDmvUGaS18ZyUdYrbCtzU/YiorE/29P/ReAO55533k+2RjnDupecb0nqI69hdOizN0uhzwrS98AYCsOswbTVlUQ4OyiedzGU6dE3Oip8/pGZkAYOcNWsYjSLBcFIBHlOl+pa75BI20vVFXJ70xmIos5Gu71sx+V0rFYokvfOlBgpiYuVZW5qisiqlDz/T2QTY3JxtHEBp6FFrf3SemkIQu1qpmYj96THhVqlQY3yLweUe99fmc3LNliwZdjg+xRaHVPWpeyukGFCpYAsehpWPkKPLG0WsHJ/RAzGvGfhOg+zo9PfmNMQQ51LM9BVw9/LylKotHZczGs5p/TzecsppoGraPpZnFExqSsTAnm8fepyVJ9KBCy5dWiqyqOaiiFpr6YrRBaBYHJ04qFlWyFv4vaDaQQLMjpN1U23RrJ6OMCfpAzeZeVXBAqVSn
WysGRKbbjZKxwNg2SQVAxFybmAZnN7TKcksz2xRywvebb+5r9yOmxUxdNxJctK0KykjEXbKaZSOuY2tCmVdREPuhIy9RjYq6BlolQU1pcSeC4CfawKNQa4eVlN/lmrzL1QnieT6+ZkPxms0NZ+KIOQ7DPQUsTUqwPDfPCy8KmGffAdkXBkcF0PKGN0kpk9H+Ao0VzVHp6klmR7yR/m4aEaEnlYyRiGu2/bgiujSwuRXINeV6i7qa3g8fOw3AiiZOuHWrAGcqAy6nZuSQPXxGDscXTko7yyo89eXl+bsHR7n9jWJm3PekhJ+UNNi5Q99Z6pgQO9ShDnWoQ69J2nA2+tB47QA+yw/xTVTNWE0LCZUctahhIhbj0PMCrljRQMCmSmzlFZGwJ49LduiKSREL5LesOovzSc0R2C2S+8zcbDuoslZWs8ipqAzFQXlOpUzSVehtQjSdJV/alVIpP602iJSboKw1n/zQ36ABBErlCo988wm6xiTNjQkq7HvimwBs1vCAvl7RmKa0xpcfBm3Ht6e59uZUq7zvTjFD3HzjddLHZgNbocGnzkqQ6NFjwsf9B4SvXYUs3/8D3wfA66+TkIW4QpDHhkV69RxnLRO/moFaWuvIdhXc0SW8Sdk2oaMmrQ3yA0TTCOM2RqXauGMTU81ikxYw9FULKqtk7+Sz2HF5f31ONPNmUaTs8pKYUxc1vVKxWWPiVgEEzS6ICbG4IvdktQJxo1alpZn0GwrQqCtAIgK8JONJjKUBxKp5OSrB2wrdjsIb5heKqPUZN36VGlgIXsunrCmy7FyaelH61tJ6VmktCumoRlFcWqWpGthqRXgVaQ5G+xXB7GO2Q03Nwjq0eApiiqwms7MzNI1WQndU81KNzlEttFYL8FVrTaiZelVDImaXxAxrohxyxsKyNDN9wt0wjr5eq/LivmcxSzK3C7397D0oGs4R1YZe/5b7APjTP/sTAN5z3710J3W/UX65Gi9T1zp7/b2y7sNEhpWXVQ+w1BQcpZCzYkmOnxGLzW//j98GYHFe9qa77r4XgAfe/2MMaBhDRgPAR3zp68GiArLUIjR/9gw71By/dZckZDi6/+kN8aVDV0YdDaxDHepQhzr0mqQNgsYNEBJqUJ4bSxOoWOopeGCwINLhw1+Sirs9gwcZiLQATWAbi2lql4ym7VFpPBOLMaTZyetaWTaldbKWFsSv1PICchpc6Wk26WP7JJB55ojApZt+XaIqWfN3ZMYUjJBRoEBCNL1k6NONPO/a67aQSp7cEEe6e3p5/498kMSA1DarlWc5tl98NsND0m87qsCblP56YZ2d18v13cMiKdY0G/kD75S6apGGWG022i6XKD1Ow5e2z6uUeObUNOm0PHv2nGgkpw8ek3criOXk7Dx3fo8UhN08IdntI5+YrVUCiKlmHfqgUnXc2phPAyAIQorFMs2a8DrjOfQPyTuXzogv4PhpkbgXWtK+np4ebB3XaqghDy0FOCiQoKFBq75lWJiV+VCtaGXrllbgTYgk7tUbWAmttabVAOKawsloOEKj6RGqo9LTOZ1QP1M8Sj+UlqD5VDpLS99h21cn9/mBz+JKkRGd4+VqDT/U/veKZhqlwvK1DlnT89p+2SPHFQCjYxJXTWKTjqedTdCoCo8C1aB8heMn9NriyipHNcP6ln5JEt2jYCpX/Z3VaosVTTLgakqpyFe5op+hibLTu8QUQFStNfH9jQFcWkHIQrHGkZgm255f4uyMBBq/8b43A/Dvf+WXAfjY730CgL/9my9xzajwMKbgm4z6DKO6eD0F4Wd/z2DbLxZXbdJWWH5F57/n2vz+JyW059ARqYAQzYPPf+kvARjbdQM37BDrRkpDCvJG7h+RKYKvvuVqYGE0PGLz6KYN8aNDG6OOBtahDnWoQx16TdLGNDBjEYYWcfVPJd2wHXFrFMIeKppocVH8PZWFWVIt8TGFajfv6RbpqWtE00ap3X5qerYNw7U1ojhKIeVoPa9MMo0i
nHGi/6j/LfBEarRDi1JNpHgvIRJobkTeUU0JEq2s5U8aVZvevCSL7RvoxY1tjCWWBYm4zdEjEohZ5XANGQAAIABJREFUWp1tQ42jasEVhe9GAc3JRIyWJoRdXZBr586KD+zBhx8EYKWsv1dWyWmC2YLWDssoWvDcOalGO9A3SjIvmty3/1buXz72ovJExuP47BznFJq/41rR/gqKmiqofzGlKYUKmRgx9Yek04kN8QMQlF49hlZtwbfiVNVlMqMIwxkdu4qmbGJpFSemIQTqd4qq7Nb9qAK4aoWxOFOqkUfB3ZaiDxdWNK2PZWFUGo9pfbZ8FHYQVR42po3ITKm3LyqDESH+LL3HhGHbdxJJ8Bslr9VicnqamFoHfK/O+LikeqqqllmqRBqYluywHWqqHR4+LtaByGIxreVd+rT2W6HQxbFjgoyL1tF73y0+1YSROdTdlSNVkjmxpKjM0ItSUjnahjRVDUKvqQZnx2UeNNSPGEHnwzBkRUso9UXY9g1QPJFgdGI7gZYiabUaxDW597BWXY6SDoyPiE/5a1/8HOVZ6XNag5KjGnwRCjWh4RnZdJa0oj7jOqZJDY2Ikvwu1MscPCx++PvvF3/bTTdLarg/+JRoZk/+3YNs1VItca1ovTgre9wLx8TyE9PQhcF8F0Fd/YLxjo7walKHux3qUIc61KHXJG1QlLSwrQTJhEow+GRUuslocG1NfRq9Gmvh4uOtSvxTqMiqmuYMGhyU1Cyhaiq7bhzjiW9+HQDPiCQaU62lrpJpPpcnrjbtKJlpRf08p2Y0ULXo07REguzfKWf0aJRWyGhxu0Utu9GIkVF7er0WsNGCsqHforw0yze++LcATM6ew26J1PriixqbpH3wVZPACnnky9+Q96s/8OZbJKjSiwuqqtSU9p08O8/SksSEeQ1p3PTsaenvafn+9ltu41995F8D8MxTT8q7Vpf0OSLZ1zGc3CNa3rf3iuSecUUSj/wIjvqMcpkYY5snAPje7//hjTEE0TRdK0ZLNdFKvclylExWfQO+arrG1yDjegNL4+VaJkILqn9DY9kcDUh2XLedDDnSdtu/6adtW0SuqiiBs92+P0rJFWBs64LfIv9WuzK3FiENw4Bo+NrjuEEygG8MS6uiseTTybbGFaEfIytFVUsF2TaYUK0IKfltfll+e36/+LIyKfEfNRstoji2uGrQh4/JNYNpWZ+5TIyhIfn/0hnRICxFMc4vyHPGxnoJVPttqia4FlgfFdzUNuWzeOqkq3ohwUbrwWLwCQj0GfFEGnWNt3kzNy/tWlyW9X1udgmjqM1oL4ri56LXJ3R+ZRKxdgmYVFIsDMmk7FmhJjw+uzAn0Fngfd8naN577rkHgMlJQSd+/kt/w74XJB4z0FRgK4qW9ZYkSYIbyNqt+RVOrshaS2vJmw69OrShA8y2IO7a1HRTdJIZQgVZ1HTTdhTym4jqWcUyxDW/YSEvZsbZBTnQaqNiEhgYl0waU/OLXHfH6wGoLIh57ORRgcZXK2LucJ06Bd3QovLmM1Ny7dkzakJMZMgPyiTtV8e0pYectawl5Fc0m8dAD2MaUX/80CxNza12pRSLxRkeHGbHhBzGhhBXofFOOwu9bIImWqTJDCjEe2REzCRvfvvbAcil1ayXFBPJoQMvcPS41kgbnQCgobu3o8LDgaNHOHRUzBjpiWsBmJ6W+7u75HMgHiedlTFZnpVNbWlKzE0LizIeDd19WqHFTFH4c899V5eNvlKutCsXVyt1qlHWeX1cvkvGJZFaM1FGQcUphXXH1GwVHUpRpnHHddtBvGv127T+nP7p2PaaaTmI6r35F9zT8n0CvS/a5CKHfzvjhG56iZiLiXJJJq7CrAq4jkt3bx95XQfJmMuy5vpM6Vi2tEqCp2ZON2YTj2qGaYb4+WW5p6HpZHo0U8XY1j5aWs2gVJb1cvqcbP7xfjWRGp+sZo+3BmRu5FMyFpWiCBmnz5xm204BH3i6sXsa3hLFeUcH
2qaePCkFATXrHmwwG6LvBywWl2gpMMm1bYz2fd+LYpa/4abb9G8BWLSw8TSA2WupSXpGTMoNDdGJhNyYs9aimGYPieZRENUMa9Tp0WQLUchLWQWuoWEx8S6vLPDVr0p2jYa6BJaWBERWVSHH1bnsGIvuQXGPDAwObYgfHdoYdUyIHepQhzrUodckbUgDc12LwX6b1pKYp+pBiCZ6xmjKmUiCzWvG73gsRr0q0kwqAkh48rnniScA2LpLNIBz52bbQabphGa5Vw0vquhbrdSpa/Crr87trEo+99wiMNdkLo/vaKVozVxfnxTJzC6LRD2g+cxu2XkdA10ife2dObVWj+sKyfd9lheWufsuMTnc86Y3kdBcdW7k9LejHHqqmeG0Je26J+1bOicQ6WWtB7W8KBD5k8dPMD0vpp7sgMClURivpalxPL/JI48+BsDmbZLZf7xHNLukgmHSsQTNhkjNJ0ui1WYj6LHCgWdXRKLs65ugps76bzz6zIb4EfFkcWmp3cdGw8NTM3FMU1zFVGqPxtJ27LbJEDvKuxflCVS4fwS4SCfa2lqkcgUvs/1altUGdkQU5ZmMNDI35rZNiNbLTIdrml1UFgCSCvO/Wg0sCEPKtRqhVtIeGRwgrppXTc2nGQ2HsDS43HIMsbgCJ1TjqilAIK5B+dleAT20bB/f1VyIXWomUzBDWc1xO7Zuxp+VcfY1Tdaq1lnbsV3APecmj9FSLcjSLaKi8P5QZd6sWgqy6ThVDcx20rn22F0pGcsQWCGWpqaq1GrUNTwmClL/nY/9HgBnjovloOIFHJ8SzTKyakRj2opAPQoMc7Db88BSvhmF/bdnhzGkMnL9ku5tUQB3SSt1N5s+p0+LOdFS3rSirGNqkoxmTDwWJ5OQMalVry5vZoeujDoaWIc61KEOdeg1SRvSwOJxi03jcQqWSH7HJ2vMKQzcCzTwM6tJaTVoOQgrOHpOLqtEVa5o9dmWXOMY+cxlu5nT7M/nqlHApCac7ReNzgpb/y97bx4mWVbVi/72mWKOjMh5qrm6pq6uHq0egaYb6AZBbHDgXpyuCoI4fHKv+vQpXq+i7/pExOcVVPQhCqKiXLBtaOYGeu7qrq4eqmvOGnLOjDnixBn3+2OtHTlUVlVGC98zdf++r76ojDjDPvvsYQ2/tRbKFXLmJjJ0zwJnqVaBnZ4fASx5Nj36zm8wDZ/TEe1k+vLocB/OXyANcHG+1ckSv14YhkAmncBijdr7zJFDGGTfwtAgOctVxvlymfwSaLdhsRQ+to20qk1FeobJ40SwaDZIIhwcGkaak8iaHAjd4mDSkRHyU8xMXcDCIvXhyChT9hWBghO1wkp06mwlWJtNsLbhL87zw1AfDY1thc8agezSKQ9Qqqog8KGYFpZlQyktHbqz4kjwCDRNsxOwG/E7V1K1yVK9yWQTwzbgqHpncqUELpc1mB+3owEXCtSP6n14vo+I/WSrNS/lLwtDlfg2gJKx1b26hWEaSGfSiNhy4AVBp2Kvou0rf5+SLQ0bsOyVY9LjsSPYb5fmqtT1eh0p7t95rjBsWTSuilyfL13II5skzWtogHzTC5LmU5oz1w8O9nV8QL7qQ+W75IoIuTzdp1atYIHrdEkj2zXBxbIsDuKmZ3EbTXhMozeECr6medPHCbB7egcQxqoCOQdsBzRfIlW5nUkdcSA770sl1Vap1KDIQjBQ4ed9+JGHAQCvfjXVMHzhRSJKRRE6ZBWT2xpz+5TWF6m55kucP0skDjOR66o/NLqD1sA0NDQ0NDYkutLATEsgX7ThzpPNuzhoAhmy/y7Mcrof9nVYDqdN8oE4UPZpOqbqksSXYd9Vm0s0uO0F+HxspGixkqQdZYPP51PI50lydJlqvMAJRlUiV2EYEEz/dZitxG4jOCzFb925la7RkvjGNyiI8cjxObjt7iRIQwAJO4bXJinxkUe+AsmhBPm0oviyxsn+HgsGtmylNFP7b6Fknzs2kyZWYdruTJmkWieV
wI4+0hbnuabTNbv3AwCuvoYSCH/qbz4Oi2t8Bay5+j5XFlapfZJhhya/dRsFbs+dP8YPwTRj1mj37t2FNle03sSprrqBZVno6+uDwcHBUSQRsGarNJ42J4cVTGUWwugkzvVZojXjlf4UpZ3EMupcb7WfS7Ec41h20hrF0UqmodISgjBEwHXPFI1+tSbWoeVjSZKPu421YBhCIJlyYAj2//lepzp2ipmGglOyOayZwRTIc1qkdo1p2xbPsUTM12lzWx2wIgLfpfZPt2kc9Y6RTzSYnkNKJd7O0T0GeugdLyxSUuzennxHG2+EdMHdIzQ+Y6kS/nJC7WaAXtbKghAwje5YiBISEeJOn1oJBwlOB6b86cUiWTJUNuU4lp33FbIPOeZ6ahGPnbjDUkXHr91o0pj2PFW6hs8Jo8539/8LhcM8/yKtCU8doirewrAR8VgLldbPGpzksRizrzbEUghIUnbHatboDloD09DQ0NDYkOi6IrOVtJDMk7TYmzVgcbkGO0VSSI3jqxCpBLaDiGxlIyYtxUlzjIYq42CSxOXJuFO1WDHQhDJXs5QZtamyLV2INIYKpw9yOW1STyEPSwWv8j1aLNnOLhATr8x+uHqzii9/nco3zLaAtt+dBhbHMVpuCypq9p7XvxGxT34okyW/mKVC2QmkdZBkzXWmwoUCKxTHVVKFNzn+6Njh01h8lHxU27eRxvVdzBbz2ReWchKQqsQMf2eoVD8sELtxDIslxC3jpIG1G+ST3MdxSU8covIsU2ePwWV6qeSUXN3ANE3k83nEkWLwGfD43dRYs1O+H1MlXY6iTgkQVXwxjJVky5K3qoQsDAipVK2VTjrl34ijGBIr2Z++y4HS3Fcx5FIqNHW+ktz5mzS/B8cyYbB2pjSDbiGEgGMaSDODL4oimPzQpqn8ftQ2xbCVpoF6nf1D7KdR56jK5D6Ps8AN0apy4mK2POS4bI+aK0HLhemooGFOk8XsYOXXSlgmCr0UxyRr5EsTzDJuczV1t8VtSKeXgr6l7FStXnefQEAIs1MJXZgC4HFjcwFX9XJUkc2EaXZUbc41DAF6T0rb6rBS5ZK21tdPmqyyiEgeF1EUI2aHqWJUznDx2a0c31lvBmixBUU16CJNjO9pmGbH76pY1a3aYlf9orE+aA1MQ0NDQ2NDoitRMo4FGg0bMIkllM20YadICsmwk6mnh6Pba1zyvTaLBktrQZs+cw4xCpMsYYVsf7YsAyr3pZ1QPglmTzG70bCW4oKcFMecccxLiTMU1GWMfC/dQyVCPTFBEtBLzxE7aIgzdAyNpwHOnNHfk8NsvTubtWEIZLIOelhKzA3s6tjTkywfOOzzkMwQS6QdxG3SROp1lqo5/mdwB0nMO9Lkuzhx5hTACXBtTqw7OU2+ij4uwdLXX4TvqvLw5CdRmS881ngCrwWL41WGOIny2WmSMmfPUaaPNidlPfXCYfT1sQTOCYS7hYDRKXLqBx7aHhdjZA1bScVKU5ZR3Enc7KkYpFUxWkoDMgwDMfs4V0VrqUQRkEIsZevglGOGKvxoLpXplB3lQTEMFbtNHcDnCqPz/zB4mSxEIZBxErC4tQaWMn00OPZJ+dwc9lemMuml//PccKtkyRgaJBZqmzWyQiYJe0AlH6ZjA86orOZMKpuBzZk4VKcF3K/9AzSvndiCyVaOBM9rKbkwZqe8DF/DNDuxfK7rrmCBrgcSAlKancTNpJHRb0ob7mhi1pKfUo0F9Z3JY8TmF6e07CiKlkL5FIuQE4OrPjHNJa0/xVlNxjY73AY6x/Wjjuam2qWSO6tnVt+bprmM+ciJys+e6apfNNaHrjYw3wcunAW8Cg3q3ECIZIrNdlwTp7eXAx9ZFa9UWigvcv5B1qKVcz5eRYFGHHVUQrV4qRxxLpskZQjYTCMOW2TeiJjMEfGkqzRaHfpviTfSiZNcuXeRFnqfAwyHe4axdws5uGsucGKm1k2XII7baNWPA0zPt0UWs5wj7cSLEwCA
JJtzHHZ29w8WMdrPNZh44vT10IbL1ka0megyOJjH2ChtItOc/fr4caL2bvXJvOF5Hup1umerRZtSJwCTN7DId2EmyFT4wvPkFFdU+cFBCuQeO0DkkMGBIfQPEHEkyed0BUmTWdGWg8DvkErUPZXZS5n3BERn8U7ygm3w4hStSgEVxzEEO8kViUOZbBxzifjR5vRhirShFjl1HyllZ4FptZhUoioG8Maizgl9r0PrTiZfXiCzAGBLCUMJYKZ1UfvVIugo4S4METPRJMnH9OQUzZyum+SA9tiPkM5ySiru5zbPDSUUpB2rk6KryYHdSQ5od9l87no+bMmJBLifDZP6g6chWi61s1Ipd/qX6m11SeKIJfx21Ol301jaTDobAq8BKmxAQpL5F0sCrsGbkp3i8AquNp1YYdJcO1Qi8P3OOFTftXxlXmTTaRgsmUrZ3KuqHSjToao3ttzErMzFGt8ZaBOihoaGhsaGRFcamBQWIrsfgUOVfb3YgxGSqSvZQ1JJYYAktaJBkkxvK0alRBpIZYGd0U1OphmyGYIDXuMwRptJCEqaUdTnOmdidxtt2By8mDMoSDA2SNsIArpuIiOR5CzvBYeO3Q7Sfq65ljSK3Qeo3s/WnTtx8BaSRC9MNfDwqYVuugSIJWK/DYNlASswkWfSyqHHHgIAzMzSNQW36eDBG3HHrdSHVc5MfuTpxwEATdYajnN9sNMTE3BZUlbElmSezHs1TgRbLy+gWSONTcm/FkuJPTmSAEe3bUOxjyrwDo5yEPf1lHaql0kczvKM7my27KR97wJSSgRB0DEXhmHYiYjuSKcdDQqdeyotRKV3ClgaVucoaVhAwmQzoKIrr6a/yzheqsDL112tkdm2vSx7vbHiHh2tgrWtdCLdaWtHEu8ShhBIOfZS0HUcdZ4jzzXfOuYpvkelUoZkDayHTdBZZi5ItmS4nqqkLREHNNZyHAysLHrK6Nn0PdgB3dNlAlZokPa5UKXx1FisoVDgjPVNGlfJlDKX0b3LnBG/3mp1gqdTqdTLCjGgcc0kljDqVANXKbuWzIGqeoK9RLtXoRqs0YeK8KG0dcgOkUL1qTJJKzeFaTud31aHSgQcyG7EYYdMFKoAe56P8SoLwXIz6sut3q2xPuje1dDQ0NDYkBDdOF2FEPMAzn7nmvNvAluklAPrPfg/SJ8AXfSL7pO18R+kX3SfrI2u+kVjfehqA9PQ0NDQ0Pi3Am1C1NDQ0NDYkNAbmIaGhobGhoTewDQ0NDQ0NiT0BqahoaGhsSGhNzANDQ0NjQ0JvYFpaGhoaGxI6A1MQ0NDQ2NDQm9gGhoaGhobEnoD09DQ0NDYkNAbmIaGhobGhoTewDQ0NDQ0NiT0BqahoaGhsSGhNzANDQ0NjQ0JvYFpaGhoaGxI6A1MQ0NDQ2NDQm9gGhoaGhobEnoD09DQ0NDYkNAbmIaGhobGhoTewDQ0NDQ0NiT0BqahoaGhsSGhNzANDQ0NjQ0JvYFpaGhoaGxI6A1MQ0NDQ2NDQm9gGhoaGhobEnoD09DQ0NDYkNAbmIaGhobGhoTewDQ0NDQ0NiT0BqahoaGhsSGhNzANDQ0NjQ0JvYFpaGhoaGxI6A1MQ0NDQ2NDQm9gGhoaGhobEnoD09DQ0NDYkNAbmIaGhobGhoTewDQ0NDQ0NiT0BqahoaGhsSGhNzANDQ0NjQ0JvYFpaGhoaGxI6A1MQ0NDQ2NDQm9gGhoaGhobEnoD09DQ0NDYkNAbmIaGhobGhoTewDQ0NDQ0NiT0BqahoaGhsSGhNzANDQ0NjQ0JvYFpaGhoaGxI6A1MQ0NDQ2NDQm9gGhoaGhobEnoD09DQ0NDYkNAbmIaGhobGhoTewDQ0NDQ0NiT0BqahoaGhsSGhNzANDQ0NjQ0JvYFpaGhoaGxI6A1MQ0NDQ2NDQm9gGhoaGhobEnoD09DQ0NDYkNAbmIaGhobGhoTewDQ0NDQ0NiSsbg7OJZOyP5dDHEv6QgDCsQEAoUF7YdoUAAC/
1QIAVJouomXHL/uA4HNMi5phGkCSr5fLpgEAUtK5YRTzOSZczwcA1OvNldflT1MIGPz/mM8Hf6hPwQfHkAhj/k4CLc+DHwSqiVfuk0JCDoxm0KgHAABDJGEa5op7GNwYy6RnswwHpknHBCE9ixdSf5k2NcZyIr5GjDiOVlxPCH5t/GxSRjBNh+9lcJfQOVHE/RcIxDHdM45Xyi1hFPD3dO84iiC5oyLu99JUc0FKObCePjFMU1q2DSG5Gw0BJ0nPrt6V36Z7Sv7CNA2YpqEOBwDYNp0TcbvCKKS+sSzE/NLiIOqcDwC2Q/0QI0YUhiueQfC91JiKoggGn6eeV8qVnwpCCBhi5flu0113nwBApqdXFofGl91raS4YnRHH9+C/Iik7x0DSc6jxZPK7Vk2NlzV5Zev/NVhq65UhUJm7gFa1tO75k06nZKHQgyhYGoNCmOpyAIBEIrHiEwB8n+ZNu0lrgOd5K85BZ+4Znfmo5lznU607pgnDsFb8psaFWqMMYQDG2vL+RV0jl33L7XjpxRe6Gisa60NXG1gh6eDnbr4OFi8IKUtisncEAHDCpQH4w3u3AwAmzh8HADx1dh41/k29VLUQt3jQhWpCigh9vTkAwNvf9v0AgKBVAwDMLcwDAIbGRnH0xAkAwGOHngcAxDxqB3vyAID923difm4SAOC6dQBAo9GgJhi0KCb4yXPDOQTOIADg5IsT+NJjz3TTJRgYz+B3/v5uHHryFADg+UM+Ng3fQNfOpAAA2RRNiozVCwDoL4xjcKhIv/VTn9TMZ6m9yWMAgHZcoWdzHSQimriZHC8mvAFZZhYAkM8KpB06JmgO03Wa9Fttkfrv5PGzMBN0T9jUrguTM9TOLC36lTL1lR/6AOjdVKpVAMDf/uajZ9fbJ5ZlY3BoC8Abpm/GKI4VAACD/RkAwPx0CQBgCPrbcRIQoHEyPEDCy3U3HAAAlGvUhhOnqI9T6RR2bBmjY4t03WyKF7ksfXqx31nUahV6Lps3/vkpGktnzp6H00tjxkxSWyNBC2MqnwQAJBPUN7lkBjYveEqA+8AvfXTdfQIAheFN+OkPP9DZiCWA0KP/q83VMekeauH0ogAipmPgtwEA6RS1ybapHaagv9tIdDa1ALzBK+EMV95T1EIsAJg8Jw0WhAIWFIJ47XMAmtd/+rNvvOJ9lqOvvx/v/cX3wmIhbfP4FhR7+wEAvqC5Kix6PiU4tNtu572EDRobp4/TelMt07iqlOjz3NkzmJ+h12RxF6RYSI58Ehpty0QySXPDSvB7z9G4TOVoHhX6BlDoHQUA9BR47vJ6k+PPVJbWLjOR7myODgtUN12zt6uxorE+aBOihoaGhsaGRFcamB8bONNOoOWS1OOINhD1AAAMlgIXzs4CAA5NXQAAvDRXhlRSJmteySRJOUFIUpdSzZOpBCouiXhPPEda1kgfXd8LlQQpO9qTbSs7IX3s3rEDALB18xYUciTFz0xP0CEBSa/ZImmMEWsh6UQDo/0kZZ030xCiuz09CEJMzi1idBtJZaaZQ292u/oVADB55jQA4MzkNABgbLSFpiRprWiVAQBh/iXqiuwiPW9AUmK9EqLXomdxHJJA8z3U3lxqnI8N4IekaSl7aHWWrBXl09RZx586jMwmeg9jO0njTGboHrU6neu1WdIXNhYWSUvxud+6gRACCceCjOj9RJEEQtJwBoskXbdLJP26Dbpn0kwhnabn3Lt7JwDgql1b6VkarEEl+d0YEvuuod+2bSWp2PfIlCQNup5hAhabIGOftYcmaVc+a6m3tPdC2DQWjTRrYA6bgtN8Kx5jjrAvMiF+4Je66xcpJYIwhmSzrgBgGMqUrDQmvr/SbUwDYIuHw1p2aNJnK6BnTdls5rLijklWqV5LplCx9Lna5qXM6XwOmUuNFefLS5gSl5tapZTrtTV2kEymsGv3Xpw4RvN9oVpHOkdzPpGi99duk/VEaTOx76Lp0fgZGKT5fOvYVgDA5LkJAECr
ShaMW2+/A9OzZI1xbOq3AmtKzx95EgDw0FceQDRHc1SZZyX3icmanuM4MGP6TpmpLTZpptnS0tM3BADI9Y6jWCRrS19fX1f9odEdutrAYgG4pkDJYJ9M5KGPVeVsnhbwdpM2t0qdzTftANJQ/hj2V7D/w1IKYECDvul7yPIEeOLZIwCAXTtpMduzYzOd46SxdSttVM2YBvjsNC22tbpL10tmcNMryfx0+MmHAAAu+0PqAa1Mi01qb6/bxphJC2S7IZSbYd1otyMcP17H1u20YWzbvRmnT5yk9rVo4mV4M63zxv/8seeQHb0KANCXo0U1NOjGF07TBgZJ5xSdUUjwIu/QPXp7aKI0qjSRXjoaopihRTmXZxNSHy2MzUn6fma2gG3j9F06S8eEMd3D5wXCcuj7cqmOVpM2LuWO6AamKZApWLDY1JmLkkixaYYtdEhb9He7TZtnq7EAmabj56bot2ciWqTaPo2lvkHaeEfGhzEyShthqkDHOnxvXm+QdMzORhE02T/CpjePn1N6MYyIp0CCFqfUIC2eYYrO9bjBUsglH2G3g2QZpJQX+dcA8nWq3wEA7IuRUnYEv8Bz+VmpTQ73ob3sOsp02Nm2VlsO5VpfrkQcxwjk0iYLALFUgt3KZxfLrvVy/G6maaCYy2H7TpoPF86fRalEQnBebWRJ2iAck+6QcQy4bX4vLCTx9EZPD81rn/sqjHxsYsE2lWRzc5o++zdtAwC0pMQXP/N31J5Q8r2oV+2Y7hO7Pgz2Fbd5k4v52edVn588wQ+V7vjdlvvtNL790CZEDQ0NDY0Nia40MIEQCVHCSJrEnQJs9BZJOjojSYvJpEgaSQiSZNLCQpBhgkHIEgw71yPeP1NsOnISNoY3kUlgdHwTAGChQZrATI0kqptvPojSLJEP3vLW2wEAD9z/IADg0UceAwBs3n8D7jpwIwDg1CSb7x4mc0HVJ/NBg002e7/rBrgBmfH6+5OwLCXLrw++L3H+XAQJal+t7zx8gzStyKL+oyZCAAAgAElEQVTnLbA54ardJPHNzlXRZNPckRdI4wpZSy30kyQK7k870Uaxl87PpknrqNdI8luYpX6MfQvJPD1XzScJ9Lk2mTG9XjJhGINnkU7SvcoVcnBPT9E9FJEg8KhNjWYNYai0vu4lSCdlYevVQ0i0mT1Yl5icJJPOsSPUBkPS0PNqpGWJ0IXh0fFnnqL+O+fQMSFrPP1DpIGVx4eRiUnDHszvBQAMj5CmmU7QuEsICZ818oZPz+LXSJpuTLDGPleGX6dndtnc27+Lxp3B4zo5SOZaUTAhWPK2jZehloI0lAASYpl2s8RCZC2LzYKKDScMAxETKZgYhzSbNdlyhZAZv56RhoeVbesY3jta4/ravmQ6XPn35bH8idaHdsvF0eeeRb6P3m3KMlBenAMAuC69r8FhIuyA50ggDfisKQkm1Bj8ads0ZopFIlY8/PDXkGOCz76rDwIAPJMtD+zByA8MI7CoM8tlWgvSFvVX2lSkLwvCouuonlCsT0W27fSxX+/0V7317eODalwMrYFpaGhoaGxIdKeBGQJOxsL2HElL26SFHofs8KgSaSNdICml6ZBUGNsRbrqOaOVD7MM4fZJ8ROfPkXPVYClHhm0kWcq69WY6Z54ugyce+joA4NixzYhc/jJD2kaFfRyNgPbjk9OLaDKFuxmyX6VCx3hJkqiv2kIaSmFoFPOLpBXcddfVePDQl7vpEkgpEHo2KnMkLQatMhIZkrqKw6Q5yQRJ1YM7mdoeN9Bg6TIFOmZxkTSBnEN2/9FxstMHmEM1pt+apQUAQNKkYxrs8svlEwgdkhznmtTHD3yGbfdyCgCwwxmEKalPFqbI7+S3mYbN/OI2x+JIIZBl/0MnlqsL9BRyuPd7X4HmBEnSj37+MZhMsmjVlD+UtW+WZ3vSNjI2/dbHEnIhTW2AxVpDwDE6kzUcvv9hAMDZwy8CAO583W0AgP17tgIAMrYJp8rkoQW67uI5
0jzbLxGZpjkzjzb7SqZqpCGePXGebsnkofRmGmP7XnsN7DRbEqJ/hQ9MLMV8mRBYCpUzOr8DS6QJy7ZgdGLl2KfHNPw2k1saU/Q8/bv2I4Dyb9J1FOVfXVfEosOzWEbrWNlGXDoebm1Hl1JF5KUOuCTCKECpMo/nDz8OALDDGMPbtgAAfH6IdJYo7en0CN/N6Dxfi8NklFIcsL/0pWcPAQCe/voXkcnQ+SMDdP7QJvapsbZ2zb5rYf3wTwMAJs8T271aoblWr9GYadQqaHLMmevSmAnUfOmEB1HfO1YKDhOIFDEJZxa66heN9UFrYBoaGhoaGxLdsRClQMO30WOSRBMslHG+QlrUHdfuAQC4PkkpYywhJdMStxTo+H0D5MNpsVS4wAydVpW0h8gHLJ8kqi3nzgAAUhWSNnsHWCN5/pmOxvboi0cBAMemSMtohyR9TZ67gDmmgR+8/ha6XoF8G3/0yf8NAPBd8qMdenIBs7MUIHvD3Xtgxl43XQIDAglhI3BJSyoOD2NyllhUtTb1jTQoyPLa/bsAALfeM4yMQz6roEWfx4+zD61M7U6x3T5yIlyonQMA9OVI4hstcnBtL0uSMNBkn8CpCyRBnv4W+ZH8Oj2b2FRFa440r5EtJBWmCuzvM6jtBmfzSKdt+Kwh2opP3gVSaRv7rxvDSZf6slpuoS9Nzxmy1LpQJ8l2hNuws5CDxb4eFXBc5GBiJ0XjR/lMk8kUMhnSG6pzdJ1j938NAFCYYd9YMY+QmWqxz74rl/1jPP5alYUOqS6qcuaYBRp/6XkaxwEHQXvXb4e5ldoVqbj8LhF4PibPnIPJjEPbMjuZbAQ7uBI2Z1ThwF7bMxAz0zfJWW7A4SehpGMTw1sBAOWWh6ZQQe4c/Mu+aMWcFDA62VqWnDgrqfYSYnXimg6EsUpvk0ZHA4lF0DUT0TRN5Ht6cIYZuwszs3A5lCDXT9YExXRMcfhN38AoLIv6zWNrTIoZpieO05rw6Le+CQAwogiVBdJ+pi6Qdp3IkV/YSXOQck8Rr7jzLjqen89ts8WgRe+/Wa9ilufWxBlam06wJUlpeOPst+/rG0IqRXOzl/3XX33qp7rsGY31QGtgGhoaGhobEl1pYBYMDJhJjDGTKZ/P4XD5AhbaPn7sT/4K44Uc7n/PDwEANj2Xwx8fOY44ijDS+BbeuXMrEqfIVh+xhLVV5byLBE42Wvivz5/Ac7UmfnHPVtxmPg0A6GGtKuYURFEYAzUVh+bgd594CmcbDRRSSdy1bw+StoW0dFHj9DFje0nryWVIesvCxT998UEEUYTr9l2LHbuvR6tFUvzpEyeWcqoxBIl/HwLwBgAtAD8mpXxa/R5FMerlBvKcEmqxNo1kVqCx2Man3/sMcgMJvPKnSTL7yj8cwRP/MAUhgBvuGcJt3zeGoSH6bXArSZCtsyT5nZ8/BSklHv3bMs4e9mAnDLzzAxa2XJ2DYZC/0WL/o2P0IPRJuz38z0/g2QceRW22hOzBX8Tea0gT27Otilyanq04QNJ4q0V9evJICY/872cQ+gE2XzOIHdeNIOWw5hV1ZOr9Qoh7pJQPrtEnFwDcAuCdUspPm6ZAT4+NhQXyLdpGBlmT2jrtVXF48iRMw8BYby8cKTDfdPFPF0hbvXnTMO64imL+PJ+00jprR06KtLjYAj7z6As4cn4KSdvCe15zG8bZt9k6T5r19Nw8wog0MMNIYapUw69/6kFUW21cNdyLX73vlcj1puDVqE/SHKdW4tRErdkSjpZqePphksB/Yrgfr3n3zQAAf0nN6PQJj5UEgI8DuBFAATS/flJK+WkAaPoBnj43DciI22XAFga8yhxe+MgvINU3iuvf/SEAQO3kIZz8/F8AMsbQTa/Bple9tZMqbSunvxpOcgxmOgMpJT7xof+OF5/6JuxECq9/1/swvH0vXE4/pfJCmrbTCYguz17A/X/4K3AbNQzv2IvXves3YFo2BJbyPnqcc1Cdf+KxL+KJz34M
hhC49a3vwLWvfktH6woNYuIJIY4B+DkeK8v7ZBHANIBXqrECYQBWosPUnT09gaTbQr3Rwoc/9g/I57O47833AgA+98/344knnoZtO7jt9jvwutfdA5+Zs0oxPPL0EwCAaq0CKSWOHD+H+XINhmFg9/XT2DQ60vHrBT6tQw3ZhHJVLc6X8L5f/1VUq1Xs2bMX73//78G2HSQdGw6noavxeLzrLoov+38/+UmcPHUK+/buxfv+j1+BlUx3YgZV0gZ0MX+wBoQQ2wB8CkAfgEMAflhK6a917LJzfhTAr/Gfvy2l/KsrHL/6Xf2glHLiCufcCOBjAFIAHgDw8/IKlFUhxK8A+AkAEXicrHHMJwC8HpfpE6DLDSxpGtiTSyOzSCq5acTYNT6OXKOF4fQ5/NEdB4DpGUSxxB8/ewwfu+N6bEeM7374Wby5x8aeFL1M1eueMmU4CWRs4Df3bsUX5yowY4ksU+6DHAd0tjhvoicRsfni/hPHcMdQL/7o5hvxNydPozI9j/+0bx+i0SEkJyYAAC3Fis/nEEURPvvVL+MD/+evwExm8Su/+9u4/a5XYdeO1wIAdvZnkXzwW6sf+/UAruJ/NwP4MH8SJDnGDSZCNNwKhoYGEbkR8v0p3PfLN2FqagEylnj0k+dw+4/uRqoIfP0jx+AMAdtuIJNhT47IAsksmR7yfeM49lgZrYUG/vDLb8CpZxfxif9+GP/X5+6ByvDRyU8XLEJyNobcyC7c/bPX4rPv/zByuSzufC0dk8AcRoaz3N3Up8efixHHMb75D4dw19tvhttawKOfPYFifwLbmHASqWwpwHEAfyKE2AXgnlV98iEAn1MHCmEg5SQg+Nx6uQKDNzBLhEhaNgayRcgQCIIMjs5N4yevvwGbBm188JvPYN/YAIbymU6yZocDWXNZusaJ+XlMlSr49e9+NWbrdfzpVx7FB3+EcvB1QjQiIOBsDe1mCX/whSdw17Yh3LptBH/11Ev4x288je+9YRvmmFqfTHLy6Jg2rIrbxmPTZXz/NUQo+NhfPojb/9N1yPWkES2ZmTt9IqWMQJOyLKXcKYR4G4DfXT6QhGFCZApLSZgBeBLwnRac3lHs+Jk/RRkSMo5w8l/+HFt+9LeQz/fixT99LxI7b0LGphALyfkeC700fUdyAk8+/BDcxUl88G/+BaeOHsGHP/B+3Pdrf4aTi9SHwlT0+RYEmxWf//jvY/j2N2PrTa/BM5/6IL76mU9iy23fDSGWiB1qAwuCAEGrjqf+4c9w8N2/h2Qyha/94c8jGt4Hm028jgl4XgAA9wL4Mo+V1X1y3/KxIqVEO4zhcP+bloUw8BGGAbLZNN547x2YmToPKSUeeuhJjI8NwLKT+OKDn8fzzx9GJk33HuglFwM4PMUygPlSFZVGC3devxuVhot/vP8L+KWf/alOnsmIzbSxH8Fm0+2ffuRP8MM//KN47Wvvxe/8zm/h05/+O9x33/fj+LGjePjrXwUATExQaM7oKNH79+/bg91X7cQzzx7BY08+DiuZ6WSBUSZzdDF/LoH/CeCDUspPCSE+wv364UsdLIToBfAbAG4CDbVDQojPSSnLl7nH6nf1PwH84BXa9WEA7wDwOGgDuxfA5y/Trn0A3gbgagCj4HHC86cDKeXbhRAfu8K9vzMmxOfmS9iSSWFzJgXHMPC9g0U8OF+97Dn9CQfX9WRhXyFLwHI8NDWLN26mVEL3jo/h4ZmZyx7/zHNHMTY0iNHBQdiWhdtvOogjR45c6TZvBvBxSXgMQEEIMbLuRjKq0y4yvQlkepMwLAObrs9i8oXmZc85+nAJN75+CEII7LyuH82aj/Kse9lz+jaPIdvfu+52zUyUkOtNI1dMwzANjGwvYO5cfa1DfQAnARzEGn0CksC6Rs1zkbIdFFMpWIaB60cH8MLM5Rlbz52fwsFt4xBCYMdgL1p+gFKjdcnjpZQ4MjmPg1sog8mrd4/j8YnLj5ULDRdjmSSSlomkZeKa/Vvw
xENHVx+2vE8A6hcl5X4aQNfjBADcC8fh9I7A6R2GYdkoXH0Hqi89edlzHv/Gl3H3d99HY2XftfBaDTQrl+5HKSUWThzGyLWvBABsvvl1mHn+0cveo3TyWfTuuBZ2Ogc7ncPArusx/9JTa137DFaOleV9cvdlb3IJVKoN2LYFx7YghEA+n0ejfvn5M1+qYqCQhRACxVwaLddFtbbm2FbtxpNPPo67X/M6AMAb3/g9+MY3vn7Ftm3bsgUJ54rxoy97/rDGdheo/wDqz++9wv3uAfAlKWWJN60vgTaXy+GidyXEpRdkXgfzUsrHWOv6+Dra9WYAn5JSeqvGyctCVxpYFHgoTZ3u5CV0zRitnixa7TaMWCLVCtA+egoXFioYNCTai3MIMxZ6UgJHai5cpoMLkCQdMrdXlcuQto1ACPgQaKgGDhLdPVehvbadBPwtpK2UHnwEu67dA7/tY0BKVL/xMIb7i2jMVdGaIpr19FOU5T1/9S5MHDqMvG1i8fRL8NO9yCcTOH/6LFoLJJTUbB8RU5SXYQzA+WV/X+DvpgFKu9Oo12E2WQOyLQStFkLXA9EOFpFKeCi1XaQLNgwRIVcsoGdoAQtnXLhMWmnNkqS7bexqAEBPagBu6SwG+jcjqNLzDowU0JhLYWQzbVAtdjTDChFzFvPTJ0nyMwyBwkAGN9xI36dwFYKIerXd5PQ7wSxqiyUk0xZ8t46EmUA2l0Z90VWZjCCMFZRx9exr9ckS20NKIAjBrHjYMFDoIfOfdASsBRMGt7cRxLANB/V2BNtuIWUAk00Xoe8jDKhPxjdRzseePnruaquJXGIAQeAjtIBCOonZcg05x+qQINqu1wm3mJ2vI22baJZJiEpbAgu1JiwYncDhRpMk5ZZHja54IRKGiTkmdYiUg/mpCuIwhmWvCAZWfYLl/SKlDIUQPoDcUrdISM+DjJeqMsSQiAMfkBKR7wNCICzPwc73AWGMUMRI54toTp6AwVnpZ6okxMT890SlhZMT51G45tVYPEXhAIl8P6anZtHqJw2yxs9pwICMJcJmFSKRxZRrAG4A3yygUVnEtGdAyHiJ18FEkTi2UKnWEeeGsBA4QBjBT/Viem4WYS3g5wG8JZPzRWOF+6QKoBMdb1o2Cv2DmD1BwoFlmmi7LXjtNiAlAs+FbQmEgY9kwkYqYaHR8gAZwfP8jgmxxpt1xHOip1CA224jk8p2tPJ8NoupmRlco8Zem7/P5REHMarVKtKpNCoLi2g264j8Ns5OnME3H/oinnrycZw+TZUimlzZ4sxZIkjZtoV6o4W5hQruf+BzMMylckkqIcCl+mTZ95djS/UBqEgpw2XHj13meFziHus+Z9m76gNwKUlojK/b7T0e6/KcS+LfDYlDCLGukhEaGhoaGv8+0JUGFkYRFhsVnOdEr2EcwhHDqLs+YBow8hksunUkHIEZPwRMG6m2gfmKhxEY8Cqc2byfCAeZXZSotx2SRNNYqCEWNmLD6gStevOs8nMtK1HIwmIb/kAqifnpEoYP7sdstY7+Yg9yt1+H9JyL5iSRAiovEdU1PjeLTGkRldl55M5OolSIUbowBduLMfkCSX/bnBFE4UUc6UkAm5b9Pc7fUXsEYCYMuJyguHG2Dm/BRasWADJG6DaQSSVQHHAw8UwNnl9BzvIgwwh9mxwEnHXejNjn5NFztxstZAsOylMujIjs+7MX6jCdFGpMYlnkhMluowFYdMz5Sa4nZUqk+2pIZukYq+3DddnP49Gx42M+omaIY09OYvfuYcycbWLqWBWDI2kUuC6bWJ4pdunZ1+qTw+qPOAxRWyyjuUiabTGdW0pJxYmDY4v9DxBoRR7KXh25vI2yF6AnmSAzEedKKvRQu1XdsmImgenFEgYcAyayKDVaGCzkYFlLw7nd9jpsi4Qw0fRCVOsuTCEwUashbQhUak1E7P1fqNM4K7NUHsZAyQtQbNDf89Ml3NB3DXwvQGyt8FEvHw+qXy4IqjrqAFiyWUnJ
ZAjWwAyBOI4RRyGklIhDD8IwYGbyCKoLQBQhFBJxbR6pXAEqGf8Cm0vbPHaMioEo0YOXjp/B9txWAECrMg87k0eGEwP4gQogT8CGAekUELsNmG0f0rYQludh5/oQhzGkiJeSWkuVgR8wc31wJ44gZA3Sqy4gs3U/IpXMNl4K6sXFY0X1SQ9UsTlQlvdNm7bi+JOPAAAWq1W4ZQ9tb0mrM4RAKpVAEEQwDANCUHFT0zIQs1IScl6oDPvZa/U6hACq9RYGuDju7Pw8zk5M4IEvPAAAyHHi30w6A0fYkFJisbSIT/71x1GtLWKxXEarVcffffJjKFcWEbGbRq4q0BtFETw/hO/7mJubhoxFhyhirCyCua75swYWQa4Li7WwFWvQJTAJ4M5V9/j6Os5Z/a4Wr3D8+Kp7rKddl1xPu8V3RAPbnUvhbMvDBdeDH8e4v1TD3RwL9u3Ea7Zuwj/yBvXpx57Ba6/bf9nj9xWKmKrWMFOvI4wiPPbSi9g1tu1Kt/kcgB8RhFsAVKWU0922tW8siWYpQKscIApjHP9mBdsP5i97ztW3jeCJLxEb8eTzk0hnEygMfHv7cXxHH0pzDZQXGoiiGKdfnML2vaNrHeqAnM5PYI0+AXB559wlkHGS8EIfdc9DFMd4YbaEfUOX9+Ed2DSIw1PzkFLizGIFSdtCb+bSLjghBPYOFvAc+2EPzVRxdX/ukscDQH8ygUXPQxDHCOIY56dKuPH2XasPW94nAPXLj/L/vw9sZu4WqdGd8EvT8CuziKMA8y88it5dN172nNH9t2Dy6a9ASonS2aOwkmkk85cu5SGEQG7bAZRfpHip0uEvo2fPLZe9R3bHDWicfgaR20DkNtA4/QwyO65f69rbsHKsLO+Tr172JpdAsScHzw/g+QGklGg2XaTTycue05NNod5sQ0qJeqsN27KQSl46t6cQAiNDI5jgkiznzp9HDxe0/DbgZc8f9i99DdR/APXnZ69wvwcBvE4IURRCFAG8jr+7HC56V5djFPI6WBNC3MK+sh9ZR7s+B+BtQojEqnHystCdBiZjlNttzHDQYVBron9oAK04AmwLctMgEkVaGH7DyODdz7wIGYZ4SzaJHVGEehTgw/M17En24JXDg7C3cIkUEWG+3sD3fOrzqLfbMCDwV6cmcP/BW2ANBPiZrz6K/+e//TSGiz0UfHqMame9Y/8O/NxXH8Onfu0DGOvJ4/++742on7mAl/wI/3T0BXzoXT+BBFdDLh0/BQHgF2+4Du/7whcRWTbeeP112FJIYnqxhcdOHEW+EK0oD8F4AER3PQmivP4X9YMQ4nCm14GQIWSbpLOBfD9MN0ToG5CRgbCeR8wFzHbfPYDHPjEDCIndd+ZgZgOkjAye+qd57NhfxK6bBzDYR9rpQGEQY2/ehvPPtvHLP/BnSCRtvPN3XgXLiVFrzuO3fuBL+KEPkQY7c2EWXIEClRfmUT/+BUi/iuc+/gH8wYUi/vP7DmDxaBvf/Mcy3v2bb8LgKC3Eo2P0rv7rb2fwkd/9DMJA4p4334U3vPlViEQL//iXX8bo9gFce+suAJ/dBeA+KWUkhHgAwJ8AOAPSMP4LgJ9RfbLn6nHEfoCgTppCbzaHaoWCqEu1KsIoQj/7MYsZG/sK4/j80eP40oSJq4f60J9OIYokHplcwJbePA7wQmWZJOnfuncznnhpAh/8xtNIWBa+78DODm35XR+9H7//trvQbrXAlTBQrrVx745N+OiTL+KB09MYSjl4xWABF2YW8FKtjXPlBsaSpM1U2WfhCYHRXAZPlajdN9+yF6lcCmEs8Ue/+ve45we+CwA6fcJD4k0AAiHESQAlEN0ZQohRAB9NjuyAYYilMSbo/6ZpkqZhmvSbYWDk9e/Aub/9H4CMMXztnUgObEHCiHHkC5+EPbYPg/tuRY3LI2dSAn1X34zM0Sfxpd/7cZh2Ald/3y/Aj2NkbBNf+/334K5f/BMAwETZR4vl1uHX/hjOfvr3cOFr
H0d6ZAf6broHwgCqRx9Da/IEhu/6oRVlmu10DoOv+EGc/ov3AgAGX/GDsBJpII4xef//QvGGe9TU+AKA9/BY+QsAZ4QQ9wCYBzHQ3gegKIR4YM/efUibSYxs2goACFIJhF6AMDIgpYTnS1RqZLkZGenHidMzkFIiX8gik88gansolevw0knks2lIrpVmJWz0OimUai0cOn4ehmFgZLgPz584gcVDh7EwX8LmzbT+OJYFyZqm77fxyOOPIIpjOLaNYjGPuVIFnh/B93z09GQBY2WarcVyA2EYIY4l5habKBaySDgOavUWHNtCkjbNdc8fKeV1uBi/DOBTQojfBvAMgL/g478HwE1SyvctP1hKWRJC/BYAxf75H1LKEp/zUQAfkVKuZuD8BYC/XjZ+37Z8/Eop37BGu34aSzT6z/M/CCHexe34yKp2vSCE+HsALwIIweOEz3kAFHYytcZ91kRXG1g3uHNkCHeODCE9NY/WwpKG+O6BPKLhwYuOH8hl8fC73ongOGWdkHUyP7UB/PFdt2Ko2HPROcVkAn/9hlcBI0T28mLObbZ5E171XWtLrLeNjuC20RHIvfsAAA8do/vdctVe3LhrK/7pW4+vOJ4lkPesdS0p5XXZvsS6kw8MbM9gYHsGTjFE38jSRnnTWwYwUBy46HghBH78V18Hm7MqBGap89uv//1rcWb6zEXnZHfejezOu7HvRnI4f8+9lCtw+75e7D9wx5rtOviqq3HwVVejvkBajM8m3bf++GvQ8jqC4fNSys/zc0sAW1a3lX+7bu/+TV0lZOjvz2N42wB2Da4k7b3pmp1rHi+EwL2cNT6bWql5/f7b7lr7HukkfpzzJMbLwmcOjPXhwFgfnjl58ZwZSqdR4IwPu69a8jO/5zc7RKtOnwCAlHIFy0vRgHlCviE1unPd/ZK76kbkrroRBgRycik28cC9/xmL0cXaphACB976HmSYvl11l0zhr/5v/2vNeyR6R7DrnR9cKuLKpvmePTcjv/vmNc/pvf616L3+tctyJdLePfbG9yzPXL8bS/9vYxUbk8dKWUr5hr37rl53n+TzGeTzGURSIlyWj7K3mEPSti86XgiB4f5CZ4y4ywgV/QNra/mWZWF4aAixMheyLTWdSiKVXJtpONBfUHcEn0SbbC699F0X82ete0gpT2MNtp6U8nO4BAVfSvmXAP5yje9/8hLHtwF8/xrfT4GE+LXOeQrARaav1RvXqt/eD+D9a3y/5j0uh642MLJXj8M4QxtSygUiX0KGQMsL8M5PfAF/+BpKqvrIeSKnjLbr2MPasceBgy77p/ynaXF1lU9gbAztXVQWoxWS3frADtpomgap8u7UBJwq++DyNKD8c7QJBbPEQLIH59Di0ht2L218xbspOXDlPFl1Cv2kmd2Q3YIvfYs2y0RhoMOOWzekBII2HF7osk4CdmTBdAwEXox/+fMTeMPPkJkyzeaLxbkAEbsD926nhXisj45R5VzaTS6ohxQEpxBqsE/n2Bl63ukKfRpBhLhCx/dK6utdRU7q2qIb+VYSZkBkIhUH46TotyEu4dKfJ4m01izDC2jRzFhXrijLQYe3gWm+AgIWjE5KKN/1UKszAzIMEUQRzk7M4O3veBOu3kdr27c+8QAWJqntIxywq8w3PgfjerwAxVEAz+NNiBeyxRJv7hyjJeMIzQb9VuHxEgnqf4Pf1cxiDSMFNuOmaZGrcxyYx8U4Q67oaaaziDqK05XX3NV9suyXZaVNlBQvELVbOPXn78WOd/wBP4fkNgu0+RnDBr0/KWhM2wnqnyGeBynTwBb2L28bpPmTYccZK6/45skZfP0EXafEKbbMTjJaVRxSLiX8FUuLMn2stFDEy7piPREwq/sljmK06y2MjbJAUuiFO+sCiOF5AR786uPYx2WIOow+QyDmfF4xs4Z9foZyjTRmR6XoMgRcHisNZiyqsc9W7ysAACAASURBVB1ynKIJYynZMc8NFci9VMQUMFa99+ii
pM5Lv6sN/uX0icb6++TbooH1Jmx85h1kni3PXM7n9x8HmZyDN//UHv7rImr+vytIKd++3mMTloXrNo3jth961XeySf+/o5s+sXv6sevn//w72Zx/M1hvvyQSNm49SIJ9s9VdftKNhm7Gyn8UrLdPutrAbNvC8OgQ6pMkwaWLAmCp1mY21zSnD/rosy8AAHb3ZfFzSS6HwJYK2SRpvPQcaWClAZIoT3vNjiQ1uouIBJvZdOhPk5Mne34aQjk36nTPhEHSc43jfqLTpyGnKFC1nKP2ZXYTWWZ0G6V/ac/Q9QbSGVy/n0xVm7aNw05cMSBxBUzTQL4njSSTCKQlkCmQZBxGStIjzbDBKWjMhkSCC+jBZdOHS5KzsMiUGIV0jYSdRcDSJuc8hqxREcdUQGaQlLSRMMnENVMhs/ZWizTQ8SQtAoERwGXfZdUnLTQuEalBxCS1FjL0GRsJ1Jnp6HDJmm4gYCAh0xgeoL4+FM2iDHr20aupXbfdSZr1HiaM9KUtfOFvvwIAqFWona0mjZvSApd/YclZWgbqntJKqW+KnDg4wQmBozBEhX1wqvihzam3VNmYcjuGzVqta7KGD3pXPjPrWmxONXMJpDkdWbSu4o4XQ0qJIAo6zClDGBeXK1mt8RhAxLPU5ujIm7hk0bU33gQAGMzTAbE04HBdkU0DzFDkbBNhSN9bu4dQc+m7BzlmTJkBBWsUljAhDVXeRbVHZT0mYUxlsTCwjHkoRbfVVCBlDK/twjJVAuciwjabrflaLS5f4nBZHbfdRszv0GLrhGqmwf6pdpvevSGMzo++vzLzkur7WEZLz8ka14q0EHxsDMUsVPdcW72SUq7ItqLxnUN3gcwyQjUqw5K08NmWBZ9rFFVCGmQlzvgdcsXdmp3CpM31nZjy6hv0Kdm2X41psF2YayJv0CJR5vX9c5Nk3t09Rgv0jt4k+hJkZmxOkCkycmnRkTy5yuV5SA6o9NlsF1Rp0/WPnAAApHloeUkbW/ZR8HAwdRYyuGx6sYshJUxPIhJc1VgGUEVYWw02aTr0RV5QPyQME05IpquMSaZw06PFPnYpW0TKZrt6ZEBENJ1GcnTscIHYYm7EmbJLLs7MUe7HokWCQ4+ke20epOsenTkFQ9BmZAua/D4H7bZ5QXOz5P+LnBRqbXoP9Ur3RLo4kmjVAhgJekYvBYxuIRPRvT9Ibd+5mzZsJ0V9c/Ud+xDyaPzWn/8zAODwKUrZIzzOAq+KQDkmSrxh9RaZ4MHZyF3OtFCvNtDkV2ny4uiF9EW1TaaklmHi6CSl8jq3QL/VI2Uy4vHBPox8fw+yGerTUuPyGSAuCQnIKO4sltK42OQkY5U1XmWGj2Fa9IwmU+QFS4Jek+ZhyaKNPpdO4sQ8bfZPvkSbU3ORfHvpYTLDGZFA0KL3n+Ug9XbM92KTbwQAko5RSQbUghxzmIkyrVnmUvSllBa6XbLjOEKrVcbZCZqXqaSDAlcX93iTMuhRMMCB7L7vw+Uq1D4f47MgY/EmZ3J2/yAIO6bC1c/SyaIfo+P/w5Iviv/kv+N4WSb+y0PKZcEEL1PY0Vgf/t0EMmtoaGho/MdCdxWZIeHIGBZnk+83bPgmaR4Way6tNkk5YwNkChvftgmTqnQwSyMOU5YFi9w+O95H+vphsbuoNk8mQFkiSWuKk5JW0w42c5CjodiNLqfJ4erLbthEizORS9bo0i6bOCe5cjRLVs0wRMGj8/sP7AKCiwKZL48AiOck4hQ9t2+04bA24NhEgDB8DvRkDSAOLQyOEtnIjoiwNT9FKqfNwbhhik01vgeXa3MlOUjT4LfWUyAChJM3UeIM8w5rCbU22Rtn3ecBANlhA8mINDCvTeYyMyLznWQZeqb0DAAgYefQ20t1tYyg+ziYIAxwYXEGjzxHwakDO3rwA+98CwBg+z5lKmViD1dq9v0I+28k0+jZpylFz5f/jkKGHJ/rz7HGGMsQPUlq86YRZgeyBN3girzldoyKx6QNbpdt
0zF1m46xC2mcv0Am7xkOCu/fTCbOqQukmYWqCrRwUCuTdqfqznULAa7CrLSZOL6o8vFqyR9SQsQ0X8636POlKo3RFxcpU1APB53HkUSF00wFF8g8b5UnAADf+3bSwOYnp7Cjh/rTSNJ5j5ylscLGFPQ4FnKc8Fnl+FPJgDtErBbdp9qOMO8tX0a6y4bTbNbxxJMPYZLr/9mWRLNBKpfFSZyzWRqD48w2rpYqKLNVQtXNK1foHBU3rFiKrtuECXYLXEIbEgJLNshVGpjCWmeKVceuFTJ1hcTsGv9KaA1MQ0NDQ2NDoisNzIgNpNw0pkIiVgwabRRdlpbmyFcScvzW3n0k8W3efRVKz1JM0ghTksGSsC1p/0yxT8GCRJrpzMdPTQAA+jlJ7vatZP++4ASYPUn3SnFVX8FOehFxGiYzhM+imN8kTabE/qJ0mvwydZbUm55EaZIIHdbmYUTRavft5ZF0Mtg3fiOiNFdQtm2MFEjLSDIdXLCPYX6eaO+lZggzyWm02uTrcrkMRDLFlZSZOu42W2g2qX9U21TC4XyOJOhUNoXJ+RI/O2lg003SILKL1DdmMYWgNkF9YHBKptRWem6H6dMefZ9J9GF8mKj19svIs2knHAzvGEeYJc3xupuuxc5ryW8ZSQ6C5zgCX5U3NgWcLA3HzdfQvRuf+Rq1L6BnqDXpnTmWgev2UJLnrdvos8rEoOYcaQYzrQCzLZLCTZPp0haNgewwjZPb33AbZv+ZkgBMBeQrevPbXwMA+MZXKTP7Yw+Rb3HywjwCj8IMhFiRzLcrmMvIAI5pIWRyhAoRWJLYFTHCgGBKgcfjaJGtHA4TGHKcwDYKgWybfL1tSb6wgK8flmnOzJw/hpB90be+msLW+lmzH8yShrepL4cUz9Ekk5pUmi7lRwo5Qe6ZmQo++q0JAMB0O+pa4/DaLk4dex4lrpq8ffsWJLg9bZ995jwXbIvmtEAEk7WfOmuCkskrCdbawia9axlF8Dnp8RLlf6WWKLGkTa3+7AZrPfuqVFIa32bo3tXQ0NDQ2JDojoUYS1SbAb5e5QSafcDtTGlPzZHPKhmQz+r6GykjwuimnfjnJ54DAFQ5kDCyuCAjS7IpjiJsX5iB2Uua1vYiaTHtiDQSK0OS4IE7DqLELojSoTkAgMeiVWyRFuRKgUyGA3C52J7r0L3iPvIDtbmq9Mx8CVUuxVB+6QSazFBbL9KpLA5ceycMLhdiZDMoqOJ8CWqPCZJsXzhGFPfFc7M4M0P9ZFt0v1SW2uMELDkG9LzNqouQ2ZoqOLPVoGNOT5CvKJt0EMWqPAm9j/k6+XZ2BFupryYDnJugpMW2T/cqZEnzHN1KGnU1JC0uLqTRa7MGl7h8zsC1YNomCiO9+Mlf+DFqd8pAYFCbDSj6NZd54SrLUkYI2Rc6uoW0tV17SRO78By1RXJYgmmn4DMz7/Ap0pDmKjROZuZJE5uv+qjx+DJMl/uJxt3Nr34FAODg62/Go8+S76V1kvxJmQL1+5veQnWyjr/wGbrPU8/jzjdRe4a3dh9aAJBU79gWBCfY7Ukl0GLrgWJPKolyuTDvMKNO+Sot1qo252l87RsiLb5UrqDKoQMB09znatQfX3/oIQDA/ptuRYJTmxU5ye2mIfJXD7AGVkgnYAjOQMHZJwxug2L7VTih8LHzU4jYeiBiE92yEEM/wMKFScQqSjy2kErT88zNk786myIfWL1B1h3bEWjzPGX3MFJsWalyrIlktmQ6lemEDcTc10ZHu1JMT9nRyS5LjWcYrO1dzvf1r9HkNNYPrYFpaGhoaGxIdKWBySiAX5vCyUWS3N3AQWGcNKVrbZJ4ckwj3LaJ4n7y2V54zAj0WvTpcKXDNuekc9gn4/ghXE4JZLDNPWZq1OwiByYffRHpJElA9SRLZimSJL0sSfPNZhNprkpcYvt5nWNBjICk8ekZkkyNZAY11loynGi2GyTSGew88F2QNge5WgEsk3wSZkTfCU4o3Hqerj15
fhGlNmlIOWZYhTMsMXJ6oMFeYsP15XvQaCmmHj1L0ObUOJwgtx2HMFgTbrRJk2iw3b/GqZGEIWELijF78SRpbj39XELEon6zM1wSJqhjsUz9s23opq76AwBiGaPp1ZHppeePESwFy7IkH3rMzJQdnQM+S/KFIWrPm976egDAp2YoFrBVUal7TCwa9Lz9g6Q9NkLSwDxmDVqZNFLMkB0coOe++VYKnr7lNZQnUxQMjG6jcRIz0+/kSdLI3vTdlHZu925ivh16+hguTJAfacvONbP1XxGmYSCTScPkMV2qltHiQOpIFYJkn4lYFkCsgpEjfqc3jJOG8sqruO3Moq1aQMRM11ad+iObp/5RQc833XIHsuyv9TnFUie8qZNPCXDYehAwK/fCBGlD3+ACsU9N09g5WolQZZaoYXVfkS+KY9TcNtI8f2qVCiz2gaX50+ZVyuNSN9l0Bm0OdpbMSA54LVFMX6UUrcybqLQi1mi7ZA2q30x+RypW8HJ+cxUvp/GdQVcbWD5h4HVbMpgv0SL75JkWvjRBEyW1nbNtZGng55hMENTbiAS94CabEJMcWBqZKokoDwjDQImd8bJNk9Lh2mNBhQfmqXNIs+Los9ngOaY1TyyQSTEZA05MA9xOcn2sgM2UFdogm5IWSStrI7Lpty3FAhxzeYHRK8MwTaR7ehBy7rxIALCV05jMLEk2DwZMrJg98SJklvprYJiCqE8eIxKBK8gJLZiwYI1JCDbLTHOZh2aLNq4WZ9YwowhCcnBtkrMrcHLT8zO0oRV7Mti0mbKReB4nN/XpfN+jz1wvndP2Yvg1eq8JnOqqPwDKrhCGPuLO3hTB4o0l7ASRWnwsfQZhG5IDa0OmuW86sBUAkBpm89BRCpsQlo1NNxNJ6Ht+gMq/T8/S5jI3R89fbwYIObh8bISErM1MkffZhF12FzG+hTYBy6D3cfo43SPz/dSWm24gss0zT5+Ay5HRUfDyFqUojlCr1Trn+xCdjBfOqpmoQmENACaHCOwcoja+/VU0Zqo8N8pVeuZiwsJkg97bgf20Wd98B5nyi71k9kxZNhIcpFzM0waR5Js7nGBgcWEeL7xExKtvPkrFcx/+JlU4L3Pdud7b3ggAaIU2Yp7fiMOuM0/EUsL1A5icbq20MIWBITIhj43S+1JEktIimfoX5hc7uRDTRsBtp/E1OErnzixQP5RrjTU2sJXbrBDiIkr8WlC/KSKLsUrYUBvZcuKGNiF+Z6FNiBoaGhoaGxJdaWBJW2DXqIUfTxOdeFNiEl89RtL7VyZIErpuC5lXGqfIFFOBAZMllopPGslAmrSfSLJmwoHR8zLGQpq0uzabInOc3ibDJInYD4FF0kASCZJIL7A5YZHNMMO2jXSGrpPL0DHSJWl1wadjLZPzEpZa2C9JwsvWAxhxtzIkYJhLaayCwEfIFPHYIU0irnN12QaZDcPGLIoDpEF482SObc6RphQyVTpo0DMuzs/C5KBS163zJ/1Wb9H1TMMCTLrn+Dbqr8ER0lrYWkRFAAMyw27bynXYIqLIt3xKP2VYpH36UQqZLGlrcZdx3QQBAYEwWErvoywpLU7MqjQvcM7BKAxgc4C7z2JVqkDPnR0lqX+GqdE9PXkM7iCNomcrvefkKKXZ2inoM3B9NNjkpDKWG0yeEEyCSJgJ9A8Q2SfH2ohjsyUhx6a3g0TcKH7moU5fpBIvLwe2lBJ+FHWy0VuW6FQaUBXFQpYpHaURhBGGuBL1fQcpZGCciSYtJmgMFWhuFBMm+jO3Av9fe1fWG0d2Xk/dWnpjd7O5iiJFLdTiGXk224OJg8xMbAN+MPIQ5CH/LE95TBAgQIC8xA4SxHbsJAM74/E645EsjTSSuC/N3pda7s3Dd241SXlhKx7AhO95aZFiV1fdqq76zved73wAXrojTeG1urVfolekn0GRgTXZ+vKEYqD//dGPAQDv//hneEgbry6vw4yip8ZbMkpmaNPjaYzQZlKMmjqFaHSGdNiG
trF0puBR5h8Esk6XVoRVLS1IKvhfP/kWLq/IfaZEK9EB0+p9Tp62U6M1VO6P+Lzt5POMzKb8nmsoPzFrWp9hYBb2Z6XUbxV4OPz+4BiYg4ODg8OFxFShpDYa43iAOdr4fPn2Ag77Eo18sCU554/3RMZ6i6wojgIYFkO6jIgNG2ZtfcpY1qMNSgWayBphFJ11ibrm78poEl8Dv/g3kQRf4fbW7DBIFqWLgUabYo3+kTCtS2R2lxck4o7oxxQ227jKWVVXZmfz5tDzwhiDYTxCTKnuKB4i40yulLL0FCystymVLngIKvL5LTqtH+6Q/fC400z2e2Z2BemILQBksIOh1NJGmdT8vChEwMbThTURHdy8LQxvl4KbqAZ4Sv4d92W/LjVekYNQtJSakX25f+8YKxQ+VArlqdYDsHUNkxuqRkEAWx0Z8BwNR1wLNRFxVOgIn3k2kqWoY0XYVurTYT0sYI41nSSfB0V7MdZDPSQAGVdMkY5nJrJpAIj8CDM1uR4aC7LtlVVZi4w1sfl1+dv1jXkYSr2D/0ddQ+qZKfcnyK/Delm+E9Y82M6+8pMMazOyHne4DkOyDY9tBRVOe7h6/SrUDWHVhYiN9cw4dA+FfX/w8CE++kgY909+JoKMTx6RbXXJttIUmvUcay9VnJfroboo2zd2NptOYWAbu/XUjCMKFNYXypifo+F3Yxkha9sjir8OWNu+uirG1FdW1/MhkilrYdsfSYvIYUuuK7q3wVPqxPy2X79vJ/fZOyOxn5xqDx5+ey3NXsu+709mlzl8pnAMzMHBwcHhQmJKM18Pnh/AY5S7MlvEn16XWkGHEu9PW2QJZDJLV67AjyS6GrGRcNSVKClgvjoKRRVXB5DuCbuoMbIed2R7TSq3ZhsNzDJCDznzZ5V1rojPY69SgMdahupJFLccyD4U7dwgGsMOul3UWQ/bWC+i8MF0z3QDINNeblNTjKpIrEEtR5E0E1GJleclanz3629jeyBM9VlTVG+LGxIxax5bxobwGD1UasIK9jlNehTLGt16nWPRSwZHbamHzS5xDo0njGLYkwOeW6wgNfKZC8tyzhYXLdMRlV5rKGu0OKtQoIp0f3s41XoAUmsYJYBirSBBjISzvGw0HFFZlrG9QWuDEdnZiOFzwquzWqf5MJvRw2IJhVD2eUy7qFSx3jWWdQu0D6rP8zbVNJFrasC5cWMVodmUczUkuy1zrtshZ6WlvEYr1Tr6fV4zgxcqDMLzPBT8EJaw3L68hI0VyR5cZctBi7Zqbb5G6QjVRM5bPKKlFGXz1aqcozJZsqeBCmeWHR8La/nud/8LAPDeezIq5+N7n+DwiNvj9zgfM5JNmIodQWO/u+E8bbT4s23b8Pwgr+kZk2LaRuZCFGDjygLKnL4dVmbxZFvUhkdkhAOqPw/WmTlYXcEBzb4ffSq1461d+U6AzevGvmpzLiWgZWF21lde8bLjbU4Y/moWLE+2gPDDT70892+H3zscA3NwcHBwuJCYrpEZgDEeDEPbSI/x8pxs4mBFIqg+TT5Tqv4W5hdRnJGIv0WaktCOJuXrmAo65fmo8ZFa5GfafiTQOsbs7mONYU3IRtXqUP5myZfo+bg1QKEq9QKdsHF2ICyowwh9bFtXxn2svCz9JtfXF1E425Dzu9ZEG8Sxhsel9PRkhG7IYZpFqsRm+vLaffQMX7orkffGXYbjSmoM8VD29/3vS2R5eBiiRNPewVBqdXX2a736pijuHu/fB6qyJpfXRbHVaEgtbKYi7G2Y7qFLBaA28v7NQxm1Mjdr2Yycp3qpgYQ1Pds8Og0yDfTjFClrT0Go0O3K+lfJEBbnpfZkwolay9Z27KiOzKdCkQ28iqbDrV4HTx4Li2isyNr4JfYPsiaiEx9d1mFHsVU+8vqjOjINDZ6S1bYZ7atQ1r/TY6M7FarDkcGDh8KW250XY2DVUgHvvnoLs2XZj43FGiqsNdWpuk3s1OGKnKO038d4wC+FrReSxZYjZiKo
susdbqO3Lcfx7R/KaJy//6dvAgAO94WhaI1c8aet1RZViYY1Hi8sICKrs/ZlwRJNnWnhZemtxvhE03WGaRmY7ytU6hWogmQnBpmCZu008GTtS1ThdjnAs58M8OhTUTk3m3K8qT7NgvKBoMbAxum/cWQNkK+p7eUOyMSs8bLRGvpMI7Stv2ZkoLYhXCHI3+dmMn+2mFIP7EF7KpfUIk1QD+SsvXFFboJHdIiP2Via9PuImJYZ2RNP6q2oS84SO9LcQ8q/iUN7cbHgbaeq+lF+pWTWwZsPt2ImXzaTxNhlQ2/CVJWmnDzkjWFgXUGMxiJv+sUgOuGTdj4YA2Rxhoz7EAQmn3VVrclxZ3Ts33oqheYHHz5EtSiilNGcpEKGvNnPl9a5NrK9xcZtFOjnOGYatc4CdkLXgW73EKtr8kD0KOH/3nckZRSW5T1L6xkiXxZhd1tuZnEmacdmTx5yc0W5SdVnakjp/J2+gJOA1hm6vR6iUNa+EISIKCpQbIvw+GrdRQaDAZIkz/mdfEFCWbVflH1qtY7xzW/9BwCgNv8NAMC1GxSAUMyRZikGnNrc5cPIFtZD3pSVDrGzJ2sQ8/oKKJG3P2d8+KVaY/upNJsfHfWmXhMAaFQK+Os3ryMqyJE92TnAe9+TFN9dpn49rlnMG+on9z/EzVu3ZX/5XWhtiey9f0z/xx1JFz745BM8O2SrRlmu6blVEfMY34o6UnBsHsa85tKBpPRL/M4pk2FE95esKN/rUkOCPBsgpHyAGWQnGnnTiSDrnPCDEPWFS3hKZ48nOwfIuL2Yc/5GNDxssXHbCwOMEzsbTrZj3fKtp6KVusu05dPX8PMPMiDwrbsG06E2IA0LPG6dO+BbgUua2e3wwWZsY3MAz36mbfJ2+EzgUogODg4ODhcS04k4lEJUqsCn23rc6uXs6fKs/O6VtkRJH7dEsr27/RQdNt72GBWNmAoJGT6lLIoqE6DPKGfAqCaw6Q565+nxCJ7l6nz/KLBu0xKx9bXBqMDUF6XURUZSmtLcCp3Pby5X0Ygo8T5qQafTRUyeZxCGCRK6cweRj1EmRejtvZ8DAO79SNz4q5SJV5IiPv7PnwIACtfsjCdZt/KGsKtra7Kem3tjZJyLFHA67vK6jT6FCehBhLKS43t8/wEA4L0fiix/7WVGplWFMJW0XdqR7cwtyv99+lgi+nttYc9f/8rbuLQmjKCfHk21HoC4fZcKEYpFO5laodiQ9GQhYEqOKeY2XeSHwwFmZkQ+bVPUg8GAG5SXSl3W5I03v4BPn8lx/u3f/B0A4N13xLvwc6+KB2d9uQDDRvnAZ9MtnfBTrudBu4WHnDuX99GS7WVsKB/GnLYwoxB2Zb361gJ9ShjjYWgCNMkk7u108T8fyuTkTTLl+RkKmsLJzLcSm6o3d+S6evBEzskHP5XG4webwgy7Iw1wIsNX3xArqW+8JM3PRZuajwrY2hfGtrkv2+twYvqvPpKU8v0P3stZRrQijdzaMriBXCNWLKHC6IyV0nQMTAMYp8DmNvdp9wBx3lbDLADPV5lirSDVyBKb2qP4gqlfEqhJQzIAD5MG41OfrScMzMtp/2l/Q58WVZ5SuUjM+CfTkyfYHi2rdDyGsmlF36UQP0s4Bubg4ODgcCExvSeO8uFRoh2UgBHNNEOymPUViZIfb0qUGo/7yCi5bbFmc8j6R9VndHOiqNpmBLUbk5WxJuabSW3KPnVD1uL2WEtrM8LuaWCVLG2WDNFvSo59mUXoL16RGsHGlRLKFEeMsziPPM+LzMQ4Tp4hHrNxegDstYRxbR9Lw/XhrtTALoViwjrv+eiwLhbuCuuImO/fzH4FALjzVRFoHOkWjrdlvRZXZN9efVNWoEhBxOHhOg44kblCR/6XXhIrqNqasBiTDZFRl767JfWNfpP1Hpost2gEu/XSAipVqXnsHP58qvUApIweIoMi2y36pVyWbPJoVX4usHE9iiKUWOvrsrE8YzN3sSx/k7IGtHHn
Km6/IqKXb/6jrPE//4OYzX69/wUAwJe+dhWaTcJWCj9xIZdrY3//CN2eHPuVq1J77HLW2i5FD4FtNJ4PoEJZkx4nZE+LXpLiB9vHuTBmZ6+LMvvEm6xDPd4VJnKZsvK/+su38fIrr8kacXba/IqwzKXP3QEAfIUMZWmujtkS95cTGgpFWbsKX0Ol0KPQqsk68E5L1uD7i1LvGmqD7SNheYYMYtAUlmfHdpVoDGCUfy4j3N8EnWkM+4NcWKM8hSyxDJeWW769B8j2AwNEFFRouubHeeZkYv1kX2xZ20rkz5Z1lZo0Kfv5vDoyO9aUfeWhxDpbQKGNncxtLdMmpsFJvu/+lMYIDtPBMTAHBwcHhwuJqVWI0ApjNoL6xsvrUYaS+BnmqRdqEkU1D/bRZVTZZiT1HhlTg0FSjYyu4nlIKAnu2KZnRkQ2jvGVQkTmVp78Vg7GTpFVBppNqzFDxhK3U5+hxUvCutyxRqcmn++lCbIpVVSpTnDc20G/I2rCbNhHqyc1JU0Zd52y6UH7oRznnA/Fek/ImWa1ROocalki58aiRMy1uoen91tcAznO5p6s4ziVGsbypTU825JzcnQo7MCEsv5LVD0XCpOREWPWE3d+JWtQ4Sym26+LYq3Xa+PwWPY5LEyvojJGI41HSDnrKvCBcllqOyFVdj6ZjVUqGmNyZqIt+6aqNGXPg22Gbh4f4cvviFntW38mc65+8D2xR3r8RGp/l54VUOCstbo1tGVk3+nIGnV7A9x6WeyJZmeFkdcassatdof7KT+v31rFiHL2QfxiDCzLMhw3j2FdhrwsQUSpeMwa5qU5WbO1m68DAG689iaqbMOwNZzajJzH5XlhYJFlGEbntRwrI8/ywViydnGqocgcylRjLtflH7L1qgAACPJJREFUXLz1JVnLwsws/uU73wYAPN2WidcZxxOlvFYUbb0CRPmEYs/znjPM/V0wOsOo10U6HHJNNHzYOhQVyLYpmec/UF5+QzBk8HZqeWyVySc6iG2jts7Vh2f2wXi5RN5G9GUaCZepzKyVCyiX7bHzfkNGZs+Lbeg+qWoM2epw72n7/IvicG44Bubg4ODgcCExdQ0s0yZX/ni+QkRVmRmyuZPRzVJFfv/jX3yII/Ydpax9HTDa6bAmVmbuuOwBBUYuhoq7s0PjgiDMGwc7ticltdYu8vtIASADs02RKrDjFeQ9rZ6wGt+kKCiJcD0dTGx1zgmdJRh2d+H5coxhdYR6mUznkbCp6iKnxi5IncoL53B57vMAgM0tYW7tBxKhvbwq6rGZGdnfK2sxjrblfY9+Kb8bdiQC9MvCuqLSEMuXhWXsbgorG2uyBFtfhEZtVqL86xxFcvCQI1wSTsNtShS7u9PBOJP1mWfP2TTItEF/kCBJedypQhzLmpRLZ6bYsh7l+wEyMq+E19KgJ+dwb0vqMcus0TTqsxgwGr/6ivS/HY/kNWL/Wq8DJBzQGJWoLCSrD9iku7y6hms3WENhHYllMsQcwNlmI31lpoRSke8vh1OvCQCEvsJKvYKEx554syhUZH2fUjQb1eUY335HpkbPVWeQpJZBTGq8J4+1Gk0+I+B6Kls3sopde2A6g9FnalZ8ma3J9+DOxnX88r40wm9tCQOzfV+Wkdo6IsykrmlObuycMMZApyPMMQsSBBp0yoLRNP0m24sCviofGSdot8m4ihzbnNIrLqYdWZqYvOaVnR2VAnvtGURUMtdZV16es039st1i5EMFp+9F1m4r4H7l41mUgU+WZg2tgftTrYvD+eAYmIODg4PDhcR0DMzzoMIQdP+BZ8TMEwDAqDLri4JshUaj82GGkLWgGiM/68hhFYYp8819rTG0ARzZlZ+ejpZUGk866VnzstnukLny0A9Q4rZnbA8RO+LDvKRDq6RhH9xllFV5aicBk44wbN6Dz76zsacRVSWKW7krDhfWYSItsKetXUNnX9hTj+bHwx1Zo1+8LyrE+Rrz6+EM/uTPZS2vXRfl3dyi
fFZtSdhDab4IpaSGc7gldaz9ptTbdOGp7GgSAoxoI47u8OhOUp2xrgWihOv1UqRkL8Viaar1AIAs02i1hyd+jjGgNZVHW6gxrwkbxRaKxdytozcQ5VfCc1+dE2bw5XeFlaxfW4Fin1R1Tmqur78pzLVMs9larYYx+Bmst3mMoAtkETDAiE4gVgVXLMnxVmnfFVHl5kcBYqr37O+mRSHwcWOhNlHlBhqDujCwWw1hxRtfFMXh6qqoIuMkyZVs+ZWZ+8zamo7td1PwraG1ZV6WF51hW/J+fWo7BarrauUibq7L59tRK5u0bDJU8SpvwjpyA9wXGAbrwcBDgsU5uSYX50NobZWAXHt1+jaltc73vUYlZcjhtnZfxjQ+jsfPM6+Jca8cbxQqlCLW8DkBtkwVp58zWQXF82D3RynLxJk1sourTvzuBdbE4fyYOoWoggC+dWE2GsgfYEwB8AKa8eTCeufuZbR5kf3kqaS3DpkjGPHk2hlI2g9ynzYrplDepNkQQD5dFQB8PqSYHUSJF1ZZhajS4qqq2CDK3SxzQyEl2ZEy+TTl0WiYW8mcF6HycKkUYFCwjddFGN4oowZnoh3Ty1C0LDj++AhRj+KNMZuL2Yg5NrJWOpMv0PHeCF2KD25cp2ch06PNZ5JaU719FPmkvn5dboDLq/SFHMkX8uCgCx3Lzcdn1f+1t67Jz5n4CmrwYZoO4dGN0lMv8gVU0IgQMrUCFaLXp78hrZn6dFv3uVaNWR9+kCtOAABFpuou0Z+ysiCRRqmqkLHJNaBHWNCgEIg3sjAIkNBKSmXWjV5uap2upAXH8Th/qAWRnU3HXaCPZRDKdvuDMRSFFr3u6AXWBAiUwkK1hCSWz+oNUpQ/Lw/lKwsi6rlzg6lQ23wbBrCuaiGfu3zO5EFd4NnvysnviW3ePf1wMchgrI6J/zD8G58ioUqpgFdfEZHMmDflf//vHwEA9tvWt5Tv8RROzsfyTognzg1jENjzECiEFIqEvg0UTqc8syzLU772YVKtyfdF8/vj5TPKfHh2Enc+F+z0PC/leXkqyq7fZFrzJG3oM5Vp06j2AWZt0exae8qDsVs0L7AeDueGSyE6ODg4OFxITMfAlAKiIiYTZQ1AKWnKgr3mJi2rWSkDf/GamMQuhxLePtyTdMReX95znDK1qH2MGSSlHqNDm27Mi6L+JGXIqJIZSFRsOspTKDC9WPMl+mqQkVWYErBF38CfpI8GXpanJs6LwPhYSBsYr0gEvb/Zwv6m2GilZcp+Y0rkt2hr1UwBRvNI5X2Vm0y5bsgx+XwP9lvYfSTby46FIS1xBpvSsial8QqabWE0YSYpw/llSTdempPUWjbawrMt2U5pxkr1ZR/SkUS8gQ31Dw3GbQoNRtNPljXGIE5M3uA5HCbo92XfC1ZGH1T4yvd4PsYU44wp6kkoVzdM9xaYVk29UT4bK2NLwLhPGbXPlFIQ4rAplHeuIWk6K6M+3OFE6zjGwoqkXq2BbLNzbI8CAKC4gzvbx3mqLdPTtxbIJjVMOsaIqchSqHD3pqTqLjfkHJTIFvJ0lWfytJ+y4gP7s2UJNjWWAVqdTpOlGTMaVjiSGfQpcOixbWHINcyMHOswzZCRbaysSUP9fONTAMBR59lkvwB4RueTroXZvEAaUalc9BBFIYpFZnO4D5bV2e9mlmW5cKTMWYIh1yvlcXrMvPjqJDNSp7Z3sufZZlgnDJYsK6dkCsiZl/2/Mz/nE5k9wBr7Oo7wmcKtroODg4PDhYQ3jf2L53kHAJ58drvzB4GrxpjF8/7xH8maAFOsi1uTX48/knVxa/LrMdW6OJwPUz3AHBwcHBwc/lDgUogODg4ODhcS7gHm4ODg4HAh4R5gDg4ODg4XEu4B5uDg4OBwIeEeYA4ODg4OFxLuAebg4ODgcCHhHmAODg4ODhcS7gHm4ODg4HAh4R5gDg4ODg4XEv8HKSSE8ORq1NgAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 10 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Show the first 10 training images with their class labels\n",
    "for idx in range(10):\n",
    "    plt.subplot(2, 5, idx + 1)\n",
    "    plt.imshow(x_train[idx])\n",
    "    plt.text(3, 10, str(y_train[idx]))\n",
    "    plt.xticks([])\n",
    "    plt.yticks([])\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 157,
   "metadata": {},
   "outputs": [],
   "source": [
    "batch_size = 32\n",
    "epochs = 100"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Load the CIFAR-10 dataset (50,000 training and 10,000 test 32×32 RGB images in 10 classes)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 136,
   "metadata": {},
   "outputs": [],
   "source": [
    "(x_train,y_train),(x_test,y_test) = tf.keras.datasets.cifar10.load_data()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 137,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "50000"
      ]
     },
     "execution_count": 137,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_train.shape[0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 138,
   "metadata": {},
   "outputs": [],
   "source": [
    "# CIFAR-10 images already have shape (N, 32, 32, 3); the previous\n",
    "# `x_train[..., tf.newaxis]` (an MNIST leftover) produced (N, 32, 32, 3, 1),\n",
    "# which is inconsistent with the model's input_shape=(32, 32, 3).\n",
    "dataset = tf.data.Dataset.from_tensor_slices(\n",
    "    (tf.cast(x_train / 255, tf.float32),\n",
    "     tf.cast(y_train, tf.int64))\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 139,
   "metadata": {},
   "outputs": [],
   "source": [
    "# One-hot encode the integer labels for the 10 CIFAR-10 classes,\n",
    "# matching the 10-unit softmax output of the model defined below.\n",
    "y_train = tf.keras.utils.to_categorical(y_train, 10)\n",
    "y_test = tf.keras.utils.to_categorical(y_test, 10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 151,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(32, 32, 3)"
      ]
     },
     "execution_count": 151,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x_train.shape[1:]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "##### It is your time to build your model. Try your best to build a model with good performance on the test set."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 142,
   "metadata": {},
   "outputs": [],
   "source": [
    "model=tf.keras.Sequential([\n",
    "    tf.keras.layers.Conv2D(64, [3, 3], padding='Same', activation='relu', \n",
    "                           input_shape=(32, 32, 3)),\n",
    "    tf.keras.layers.Conv2D(128, [3, 3], activation='relu'),\n",
    "    tf.keras.layers.MaxPooling2D(pool_size=(2, 2)),\n",
    "    tf.keras.layers.Dropout(0.25),\n",
    "    tf.keras.layers.Flatten(),\n",
    "    tf.keras.layers.Dense(128, activation='relu'),\n",
    "    tf.keras.layers.Dropout(0.5),\n",
    "    tf.keras.layers.Dense(10, activation=\"softmax\")\n",
    "])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 143,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"sequential_7\"\n",
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "conv2d_16 (Conv2D)           (None, 32, 32, 64)        1792      \n",
      "_________________________________________________________________\n",
      "conv2d_17 (Conv2D)           (None, 30, 30, 128)       73856     \n",
      "_________________________________________________________________\n",
      "max_pooling2d_7 (MaxPooling2 (None, 15, 15, 128)       0         \n",
      "_________________________________________________________________\n",
      "dropout_14 (Dropout)         (None, 15, 15, 128)       0         \n",
      "_________________________________________________________________\n",
      "flatten_7 (Flatten)          (None, 28800)             0         \n",
      "_________________________________________________________________\n",
      "dense_15 (Dense)             (None, 128)               3686528   \n",
      "_________________________________________________________________\n",
      "dropout_15 (Dropout)         (None, 128)               0         \n",
      "_________________________________________________________________\n",
      "dense_16 (Dense)             (None, 10)                1290      \n",
      "=================================================================\n",
      "Total params: 3,763,466\n",
      "Trainable params: 3,763,466\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "model.summary()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 使用keras\n",
    "- 参考网址：<https://keras.io/examples/cifar10_cnn/>"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 144,
   "metadata": {},
   "outputs": [],
   "source": [
    "model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 152,
   "metadata": {},
   "outputs": [],
   "source": [
    "cnn_model = Sequential()\n",
    "cnn_model.add(Conv2D(32, (3,3), padding='same', \n",
    "                     input_shape=x_train.shape[1:]))\n",
    "cnn_model.add(Activation('relu'))\n",
    "cnn_model.add(Conv2D(32, (3,3)))\n",
    "cnn_model.add(Activation('relu'))\n",
    "cnn_model.add(MaxPooling2D(pool_size=(2, 2)))\n",
    "cnn_model.add(Dropout(0.25))\n",
    "\n",
    "cnn_model.add(Conv2D(64, (3,3), padding='same'))\n",
    "cnn_model.add(Activation('relu'))\n",
    "cnn_model.add(Conv2D(64, (3,3)))\n",
    "cnn_model.add(Activation('relu'))\n",
    "cnn_model.add(MaxPooling2D(pool_size=(2, 2)))\n",
    "cnn_model.add(Dropout(0.25))\n",
    "\n",
    "cnn_model.add(Flatten())\n",
    "cnn_model.add(Dense(512))\n",
    "cnn_model.add(Activation('relu'))\n",
    "cnn_model.add(Dropout(0.5))\n",
    "cnn_model.add(Dense(10))\n",
    "cnn_model.add(Activation('softmax'))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 154,
   "metadata": {},
   "outputs": [],
   "source": [
    "opt = keras.optimizers.RMSprop(learning_rate=0.0001, decay=1e-6)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 155,
   "metadata": {},
   "outputs": [],
   "source": [
    "cnn_model.compile(loss='categorical_crossentropy',\n",
    "                 optimizer=opt,\n",
    "                 metrics=['accuracy'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 156,
   "metadata": {},
   "outputs": [],
   "source": [
    "x_train = x_train.astype('float32')\n",
    "x_test = x_test.astype('float32')\n",
    "x_train /= 255\n",
    "x_test /= 255"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 158,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 50000 samples, validate on 10000 samples\n",
      "Epoch 1/100\n",
      "50000/50000 [==============================] - 116s 2ms/step - loss: 1.7798 - accuracy: 0.3479 - val_loss: 1.4950 - val_accuracy: 0.4527\n",
      "Epoch 2/100\n",
      "50000/50000 [==============================] - 114s 2ms/step - loss: 1.4740 - accuracy: 0.4653 - val_loss: 1.3251 - val_accuracy: 0.5279\n",
      "Epoch 3/100\n",
      "50000/50000 [==============================] - 120s 2ms/step - loss: 1.3390 - accuracy: 0.5226 - val_loss: 1.2003 - val_accuracy: 0.5737\n",
      "Epoch 4/100\n",
      "50000/50000 [==============================] - 119s 2ms/step - loss: 1.2402 - accuracy: 0.5612 - val_loss: 1.1380 - val_accuracy: 0.5980\n",
      "Epoch 5/100\n",
      "50000/50000 [==============================] - 115s 2ms/step - loss: 1.1588 - accuracy: 0.5905 - val_loss: 1.0619 - val_accuracy: 0.6266\n",
      "Epoch 6/100\n",
      "50000/50000 [==============================] - 119s 2ms/step - loss: 1.0990 - accuracy: 0.6139 - val_loss: 1.1138 - val_accuracy: 0.6011\n",
      "Epoch 7/100\n",
      "50000/50000 [==============================] - 117s 2ms/step - loss: 1.0486 - accuracy: 0.6329 - val_loss: 0.9856 - val_accuracy: 0.6559\n",
      "Epoch 8/100\n",
      "50000/50000 [==============================] - 116s 2ms/step - loss: 0.9969 - accuracy: 0.6505 - val_loss: 0.9356 - val_accuracy: 0.6737\n",
      "Epoch 9/100\n",
      "50000/50000 [==============================] - 108s 2ms/step - loss: 0.9615 - accuracy: 0.6622 - val_loss: 0.8837 - val_accuracy: 0.6895\n",
      "Epoch 10/100\n",
      "50000/50000 [==============================] - 118s 2ms/step - loss: 0.9276 - accuracy: 0.6757 - val_loss: 0.8697 - val_accuracy: 0.6993\n",
      "Epoch 11/100\n",
      "50000/50000 [==============================] - 113s 2ms/step - loss: 0.8989 - accuracy: 0.6879 - val_loss: 0.8464 - val_accuracy: 0.7072\n",
      "Epoch 12/100\n",
      "50000/50000 [==============================] - 117s 2ms/step - loss: 0.8709 - accuracy: 0.6969 - val_loss: 0.8707 - val_accuracy: 0.6959\n",
      "Epoch 13/100\n",
      "50000/50000 [==============================] - 117s 2ms/step - loss: 0.8432 - accuracy: 0.7079 - val_loss: 0.8241 - val_accuracy: 0.7132\n",
      "Epoch 14/100\n",
      "50000/50000 [==============================] - 112s 2ms/step - loss: 0.8269 - accuracy: 0.7133 - val_loss: 0.7844 - val_accuracy: 0.7320\n",
      "Epoch 15/100\n",
      "50000/50000 [==============================] - 116s 2ms/step - loss: 0.8015 - accuracy: 0.7209 - val_loss: 0.7691 - val_accuracy: 0.7366\n",
      "Epoch 16/100\n",
      "50000/50000 [==============================] - 121s 2ms/step - loss: 0.7871 - accuracy: 0.7274 - val_loss: 0.7475 - val_accuracy: 0.7424\n",
      "Epoch 17/100\n",
      "50000/50000 [==============================] - 126s 3ms/step - loss: 0.7703 - accuracy: 0.7341 - val_loss: 0.7324 - val_accuracy: 0.7485\n",
      "Epoch 18/100\n",
      "50000/50000 [==============================] - 122s 2ms/step - loss: 0.7634 - accuracy: 0.7368 - val_loss: 0.7461 - val_accuracy: 0.7414\n",
      "Epoch 19/100\n",
      "50000/50000 [==============================] - 120s 2ms/step - loss: 0.7501 - accuracy: 0.7399 - val_loss: 0.7883 - val_accuracy: 0.7288\n",
      "Epoch 20/100\n",
      "50000/50000 [==============================] - 121s 2ms/step - loss: 0.7388 - accuracy: 0.7454 - val_loss: 0.7188 - val_accuracy: 0.7551\n",
      "Epoch 21/100\n",
      "50000/50000 [==============================] - 120s 2ms/step - loss: 0.7312 - accuracy: 0.7499 - val_loss: 0.7474 - val_accuracy: 0.7472\n",
      "Epoch 22/100\n",
      "50000/50000 [==============================] - 122s 2ms/step - loss: 0.7182 - accuracy: 0.7524 - val_loss: 0.6899 - val_accuracy: 0.7623\n",
      "Epoch 23/100\n",
      "50000/50000 [==============================] - 117s 2ms/step - loss: 0.7131 - accuracy: 0.7544 - val_loss: 0.7448 - val_accuracy: 0.7492\n",
      "Epoch 24/100\n",
      "50000/50000 [==============================] - 119s 2ms/step - loss: 0.7065 - accuracy: 0.7581 - val_loss: 0.6956 - val_accuracy: 0.7721\n",
      "Epoch 25/100\n",
      "50000/50000 [==============================] - 112s 2ms/step - loss: 0.6985 - accuracy: 0.7620 - val_loss: 0.6752 - val_accuracy: 0.7690\n",
      "Epoch 26/100\n",
      "50000/50000 [==============================] - 115s 2ms/step - loss: 0.6960 - accuracy: 0.7614 - val_loss: 0.6619 - val_accuracy: 0.7779\n",
      "Epoch 27/100\n",
      "50000/50000 [==============================] - 119s 2ms/step - loss: 0.6873 - accuracy: 0.7648 - val_loss: 0.6659 - val_accuracy: 0.7756\n",
      "Epoch 28/100\n",
      "50000/50000 [==============================] - 120s 2ms/step - loss: 0.6884 - accuracy: 0.7647 - val_loss: 0.6728 - val_accuracy: 0.7730\n",
      "Epoch 29/100\n",
      "50000/50000 [==============================] - 115s 2ms/step - loss: 0.6834 - accuracy: 0.7687 - val_loss: 0.6704 - val_accuracy: 0.7726\n",
      "Epoch 30/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6790 - accuracy: 0.7685 - val_loss: 0.6820 - val_accuracy: 0.7685\n",
      "Epoch 31/100\n",
      "50000/50000 [==============================] - 112s 2ms/step - loss: 0.6716 - accuracy: 0.7710 - val_loss: 0.6476 - val_accuracy: 0.7820\n",
      "Epoch 32/100\n",
      "50000/50000 [==============================] - 112s 2ms/step - loss: 0.6705 - accuracy: 0.7724 - val_loss: 0.6825 - val_accuracy: 0.7686\n",
      "Epoch 33/100\n",
      "50000/50000 [==============================] - 115s 2ms/step - loss: 0.6654 - accuracy: 0.7761 - val_loss: 0.6586 - val_accuracy: 0.7754\n",
      "Epoch 34/100\n",
      "50000/50000 [==============================] - 117s 2ms/step - loss: 0.6620 - accuracy: 0.7750 - val_loss: 0.6454 - val_accuracy: 0.7832\n",
      "Epoch 35/100\n",
      "50000/50000 [==============================] - 117s 2ms/step - loss: 0.6563 - accuracy: 0.7775 - val_loss: 0.6541 - val_accuracy: 0.7802\n",
      "Epoch 36/100\n",
      "50000/50000 [==============================] - 116s 2ms/step - loss: 0.6592 - accuracy: 0.7766 - val_loss: 0.6750 - val_accuracy: 0.7795\n",
      "Epoch 37/100\n",
      "50000/50000 [==============================] - 123s 2ms/step - loss: 0.6522 - accuracy: 0.7789 - val_loss: 0.6319 - val_accuracy: 0.7857\n",
      "Epoch 38/100\n",
      "50000/50000 [==============================] - 117s 2ms/step - loss: 0.6541 - accuracy: 0.7781 - val_loss: 0.6914 - val_accuracy: 0.7845\n",
      "Epoch 39/100\n",
      "50000/50000 [==============================] - 118s 2ms/step - loss: 0.6526 - accuracy: 0.7792 - val_loss: 0.7122 - val_accuracy: 0.7652\n",
      "Epoch 40/100\n",
      "50000/50000 [==============================] - 114s 2ms/step - loss: 0.6499 - accuracy: 0.7815 - val_loss: 0.6633 - val_accuracy: 0.7791\n",
      "Epoch 41/100\n",
      "50000/50000 [==============================] - 110s 2ms/step - loss: 0.6461 - accuracy: 0.7820 - val_loss: 0.6478 - val_accuracy: 0.7854\n",
      "Epoch 42/100\n",
      "50000/50000 [==============================] - 109s 2ms/step - loss: 0.6485 - accuracy: 0.7802 - val_loss: 0.6711 - val_accuracy: 0.7802\n",
      "Epoch 43/100\n",
      "50000/50000 [==============================] - 113s 2ms/step - loss: 0.6410 - accuracy: 0.7839 - val_loss: 0.6607 - val_accuracy: 0.7871\n",
      "Epoch 44/100\n",
      "50000/50000 [==============================] - 119s 2ms/step - loss: 0.6375 - accuracy: 0.7855 - val_loss: 0.6945 - val_accuracy: 0.7647\n",
      "Epoch 45/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6397 - accuracy: 0.7844 - val_loss: 0.6754 - val_accuracy: 0.7866\n",
      "Epoch 46/100\n",
      "50000/50000 [==============================] - 110s 2ms/step - loss: 0.6373 - accuracy: 0.7885 - val_loss: 0.6488 - val_accuracy: 0.7843\n",
      "Epoch 47/100\n",
      "50000/50000 [==============================] - 113s 2ms/step - loss: 0.6371 - accuracy: 0.7857 - val_loss: 0.6468 - val_accuracy: 0.7851\n",
      "Epoch 48/100\n",
      "50000/50000 [==============================] - 113s 2ms/step - loss: 0.6306 - accuracy: 0.7886 - val_loss: 0.6246 - val_accuracy: 0.7927\n",
      "Epoch 49/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6305 - accuracy: 0.7875 - val_loss: 0.6888 - val_accuracy: 0.7765\n",
      "Epoch 50/100\n",
      "50000/50000 [==============================] - 113s 2ms/step - loss: 0.6238 - accuracy: 0.7909 - val_loss: 0.6867 - val_accuracy: 0.7867\n",
      "Epoch 51/100\n",
      "50000/50000 [==============================] - 112s 2ms/step - loss: 0.6249 - accuracy: 0.7937 - val_loss: 0.6214 - val_accuracy: 0.7928\n",
      "Epoch 52/100\n",
      "50000/50000 [==============================] - 114s 2ms/step - loss: 0.6211 - accuracy: 0.7919 - val_loss: 0.6347 - val_accuracy: 0.7915\n",
      "Epoch 53/100\n",
      "50000/50000 [==============================] - 119s 2ms/step - loss: 0.6201 - accuracy: 0.7931 - val_loss: 0.6605 - val_accuracy: 0.7883\n",
      "Epoch 54/100\n",
      "50000/50000 [==============================] - 115s 2ms/step - loss: 0.6175 - accuracy: 0.7925 - val_loss: 0.6128 - val_accuracy: 0.7949\n",
      "Epoch 55/100\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "50000/50000 [==============================] - 122s 2ms/step - loss: 0.6153 - accuracy: 0.7939 - val_loss: 0.6486 - val_accuracy: 0.7830\n",
      "Epoch 56/100\n",
      "50000/50000 [==============================] - 124s 2ms/step - loss: 0.6206 - accuracy: 0.7933 - val_loss: 0.6577 - val_accuracy: 0.7835\n",
      "Epoch 57/100\n",
      "50000/50000 [==============================] - 113s 2ms/step - loss: 0.6183 - accuracy: 0.7927 - val_loss: 0.6488 - val_accuracy: 0.7890\n",
      "Epoch 58/100\n",
      "50000/50000 [==============================] - 115s 2ms/step - loss: 0.6153 - accuracy: 0.7962 - val_loss: 0.6742 - val_accuracy: 0.7931\n",
      "Epoch 59/100\n",
      "50000/50000 [==============================] - 110s 2ms/step - loss: 0.6139 - accuracy: 0.7946 - val_loss: 0.6255 - val_accuracy: 0.7914\n",
      "Epoch 60/100\n",
      "50000/50000 [==============================] - 113s 2ms/step - loss: 0.6163 - accuracy: 0.7936 - val_loss: 0.6420 - val_accuracy: 0.7855\n",
      "Epoch 61/100\n",
      "50000/50000 [==============================] - 115s 2ms/step - loss: 0.6123 - accuracy: 0.7951 - val_loss: 0.6695 - val_accuracy: 0.7857\n",
      "Epoch 62/100\n",
      "50000/50000 [==============================] - 118s 2ms/step - loss: 0.6103 - accuracy: 0.7962 - val_loss: 0.6555 - val_accuracy: 0.7850\n",
      "Epoch 63/100\n",
      "50000/50000 [==============================] - 116s 2ms/step - loss: 0.6074 - accuracy: 0.7986 - val_loss: 0.6801 - val_accuracy: 0.7788\n",
      "Epoch 64/100\n",
      "50000/50000 [==============================] - 113s 2ms/step - loss: 0.6129 - accuracy: 0.7962 - val_loss: 0.6449 - val_accuracy: 0.7924\n",
      "Epoch 65/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6069 - accuracy: 0.7991 - val_loss: 0.6273 - val_accuracy: 0.7916\n",
      "Epoch 66/100\n",
      "50000/50000 [==============================] - 120s 2ms/step - loss: 0.6084 - accuracy: 0.7979 - val_loss: 0.6554 - val_accuracy: 0.7910\n",
      "Epoch 67/100\n",
      "50000/50000 [==============================] - 113s 2ms/step - loss: 0.6051 - accuracy: 0.7998 - val_loss: 0.6569 - val_accuracy: 0.7846\n",
      "Epoch 68/100\n",
      "50000/50000 [==============================] - 110s 2ms/step - loss: 0.6061 - accuracy: 0.7978 - val_loss: 0.6480 - val_accuracy: 0.7943\n",
      "Epoch 69/100\n",
      "50000/50000 [==============================] - 116s 2ms/step - loss: 0.6070 - accuracy: 0.7976 - val_loss: 0.6574 - val_accuracy: 0.7860\n",
      "Epoch 70/100\n",
      "50000/50000 [==============================] - 118s 2ms/step - loss: 0.6090 - accuracy: 0.7990 - val_loss: 0.6806 - val_accuracy: 0.7810\n",
      "Epoch 71/100\n",
      "50000/50000 [==============================] - 112s 2ms/step - loss: 0.6024 - accuracy: 0.8001 - val_loss: 0.6504 - val_accuracy: 0.7931\n",
      "Epoch 72/100\n",
      "50000/50000 [==============================] - 110s 2ms/step - loss: 0.6067 - accuracy: 0.7967 - val_loss: 0.6319 - val_accuracy: 0.7935\n",
      "Epoch 73/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6125 - accuracy: 0.7968 - val_loss: 0.6770 - val_accuracy: 0.7850\n",
      "Epoch 74/100\n",
      "50000/50000 [==============================] - 110s 2ms/step - loss: 0.6070 - accuracy: 0.8013 - val_loss: 0.6446 - val_accuracy: 0.7959\n",
      "Epoch 75/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6079 - accuracy: 0.7967 - val_loss: 0.6810 - val_accuracy: 0.7862\n",
      "Epoch 76/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6063 - accuracy: 0.7970 - val_loss: 0.6667 - val_accuracy: 0.7854\n",
      "Epoch 77/100\n",
      "50000/50000 [==============================] - 110s 2ms/step - loss: 0.6062 - accuracy: 0.7985 - val_loss: 0.6661 - val_accuracy: 0.7870\n",
      "Epoch 78/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6035 - accuracy: 0.7987 - val_loss: 0.7566 - val_accuracy: 0.7865\n",
      "Epoch 79/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6035 - accuracy: 0.7990 - val_loss: 0.6541 - val_accuracy: 0.7953\n",
      "Epoch 80/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6065 - accuracy: 0.7986 - val_loss: 0.7136 - val_accuracy: 0.7664\n",
      "Epoch 81/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6076 - accuracy: 0.7993 - val_loss: 0.6689 - val_accuracy: 0.7847\n",
      "Epoch 82/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6045 - accuracy: 0.7991 - val_loss: 0.6720 - val_accuracy: 0.7763\n",
      "Epoch 83/100\n",
      "50000/50000 [==============================] - 116s 2ms/step - loss: 0.6057 - accuracy: 0.7981 - val_loss: 0.6382 - val_accuracy: 0.7889\n",
      "Epoch 84/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6002 - accuracy: 0.7996 - val_loss: 0.6576 - val_accuracy: 0.7769\n",
      "Epoch 85/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6043 - accuracy: 0.8004 - val_loss: 0.6367 - val_accuracy: 0.7910\n",
      "Epoch 86/100\n",
      "50000/50000 [==============================] - 2187s 44ms/step - loss: 0.6078 - accuracy: 0.7979 - val_loss: 0.6519 - val_accuracy: 0.7903\n",
      "Epoch 87/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6041 - accuracy: 0.8015 - val_loss: 0.6312 - val_accuracy: 0.7873\n",
      "Epoch 88/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6054 - accuracy: 0.8003 - val_loss: 0.6713 - val_accuracy: 0.7837\n",
      "Epoch 89/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6082 - accuracy: 0.7979 - val_loss: 0.6410 - val_accuracy: 0.7915\n",
      "Epoch 90/100\n",
      "50000/50000 [==============================] - 112s 2ms/step - loss: 0.6070 - accuracy: 0.7981 - val_loss: 0.6437 - val_accuracy: 0.7913\n",
      "Epoch 91/100\n",
      "50000/50000 [==============================] - 114s 2ms/step - loss: 0.6059 - accuracy: 0.7967 - val_loss: 0.6774 - val_accuracy: 0.7925\n",
      "Epoch 92/100\n",
      "50000/50000 [==============================] - 117s 2ms/step - loss: 0.6051 - accuracy: 0.8008 - val_loss: 0.6895 - val_accuracy: 0.7773\n",
      "Epoch 93/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6111 - accuracy: 0.7986 - val_loss: 0.6297 - val_accuracy: 0.7944\n",
      "Epoch 94/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6076 - accuracy: 0.7979 - val_loss: 0.7801 - val_accuracy: 0.7513\n",
      "Epoch 95/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6115 - accuracy: 0.7978 - val_loss: 0.6817 - val_accuracy: 0.7705\n",
      "Epoch 96/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6041 - accuracy: 0.8011 - val_loss: 0.7607 - val_accuracy: 0.7596\n",
      "Epoch 97/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6073 - accuracy: 0.7991 - val_loss: 0.6644 - val_accuracy: 0.7792\n",
      "Epoch 98/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6092 - accuracy: 0.7974 - val_loss: 0.6811 - val_accuracy: 0.7804\n",
      "Epoch 99/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6072 - accuracy: 0.7987 - val_loss: 0.6970 - val_accuracy: 0.7735\n",
      "Epoch 100/100\n",
      "50000/50000 [==============================] - 111s 2ms/step - loss: 0.6141 - accuracy: 0.7963 - val_loss: 0.6568 - val_accuracy: 0.7850\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "<keras.callbacks.callbacks.History at 0x14a901dd8>"
      ]
     },
     "execution_count": 158,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# time: 4:15-\n",
    "cnn_model.fit(x_train, y_train,\n",
    "             batch_size=batch_size,\n",
    "             epochs=epochs,\n",
    "             validation_data=(x_test, y_test),\n",
    "             shuffle=True\n",
    "             )"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 159,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "10000/10000 [==============================] - 4s 400us/step\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "[0.6567883818149567, 0.7850000262260437]"
      ]
     },
     "execution_count": 159,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Returns the loss value & metrics values for the model in test mode.\n",
    "cnn_model.evaluate(x_test, y_test)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 162,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[8.7095231e-02, 2.5900593e-02, 3.7576474e-02, ..., 1.0399666e-02,\n",
       "        6.5035865e-02, 4.3131325e-02],\n",
       "       [1.8592451e-06, 1.9213527e-04, 8.7700555e-11, ..., 1.1645929e-13,\n",
       "        9.9980563e-01, 3.6297612e-07],\n",
       "       [2.4254961e-02, 8.9959167e-02, 4.6818892e-05, ..., 2.6268955e-05,\n",
       "        8.3116752e-01, 5.4347243e-02],\n",
       "       ...,\n",
       "       [3.1979684e-07, 2.4094392e-08, 8.3202479e-04, ..., 5.2634552e-03,\n",
       "        7.0658828e-08, 1.1879582e-06],\n",
       "       [7.3111847e-02, 8.6835754e-01, 2.3240966e-03, ..., 2.0564843e-03,\n",
       "        2.6703340e-03, 3.3806168e-02],\n",
       "       [8.5956515e-07, 6.2048463e-07, 3.0543245e-04, ..., 9.9660933e-01,\n",
       "        4.0734324e-08, 6.7333957e-07]], dtype=float32)"
      ]
     },
     "execution_count": 162,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 返回值是数值，表示样本属于每一个类别的概率\n",
    "cnn_model.predict(x_test)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 163,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[3 8 8 ... 5 1 7]\n"
     ]
    }
   ],
   "source": [
    "print(np.argmax(cnn_model.predict(x_test), axis=1))\t\t# 打印最大概率对应的标签"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 121,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Help on ndarray object:\n",
      "\n",
      "class ndarray(builtins.object)\n",
      " |  ndarray(shape, dtype=float, buffer=None, offset=0,\n",
      " |          strides=None, order=None)\n",
      " |  \n",
      " |  An array object represents a multidimensional, homogeneous array\n",
      " |  of fixed-size items.  An associated data-type object describes the\n",
      " |  format of each element in the array (its byte-order, how many bytes it\n",
      " |  occupies in memory, whether it is an integer, a floating point number,\n",
      " |  or something else, etc.)\n",
      " |  \n",
      " |  Arrays should be constructed using `array`, `zeros` or `empty` (refer\n",
      " |  to the See Also section below).  The parameters given here refer to\n",
      " |  a low-level method (`ndarray(...)`) for instantiating an array.\n",
      " |  \n",
      " |  For more information, refer to the `numpy` module and examine the\n",
      " |  methods and attributes of an array.\n",
      " |  \n",
      " |  Parameters\n",
      " |  ----------\n",
      " |  (for the __new__ method; see Notes below)\n",
      " |  \n",
      " |  shape : tuple of ints\n",
      " |      Shape of created array.\n",
      " |  dtype : data-type, optional\n",
      " |      Any object that can be interpreted as a numpy data type.\n",
      " |  buffer : object exposing buffer interface, optional\n",
      " |      Used to fill the array with data.\n",
      " |  offset : int, optional\n",
      " |      Offset of array data in buffer.\n",
      " |  strides : tuple of ints, optional\n",
      " |      Strides of data in memory.\n",
      " |  order : {'C', 'F'}, optional\n",
      " |      Row-major (C-style) or column-major (Fortran-style) order.\n",
      " |  \n",
      " |  Attributes\n",
      " |  ----------\n",
      " |  T : ndarray\n",
      " |      Transpose of the array.\n",
      " |  data : buffer\n",
      " |      The array's elements, in memory.\n",
      " |  dtype : dtype object\n",
      " |      Describes the format of the elements in the array.\n",
      " |  flags : dict\n",
      " |      Dictionary containing information related to memory use, e.g.,\n",
      " |      'C_CONTIGUOUS', 'OWNDATA', 'WRITEABLE', etc.\n",
      " |  flat : numpy.flatiter object\n",
      " |      Flattened version of the array as an iterator.  The iterator\n",
      " |      allows assignments, e.g., ``x.flat = 3`` (See `ndarray.flat` for\n",
      " |      assignment examples; TODO).\n",
      " |  imag : ndarray\n",
      " |      Imaginary part of the array.\n",
      " |  real : ndarray\n",
      " |      Real part of the array.\n",
      " |  size : int\n",
      " |      Number of elements in the array.\n",
      " |  itemsize : int\n",
      " |      The memory use of each array element in bytes.\n",
      " |  nbytes : int\n",
      " |      The total number of bytes required to store the array data,\n",
      " |      i.e., ``itemsize * size``.\n",
      " |  ndim : int\n",
      " |      The array's number of dimensions.\n",
      " |  shape : tuple of ints\n",
      " |      Shape of the array.\n",
      " |  strides : tuple of ints\n",
      " |      The step-size required to move from one element to the next in\n",
      " |      memory. For example, a contiguous ``(3, 4)`` array of type\n",
      " |      ``int16`` in C-order has strides ``(8, 2)``.  This implies that\n",
      " |      to move from element to element in memory requires jumps of 2 bytes.\n",
      " |      To move from row-to-row, one needs to jump 8 bytes at a time\n",
      " |      (``2 * 4``).\n",
      " |  ctypes : ctypes object\n",
      " |      Class containing properties of the array needed for interaction\n",
      " |      with ctypes.\n",
      " |  base : ndarray\n",
      " |      If the array is a view into another array, that array is its `base`\n",
      " |      (unless that array is also a view).  The `base` array is where the\n",
      " |      array data is actually stored.\n",
      " |  \n",
      " |  See Also\n",
      " |  --------\n",
      " |  array : Construct an array.\n",
      " |  zeros : Create an array, each element of which is zero.\n",
      " |  empty : Create an array, but leave its allocated memory unchanged (i.e.,\n",
      " |          it contains \"garbage\").\n",
      " |  dtype : Create a data-type.\n",
      " |  \n",
      " |  Notes\n",
      " |  -----\n",
      " |  There are two modes of creating an array using ``__new__``:\n",
      " |  \n",
      " |  1. If `buffer` is None, then only `shape`, `dtype`, and `order`\n",
      " |     are used.\n",
      " |  2. If `buffer` is an object exposing the buffer interface, then\n",
      " |     all keywords are interpreted.\n",
      " |  \n",
      " |  No ``__init__`` method is needed because the array is fully initialized\n",
      " |  after the ``__new__`` method.\n",
      " |  \n",
      " |  Examples\n",
      " |  --------\n",
      " |  These examples illustrate the low-level `ndarray` constructor.  Refer\n",
      " |  to the `See Also` section above for easier ways of constructing an\n",
      " |  ndarray.\n",
      " |  \n",
      " |  First mode, `buffer` is None:\n",
      " |  \n",
      " |  >>> np.ndarray(shape=(2,2), dtype=float, order='F')\n",
      " |  array([[0.0e+000, 0.0e+000], # random\n",
      " |         [     nan, 2.5e-323]])\n",
      " |  \n",
      " |  Second mode:\n",
      " |  \n",
      " |  >>> np.ndarray((2,), buffer=np.array([1,2,3]),\n",
      " |  ...            offset=np.int_().itemsize,\n",
      " |  ...            dtype=int) # offset = 1*itemsize, i.e. skip first element\n",
      " |  array([2, 3])\n",
      " |  \n",
      " |  Methods defined here:\n",
      " |  \n",
      " |  __abs__(self, /)\n",
      " |      abs(self)\n",
      " |  \n",
      " |  __add__(self, value, /)\n",
      " |      Return self+value.\n",
      " |  \n",
      " |  __and__(self, value, /)\n",
      " |      Return self&value.\n",
      " |  \n",
      " |  __array__(...)\n",
      " |      a.__array__(|dtype) -> reference if type unchanged, copy otherwise.\n",
      " |      \n",
      " |      Returns either a new reference to self if dtype is not given or a new array\n",
      " |      of provided data type if dtype is different from the current dtype of the\n",
      " |      array.\n",
      " |  \n",
      " |  __array_function__(...)\n",
      " |  \n",
      " |  __array_prepare__(...)\n",
      " |      a.__array_prepare__(obj) -> Object of same type as ndarray object obj.\n",
      " |  \n",
      " |  __array_ufunc__(...)\n",
      " |  \n",
      " |  __array_wrap__(...)\n",
      " |      a.__array_wrap__(obj) -> Object of same type as ndarray object a.\n",
      " |  \n",
      " |  __bool__(self, /)\n",
      " |      self != 0\n",
      " |  \n",
      " |  __complex__(...)\n",
      " |  \n",
      " |  __contains__(self, key, /)\n",
      " |      Return key in self.\n",
      " |  \n",
      " |  __copy__(...)\n",
      " |      a.__copy__()\n",
      " |      \n",
      " |      Used if :func:`copy.copy` is called on an array. Returns a copy of the array.\n",
      " |      \n",
      " |      Equivalent to ``a.copy(order='K')``.\n",
      " |  \n",
      " |  __deepcopy__(...)\n",
      " |      a.__deepcopy__(memo, /) -> Deep copy of array.\n",
      " |      \n",
      " |      Used if :func:`copy.deepcopy` is called on an array.\n",
      " |  \n",
      " |  __delitem__(self, key, /)\n",
      " |      Delete self[key].\n",
      " |  \n",
      " |  __divmod__(self, value, /)\n",
      " |      Return divmod(self, value).\n",
      " |  \n",
      " |  __eq__(self, value, /)\n",
      " |      Return self==value.\n",
      " |  \n",
      " |  __float__(self, /)\n",
      " |      float(self)\n",
      " |  \n",
      " |  __floordiv__(self, value, /)\n",
      " |      Return self//value.\n",
      " |  \n",
      " |  __format__(...)\n",
      " |      Default object formatter.\n",
      " |  \n",
      " |  __ge__(self, value, /)\n",
      " |      Return self>=value.\n",
      " |  \n",
      " |  __getitem__(self, key, /)\n",
      " |      Return self[key].\n",
      " |  \n",
      " |  __gt__(self, value, /)\n",
      " |      Return self>value.\n",
      " |  \n",
      " |  __iadd__(self, value, /)\n",
      " |      Return self+=value.\n",
      " |  \n",
      " |  __iand__(self, value, /)\n",
      " |      Return self&=value.\n",
      " |  \n",
      " |  __ifloordiv__(self, value, /)\n",
      " |      Return self//=value.\n",
      " |  \n",
      " |  __ilshift__(self, value, /)\n",
      " |      Return self<<=value.\n",
      " |  \n",
      " |  __imatmul__(self, value, /)\n",
      " |      Return self@=value.\n",
      " |  \n",
      " |  __imod__(self, value, /)\n",
      " |      Return self%=value.\n",
      " |  \n",
      " |  __imul__(self, value, /)\n",
      " |      Return self*=value.\n",
      " |  \n",
      " |  __index__(self, /)\n",
      " |      Return self converted to an integer, if self is suitable for use as an index into a list.\n",
      " |  \n",
      " |  __int__(self, /)\n",
      " |      int(self)\n",
      " |  \n",
      " |  __invert__(self, /)\n",
      " |      ~self\n",
      " |  \n",
      " |  __ior__(self, value, /)\n",
      " |      Return self|=value.\n",
      " |  \n",
      " |  __ipow__(self, value, /)\n",
      " |      Return self**=value.\n",
      " |  \n",
      " |  __irshift__(self, value, /)\n",
      " |      Return self>>=value.\n",
      " |  \n",
      " |  __isub__(self, value, /)\n",
      " |      Return self-=value.\n",
      " |  \n",
      " |  __iter__(self, /)\n",
      " |      Implement iter(self).\n",
      " |  \n",
      " |  __itruediv__(self, value, /)\n",
      " |      Return self/=value.\n",
      " |  \n",
      " |  __ixor__(self, value, /)\n",
      " |      Return self^=value.\n",
      " |  \n",
      " |  __le__(self, value, /)\n",
      " |      Return self<=value.\n",
      " |  \n",
      " |  __len__(self, /)\n",
      " |      Return len(self).\n",
      " |  \n",
      " |  __lshift__(self, value, /)\n",
      " |      Return self<<value.\n",
      " |  \n",
      " |  __lt__(self, value, /)\n",
      " |      Return self<value.\n",
      " |  \n",
      " |  __matmul__(self, value, /)\n",
      " |      Return self@value.\n",
      " |  \n",
      " |  __mod__(self, value, /)\n",
      " |      Return self%value.\n",
      " |  \n",
      " |  __mul__(self, value, /)\n",
      " |      Return self*value.\n",
      " |  \n",
      " |  __ne__(self, value, /)\n",
      " |      Return self!=value.\n",
      " |  \n",
      " |  __neg__(self, /)\n",
      " |      -self\n",
      " |  \n",
      " |  __or__(self, value, /)\n",
      " |      Return self|value.\n",
      " |  \n",
      " |  __pos__(self, /)\n",
      " |      +self\n",
      " |  \n",
      " |  __pow__(self, value, mod=None, /)\n",
      " |      Return pow(self, value, mod).\n",
      " |  \n",
      " |  __radd__(self, value, /)\n",
      " |      Return value+self.\n",
      " |  \n",
      " |  __rand__(self, value, /)\n",
      " |      Return value&self.\n",
      " |  \n",
      " |  __rdivmod__(self, value, /)\n",
      " |      Return divmod(value, self).\n",
      " |  \n",
      " |  __reduce__(...)\n",
      " |      a.__reduce__()\n",
      " |      \n",
      " |      For pickling.\n",
      " |  \n",
      " |  __reduce_ex__(...)\n",
      " |      Helper for pickle.\n",
      " |  \n",
      " |  __repr__(self, /)\n",
      " |      Return repr(self).\n",
      " |  \n",
      " |  __rfloordiv__(self, value, /)\n",
      " |      Return value//self.\n",
      " |  \n",
      " |  __rlshift__(self, value, /)\n",
      " |      Return value<<self.\n",
      " |  \n",
      " |  __rmatmul__(self, value, /)\n",
      " |      Return value@self.\n",
      " |  \n",
      " |  __rmod__(self, value, /)\n",
      " |      Return value%self.\n",
      " |  \n",
      " |  __rmul__(self, value, /)\n",
      " |      Return value*self.\n",
      " |  \n",
      " |  __ror__(self, value, /)\n",
      " |      Return value|self.\n",
      " |  \n",
      " |  __rpow__(self, value, mod=None, /)\n",
      " |      Return pow(value, self, mod).\n",
      " |  \n",
      " |  __rrshift__(self, value, /)\n",
      " |      Return value>>self.\n",
      " |  \n",
      " |  __rshift__(self, value, /)\n",
      " |      Return self>>value.\n",
      " |  \n",
      " |  __rsub__(self, value, /)\n",
      " |      Return value-self.\n",
      " |  \n",
      " |  __rtruediv__(self, value, /)\n",
      " |      Return value/self.\n",
      " |  \n",
      " |  __rxor__(self, value, /)\n",
      " |      Return value^self.\n",
      " |  \n",
      " |  __setitem__(self, key, value, /)\n",
      " |      Set self[key] to value.\n",
      " |  \n",
      " |  __setstate__(...)\n",
      " |      a.__setstate__(state, /)\n",
      " |      \n",
      " |      For unpickling.\n",
      " |      \n",
      " |      The `state` argument must be a sequence that contains the following\n",
      " |      elements:\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      version : int\n",
      " |          optional pickle version. If omitted defaults to 0.\n",
      " |      shape : tuple\n",
      " |      dtype : data-type\n",
      " |      isFortran : bool\n",
      " |      rawdata : string or list\n",
      " |          a binary string with the data (or a list if 'a' is an object array)\n",
      " |  \n",
      " |  __sizeof__(...)\n",
      " |      Size of object in memory, in bytes.\n",
      " |  \n",
      " |  __str__(self, /)\n",
      " |      Return str(self).\n",
      " |  \n",
      " |  __sub__(self, value, /)\n",
      " |      Return self-value.\n",
      " |  \n",
      " |  __truediv__(self, value, /)\n",
      " |      Return self/value.\n",
      " |  \n",
      " |  __xor__(self, value, /)\n",
      " |      Return self^value.\n",
      " |  \n",
      " |  all(...)\n",
      " |      a.all(axis=None, out=None, keepdims=False)\n",
      " |      \n",
      " |      Returns True if all elements evaluate to True.\n",
      " |      \n",
      " |      Refer to `numpy.all` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.all : equivalent function\n",
      " |  \n",
      " |  any(...)\n",
      " |      a.any(axis=None, out=None, keepdims=False)\n",
      " |      \n",
      " |      Returns True if any of the elements of `a` evaluate to True.\n",
      " |      \n",
      " |      Refer to `numpy.any` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.any : equivalent function\n",
      " |  \n",
      " |  argmax(...)\n",
      " |      a.argmax(axis=None, out=None)\n",
      " |      \n",
      " |      Return indices of the maximum values along the given axis.\n",
      " |      \n",
      " |      Refer to `numpy.argmax` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.argmax : equivalent function\n",
      " |  \n",
      " |  argmin(...)\n",
      " |      a.argmin(axis=None, out=None)\n",
      " |      \n",
      " |      Return indices of the minimum values along the given axis of `a`.\n",
      " |      \n",
      " |      Refer to `numpy.argmin` for detailed documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.argmin : equivalent function\n",
      " |  \n",
      " |  argpartition(...)\n",
      " |      a.argpartition(kth, axis=-1, kind='introselect', order=None)\n",
      " |      \n",
      " |      Returns the indices that would partition this array.\n",
      " |      \n",
      " |      Refer to `numpy.argpartition` for full documentation.\n",
      " |      \n",
      " |      .. versionadded:: 1.8.0\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.argpartition : equivalent function\n",
      " |  \n",
      " |  argsort(...)\n",
      " |      a.argsort(axis=-1, kind=None, order=None)\n",
      " |      \n",
      " |      Returns the indices that would sort this array.\n",
      " |      \n",
      " |      Refer to `numpy.argsort` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.argsort : equivalent function\n",
      " |  \n",
      " |  astype(...)\n",
      " |      a.astype(dtype, order='K', casting='unsafe', subok=True, copy=True)\n",
      " |      \n",
      " |      Copy of the array, cast to a specified type.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      dtype : str or dtype\n",
      " |          Typecode or data-type to which the array is cast.\n",
      " |      order : {'C', 'F', 'A', 'K'}, optional\n",
      " |          Controls the memory layout order of the result.\n",
      " |          'C' means C order, 'F' means Fortran order, 'A'\n",
      " |          means 'F' order if all the arrays are Fortran contiguous,\n",
      " |          'C' order otherwise, and 'K' means as close to the\n",
      " |          order the array elements appear in memory as possible.\n",
      " |          Default is 'K'.\n",
      " |      casting : {'no', 'equiv', 'safe', 'same_kind', 'unsafe'}, optional\n",
      " |          Controls what kind of data casting may occur. Defaults to 'unsafe'\n",
      " |          for backwards compatibility.\n",
      " |      \n",
      " |            * 'no' means the data types should not be cast at all.\n",
      " |            * 'equiv' means only byte-order changes are allowed.\n",
      " |            * 'safe' means only casts which can preserve values are allowed.\n",
      " |            * 'same_kind' means only safe casts or casts within a kind,\n",
      " |              like float64 to float32, are allowed.\n",
      " |            * 'unsafe' means any data conversions may be done.\n",
      " |      subok : bool, optional\n",
      " |          If True, then sub-classes will be passed-through (default), otherwise\n",
      " |          the returned array will be forced to be a base-class array.\n",
      " |      copy : bool, optional\n",
      " |          By default, astype always returns a newly allocated array. If this\n",
      " |          is set to false, and the `dtype`, `order`, and `subok`\n",
      " |          requirements are satisfied, the input array is returned instead\n",
      " |          of a copy.\n",
      " |      \n",
      " |      Returns\n",
      " |      -------\n",
      " |      arr_t : ndarray\n",
      " |          Unless `copy` is False and the other conditions for returning the input\n",
      " |          array are satisfied (see description for `copy` input parameter), `arr_t`\n",
      " |          is a new array of the same shape as the input array, with dtype, order\n",
      " |          given by `dtype`, `order`.\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      .. versionchanged:: 1.17.0\n",
      " |         Casting between a simple data type and a structured one is possible only\n",
      " |         for \"unsafe\" casting.  Casting to multiple fields is allowed, but\n",
      " |         casting from multiple fields is not.\n",
      " |      \n",
      " |      .. versionchanged:: 1.9.0\n",
      " |         Casting from numeric to string types in 'safe' casting mode requires\n",
      " |         that the string dtype length is long enough to store the max\n",
      " |         integer/float value converted.\n",
      " |      \n",
      " |      Raises\n",
      " |      ------\n",
      " |      ComplexWarning\n",
      " |          When casting from complex to float or int. To avoid this,\n",
      " |          one should use ``a.real.astype(t)``.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.array([1, 2, 2.5])\n",
      " |      >>> x\n",
      " |      array([1. ,  2. ,  2.5])\n",
      " |      \n",
      " |      >>> x.astype(int)\n",
      " |      array([1, 2, 2])\n",
      " |  \n",
      " |  byteswap(...)\n",
      " |      a.byteswap(inplace=False)\n",
      " |      \n",
      " |      Swap the bytes of the array elements\n",
      " |      \n",
      " |      Toggle between low-endian and big-endian data representation by\n",
      " |      returning a byteswapped array, optionally swapped in-place.\n",
      " |      Arrays of byte-strings are not swapped. The real and imaginary\n",
      " |      parts of a complex number are swapped individually.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      inplace : bool, optional\n",
      " |          If ``True``, swap bytes in-place, default is ``False``.\n",
      " |      \n",
      " |      Returns\n",
      " |      -------\n",
      " |      out : ndarray\n",
      " |          The byteswapped array. If `inplace` is ``True``, this is\n",
      " |          a view to self.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> A = np.array([1, 256, 8755], dtype=np.int16)\n",
      " |      >>> list(map(hex, A))\n",
      " |      ['0x1', '0x100', '0x2233']\n",
      " |      >>> A.byteswap(inplace=True)\n",
      " |      array([  256,     1, 13090], dtype=int16)\n",
      " |      >>> list(map(hex, A))\n",
      " |      ['0x100', '0x1', '0x3322']\n",
      " |      \n",
      " |      Arrays of byte-strings are not swapped\n",
      " |      \n",
      " |      >>> A = np.array([b'ceg', b'fac'])\n",
      " |      >>> A.byteswap()\n",
      " |      array([b'ceg', b'fac'], dtype='|S3')\n",
      " |      \n",
      " |      ``A.newbyteorder().byteswap()`` produces an array with the same values\n",
      " |        but different representation in memory\n",
      " |      \n",
      " |      >>> A = np.array([1, 2, 3])\n",
      " |      >>> A.view(np.uint8)\n",
      " |      array([1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0,\n",
      " |             0, 0], dtype=uint8)\n",
      " |      >>> A.newbyteorder().byteswap(inplace=True)\n",
      " |      array([1, 2, 3])\n",
      " |      >>> A.view(np.uint8)\n",
      " |      array([0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0,\n",
      " |             0, 3], dtype=uint8)\n",
      " |  \n",
      " |  choose(...)\n",
      " |      a.choose(choices, out=None, mode='raise')\n",
      " |      \n",
      " |      Use an index array to construct a new array from a set of choices.\n",
      " |      \n",
      " |      Refer to `numpy.choose` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.choose : equivalent function\n",
      " |  \n",
      " |  clip(...)\n",
      " |      a.clip(min=None, max=None, out=None, **kwargs)\n",
      " |      \n",
      " |      Return an array whose values are limited to ``[min, max]``.\n",
      " |      One of max or min must be given.\n",
      " |      \n",
      " |      Refer to `numpy.clip` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.clip : equivalent function\n",
      " |  \n",
      " |  compress(...)\n",
      " |      a.compress(condition, axis=None, out=None)\n",
      " |      \n",
      " |      Return selected slices of this array along given axis.\n",
      " |      \n",
      " |      Refer to `numpy.compress` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.compress : equivalent function\n",
      " |  \n",
      " |  conj(...)\n",
      " |      a.conj()\n",
      " |      \n",
      " |      Complex-conjugate all elements.\n",
      " |      \n",
      " |      Refer to `numpy.conjugate` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.conjugate : equivalent function\n",
      " |  \n",
      " |  conjugate(...)\n",
      " |      a.conjugate()\n",
      " |      \n",
      " |      Return the complex conjugate, element-wise.\n",
      " |      \n",
      " |      Refer to `numpy.conjugate` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.conjugate : equivalent function\n",
      " |  \n",
      " |  copy(...)\n",
      " |      a.copy(order='C')\n",
      " |      \n",
      " |      Return a copy of the array.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      order : {'C', 'F', 'A', 'K'}, optional\n",
      " |          Controls the memory layout of the copy. 'C' means C-order,\n",
      " |          'F' means F-order, 'A' means 'F' if `a` is Fortran contiguous,\n",
      " |          'C' otherwise. 'K' means match the layout of `a` as closely\n",
      " |          as possible. (Note that this function and :func:`numpy.copy` are very\n",
      " |          similar, but have different default values for their order=\n",
      " |          arguments.)\n",
      " |      \n",
      " |      See also\n",
      " |      --------\n",
      " |      numpy.copy\n",
      " |      numpy.copyto\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.array([[1,2,3],[4,5,6]], order='F')\n",
      " |      \n",
      " |      >>> y = x.copy()\n",
      " |      \n",
      " |      >>> x.fill(0)\n",
      " |      \n",
      " |      >>> x\n",
      " |      array([[0, 0, 0],\n",
      " |             [0, 0, 0]])\n",
      " |      \n",
      " |      >>> y\n",
      " |      array([[1, 2, 3],\n",
      " |             [4, 5, 6]])\n",
      " |      \n",
      " |      >>> y.flags['C_CONTIGUOUS']\n",
      " |      True\n",
      " |  \n",
      " |  cumprod(...)\n",
      " |      a.cumprod(axis=None, dtype=None, out=None)\n",
      " |      \n",
      " |      Return the cumulative product of the elements along the given axis.\n",
      " |      \n",
      " |      Refer to `numpy.cumprod` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.cumprod : equivalent function\n",
      " |  \n",
      " |  cumsum(...)\n",
      " |      a.cumsum(axis=None, dtype=None, out=None)\n",
      " |      \n",
      " |      Return the cumulative sum of the elements along the given axis.\n",
      " |      \n",
      " |      Refer to `numpy.cumsum` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.cumsum : equivalent function\n",
      " |  \n",
      " |  diagonal(...)\n",
      " |      a.diagonal(offset=0, axis1=0, axis2=1)\n",
      " |      \n",
      " |      Return specified diagonals. In NumPy 1.9 the returned array is a\n",
      " |      read-only view instead of a copy as in previous NumPy versions.  In\n",
      " |      a future version the read-only restriction will be removed.\n",
      " |      \n",
      " |      Refer to :func:`numpy.diagonal` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.diagonal : equivalent function\n",
      " |  \n",
      " |  dot(...)\n",
      " |      a.dot(b, out=None)\n",
      " |      \n",
      " |      Dot product of two arrays.\n",
      " |      \n",
      " |      Refer to `numpy.dot` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.dot : equivalent function\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> a = np.eye(2)\n",
      " |      >>> b = np.ones((2, 2)) * 2\n",
      " |      >>> a.dot(b)\n",
      " |      array([[2.,  2.],\n",
      " |             [2.,  2.]])\n",
      " |      \n",
      " |      This array method can be conveniently chained:\n",
      " |      \n",
      " |      >>> a.dot(b).dot(b)\n",
      " |      array([[8.,  8.],\n",
      " |             [8.,  8.]])\n",
      " |  \n",
      " |  dump(...)\n",
      " |      a.dump(file)\n",
      " |      \n",
      " |      Dump a pickle of the array to the specified file.\n",
      " |      The array can be read back with pickle.load or numpy.load.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      file : str or Path\n",
      " |          A string naming the dump file.\n",
      " |      \n",
      " |          .. versionchanged:: 1.17.0\n",
      " |              `pathlib.Path` objects are now accepted.\n",
      " |  \n",
      " |  dumps(...)\n",
      " |      a.dumps()\n",
      " |      \n",
      " |      Returns the pickle of the array as a string.\n",
      " |      pickle.loads or numpy.loads will convert the string back to an array.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      None\n",
      " |  \n",
      " |  fill(...)\n",
      " |      a.fill(value)\n",
      " |      \n",
      " |      Fill the array with a scalar value.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      value : scalar\n",
      " |          All elements of `a` will be assigned this value.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> a = np.array([1, 2])\n",
      " |      >>> a.fill(0)\n",
      " |      >>> a\n",
      " |      array([0, 0])\n",
      " |      >>> a = np.empty(2)\n",
      " |      >>> a.fill(1)\n",
      " |      >>> a\n",
      " |      array([1.,  1.])\n",
      " |  \n",
      " |  flatten(...)\n",
      " |      a.flatten(order='C')\n",
      " |      \n",
      " |      Return a copy of the array collapsed into one dimension.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      order : {'C', 'F', 'A', 'K'}, optional\n",
      " |          'C' means to flatten in row-major (C-style) order.\n",
      " |          'F' means to flatten in column-major (Fortran-\n",
      " |          style) order. 'A' means to flatten in column-major\n",
      " |          order if `a` is Fortran *contiguous* in memory,\n",
      " |          row-major order otherwise. 'K' means to flatten\n",
      " |          `a` in the order the elements occur in memory.\n",
      " |          The default is 'C'.\n",
      " |      \n",
      " |      Returns\n",
      " |      -------\n",
      " |      y : ndarray\n",
      " |          A copy of the input array, flattened to one dimension.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      ravel : Return a flattened array.\n",
      " |      flat : A 1-D flat iterator over the array.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> a = np.array([[1,2], [3,4]])\n",
      " |      >>> a.flatten()\n",
      " |      array([1, 2, 3, 4])\n",
      " |      >>> a.flatten('F')\n",
      " |      array([1, 3, 2, 4])\n",
      " |  \n",
      " |  getfield(...)\n",
      " |      a.getfield(dtype, offset=0)\n",
      " |      \n",
      " |      Returns a field of the given array as a certain type.\n",
      " |      \n",
      " |      A field is a view of the array data with a given data-type. The values in\n",
      " |      the view are determined by the given type and the offset into the current\n",
      " |      array in bytes. The offset needs to be such that the view dtype fits in the\n",
      " |      array dtype; for example an array of dtype complex128 has 16-byte elements.\n",
      " |      If taking a view with a 32-bit integer (4 bytes), the offset needs to be\n",
      " |      between 0 and 12 bytes.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      dtype : str or dtype\n",
      " |          The data type of the view. The dtype size of the view can not be larger\n",
      " |          than that of the array itself.\n",
      " |      offset : int\n",
      " |          Number of bytes to skip before beginning the element view.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.diag([1.+1.j]*2)\n",
      " |      >>> x[1, 1] = 2 + 4.j\n",
      " |      >>> x\n",
      " |      array([[1.+1.j,  0.+0.j],\n",
      " |             [0.+0.j,  2.+4.j]])\n",
      " |      >>> x.getfield(np.float64)\n",
      " |      array([[1.,  0.],\n",
      " |             [0.,  2.]])\n",
      " |      \n",
      " |      By choosing an offset of 8 bytes we can select the complex part of the\n",
      " |      array for our view:\n",
      " |      \n",
      " |      >>> x.getfield(np.float64, offset=8)\n",
      " |      array([[1.,  0.],\n",
      " |             [0.,  4.]])\n",
      " |  \n",
      " |  item(...)\n",
      " |      a.item(*args)\n",
      " |      \n",
      " |      Copy an element of an array to a standard Python scalar and return it.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      \\*args : Arguments (variable number and type)\n",
      " |      \n",
      " |          * none: in this case, the method only works for arrays\n",
      " |            with one element (`a.size == 1`), which element is\n",
      " |            copied into a standard Python scalar object and returned.\n",
      " |      \n",
      " |          * int_type: this argument is interpreted as a flat index into\n",
      " |            the array, specifying which element to copy and return.\n",
      " |      \n",
      " |          * tuple of int_types: functions as does a single int_type argument,\n",
      " |            except that the argument is interpreted as an nd-index into the\n",
      " |            array.\n",
      " |      \n",
      " |      Returns\n",
      " |      -------\n",
      " |      z : Standard Python scalar object\n",
      " |          A copy of the specified element of the array as a suitable\n",
      " |          Python scalar\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      When the data type of `a` is longdouble or clongdouble, item() returns\n",
      " |      a scalar array object because there is no available Python scalar that\n",
      " |      would not lose information. Void arrays return a buffer object for item(),\n",
      " |      unless fields are defined, in which case a tuple is returned.\n",
      " |      \n",
      " |      `item` is very similar to a[args], except, instead of an array scalar,\n",
      " |      a standard Python scalar is returned. This can be useful for speeding up\n",
      " |      access to elements of the array and doing arithmetic on elements of the\n",
      " |      array using Python's optimized math.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> np.random.seed(123)\n",
      " |      >>> x = np.random.randint(9, size=(3, 3))\n",
      " |      >>> x\n",
      " |      array([[2, 2, 6],\n",
      " |             [1, 3, 6],\n",
      " |             [1, 0, 1]])\n",
      " |      >>> x.item(3)\n",
      " |      1\n",
      " |      >>> x.item(7)\n",
      " |      0\n",
      " |      >>> x.item((0, 1))\n",
      " |      2\n",
      " |      >>> x.item((2, 2))\n",
      " |      1\n",
      " |  \n",
      " |  itemset(...)\n",
      " |      a.itemset(*args)\n",
      " |      \n",
      " |      Insert scalar into an array (scalar is cast to array's dtype, if possible)\n",
      " |      \n",
      " |      There must be at least 1 argument, and define the last argument\n",
      " |      as *item*.  Then, ``a.itemset(*args)`` is equivalent to but faster\n",
      " |      than ``a[args] = item``.  The item should be a scalar value and `args`\n",
      " |      must select a single item in the array `a`.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      \\*args : Arguments\n",
      " |          If one argument: a scalar, only used in case `a` is of size 1.\n",
      " |          If two arguments: the last argument is the value to be set\n",
      " |          and must be a scalar, the first argument specifies a single array\n",
      " |          element location. It is either an int or a tuple.\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      Compared to indexing syntax, `itemset` provides some speed increase\n",
      " |      for placing a scalar into a particular location in an `ndarray`,\n",
      " |      if you must do this.  However, generally this is discouraged:\n",
      " |      among other problems, it complicates the appearance of the code.\n",
      " |      Also, when using `itemset` (and `item`) inside a loop, be sure\n",
      " |      to assign the methods to a local variable to avoid the attribute\n",
      " |      look-up at each loop iteration.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> np.random.seed(123)\n",
      " |      >>> x = np.random.randint(9, size=(3, 3))\n",
      " |      >>> x\n",
      " |      array([[2, 2, 6],\n",
      " |             [1, 3, 6],\n",
      " |             [1, 0, 1]])\n",
      " |      >>> x.itemset(4, 0)\n",
      " |      >>> x.itemset((2, 2), 9)\n",
      " |      >>> x\n",
      " |      array([[2, 2, 6],\n",
      " |             [1, 0, 6],\n",
      " |             [1, 0, 9]])\n",
      " |  \n",
      " |  max(...)\n",
      " |      a.max(axis=None, out=None, keepdims=False, initial=<no value>, where=True)\n",
      " |      \n",
      " |      Return the maximum along a given axis.\n",
      " |      \n",
      " |      Refer to `numpy.amax` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.amax : equivalent function\n",
      " |  \n",
      " |  mean(...)\n",
      " |      a.mean(axis=None, dtype=None, out=None, keepdims=False)\n",
      " |      \n",
      " |      Returns the average of the array elements along given axis.\n",
      " |      \n",
      " |      Refer to `numpy.mean` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.mean : equivalent function\n",
      " |  \n",
      " |  min(...)\n",
      " |      a.min(axis=None, out=None, keepdims=False, initial=<no value>, where=True)\n",
      " |      \n",
      " |      Return the minimum along a given axis.\n",
      " |      \n",
      " |      Refer to `numpy.amin` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.amin : equivalent function\n",
      " |  \n",
      " |  newbyteorder(...)\n",
      " |      arr.newbyteorder(new_order='S')\n",
      " |      \n",
      " |      Return the array with the same data viewed with a different byte order.\n",
      " |      \n",
      " |      Equivalent to::\n",
      " |      \n",
      " |          arr.view(arr.dtype.newbytorder(new_order))\n",
      " |      \n",
      " |      Changes are also made in all fields and sub-arrays of the array data\n",
      " |      type.\n",
      " |      \n",
      " |      \n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      new_order : string, optional\n",
      " |          Byte order to force; a value from the byte order specifications\n",
      " |          below. `new_order` codes can be any of:\n",
      " |      \n",
      " |          * 'S' - swap dtype from current to opposite endian\n",
      " |          * {'<', 'L'} - little endian\n",
      " |          * {'>', 'B'} - big endian\n",
      " |          * {'=', 'N'} - native order\n",
      " |          * {'|', 'I'} - ignore (no change to byte order)\n",
      " |      \n",
      " |          The default value ('S') results in swapping the current\n",
      " |          byte order. The code does a case-insensitive check on the first\n",
      " |          letter of `new_order` for the alternatives above.  For example,\n",
      " |          any of 'B' or 'b' or 'biggish' are valid to specify big-endian.\n",
      " |      \n",
      " |      \n",
      " |      Returns\n",
      " |      -------\n",
      " |      new_arr : array\n",
      " |          New array object with the dtype reflecting given change to the\n",
      " |          byte order.\n",
      " |  \n",
      " |  nonzero(...)\n",
      " |      a.nonzero()\n",
      " |      \n",
      " |      Return the indices of the elements that are non-zero.\n",
      " |      \n",
      " |      Refer to `numpy.nonzero` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.nonzero : equivalent function\n",
      " |  \n",
      " |  partition(...)\n",
      " |      a.partition(kth, axis=-1, kind='introselect', order=None)\n",
      " |      \n",
      " |      Rearranges the elements in the array in such a way that the value of the\n",
      " |      element in kth position is in the position it would be in a sorted array.\n",
      " |      All elements smaller than the kth element are moved before this element and\n",
      " |      all equal or greater are moved behind it. The ordering of the elements in\n",
      " |      the two partitions is undefined.\n",
      " |      \n",
      " |      .. versionadded:: 1.8.0\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      kth : int or sequence of ints\n",
      " |          Element index to partition by. The kth element value will be in its\n",
      " |          final sorted position and all smaller elements will be moved before it\n",
      " |          and all equal or greater elements behind it.\n",
      " |          The order of all elements in the partitions is undefined.\n",
      " |          If provided with a sequence of kth it will partition all elements\n",
      " |          indexed by kth of them into their sorted position at once.\n",
      " |      axis : int, optional\n",
      " |          Axis along which to sort. Default is -1, which means sort along the\n",
      " |          last axis.\n",
      " |      kind : {'introselect'}, optional\n",
      " |          Selection algorithm. Default is 'introselect'.\n",
      " |      order : str or list of str, optional\n",
      " |          When `a` is an array with fields defined, this argument specifies\n",
      " |          which fields to compare first, second, etc. A single field can\n",
      " |          be specified as a string, and not all fields need to be specified,\n",
      " |          but unspecified fields will still be used, in the order in which\n",
      " |          they come up in the dtype, to break ties.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.partition : Return a parititioned copy of an array.\n",
      " |      argpartition : Indirect partition.\n",
      " |      sort : Full sort.\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      See ``np.partition`` for notes on the different algorithms.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> a = np.array([3, 4, 2, 1])\n",
      " |      >>> a.partition(3)\n",
      " |      >>> a\n",
      " |      array([2, 1, 3, 4])\n",
      " |      \n",
      " |      >>> a.partition((1, 3))\n",
      " |      >>> a\n",
      " |      array([1, 2, 3, 4])\n",
      " |  \n",
      " |  prod(...)\n",
      " |      a.prod(axis=None, dtype=None, out=None, keepdims=False, initial=1, where=True)\n",
      " |      \n",
      " |      Return the product of the array elements over the given axis\n",
      " |      \n",
      " |      Refer to `numpy.prod` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.prod : equivalent function\n",
      " |  \n",
      " |  ptp(...)\n",
      " |      a.ptp(axis=None, out=None, keepdims=False)\n",
      " |      \n",
      " |      Peak to peak (maximum - minimum) value along a given axis.\n",
      " |      \n",
      " |      Refer to `numpy.ptp` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.ptp : equivalent function\n",
      " |  \n",
      " |  put(...)\n",
      " |      a.put(indices, values, mode='raise')\n",
      " |      \n",
      " |      Set ``a.flat[n] = values[n]`` for all `n` in indices.\n",
      " |      \n",
      " |      Refer to `numpy.put` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.put : equivalent function\n",
      " |  \n",
      " |  ravel(...)\n",
      " |      a.ravel([order])\n",
      " |      \n",
      " |      Return a flattened array.\n",
      " |      \n",
      " |      Refer to `numpy.ravel` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.ravel : equivalent function\n",
      " |      \n",
      " |      ndarray.flat : a flat iterator on the array.\n",
      " |  \n",
      " |  repeat(...)\n",
      " |      a.repeat(repeats, axis=None)\n",
      " |      \n",
      " |      Repeat elements of an array.\n",
      " |      \n",
      " |      Refer to `numpy.repeat` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.repeat : equivalent function\n",
      " |  \n",
      " |  reshape(...)\n",
      " |      a.reshape(shape, order='C')\n",
      " |      \n",
      " |      Returns an array containing the same data with a new shape.\n",
      " |      \n",
      " |      Refer to `numpy.reshape` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.reshape : equivalent function\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      Unlike the free function `numpy.reshape`, this method on `ndarray` allows\n",
      " |      the elements of the shape parameter to be passed in as separate arguments.\n",
      " |      For example, ``a.reshape(10, 11)`` is equivalent to\n",
      " |      ``a.reshape((10, 11))``.\n",
      " |  \n",
      " |  resize(...)\n",
      " |      a.resize(new_shape, refcheck=True)\n",
      " |      \n",
      " |      Change shape and size of array in-place.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      new_shape : tuple of ints, or `n` ints\n",
      " |          Shape of resized array.\n",
      " |      refcheck : bool, optional\n",
      " |          If False, reference count will not be checked. Default is True.\n",
      " |      \n",
      " |      Returns\n",
      " |      -------\n",
      " |      None\n",
      " |      \n",
      " |      Raises\n",
      " |      ------\n",
      " |      ValueError\n",
      " |          If `a` does not own its own data or references or views to it exist,\n",
      " |          and the data memory must be changed.\n",
      " |          PyPy only: will always raise if the data memory must be changed, since\n",
      " |          there is no reliable way to determine if references or views to it\n",
      " |          exist.\n",
      " |      \n",
      " |      SystemError\n",
      " |          If the `order` keyword argument is specified. This behaviour is a\n",
      " |          bug in NumPy.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      resize : Return a new array with the specified shape.\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      This reallocates space for the data area if necessary.\n",
      " |      \n",
      " |      Only contiguous arrays (data elements consecutive in memory) can be\n",
      " |      resized.\n",
      " |      \n",
      " |      The purpose of the reference count check is to make sure you\n",
      " |      do not use this array as a buffer for another Python object and then\n",
      " |      reallocate the memory. However, reference counts can increase in\n",
      " |      other ways so if you are sure that you have not shared the memory\n",
      " |      for this array with another Python object, then you may safely set\n",
      " |      `refcheck` to False.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      Shrinking an array: array is flattened (in the order that the data are\n",
      " |      stored in memory), resized, and reshaped:\n",
      " |      \n",
      " |      >>> a = np.array([[0, 1], [2, 3]], order='C')\n",
      " |      >>> a.resize((2, 1))\n",
      " |      >>> a\n",
      " |      array([[0],\n",
      " |             [1]])\n",
      " |      \n",
      " |      >>> a = np.array([[0, 1], [2, 3]], order='F')\n",
      " |      >>> a.resize((2, 1))\n",
      " |      >>> a\n",
      " |      array([[0],\n",
      " |             [2]])\n",
      " |      \n",
      " |      Enlarging an array: as above, but missing entries are filled with zeros:\n",
      " |      \n",
      " |      >>> b = np.array([[0, 1], [2, 3]])\n",
      " |      >>> b.resize(2, 3) # new_shape parameter doesn't have to be a tuple\n",
      " |      >>> b\n",
      " |      array([[0, 1, 2],\n",
      " |             [3, 0, 0]])\n",
      " |      \n",
      " |      Referencing an array prevents resizing...\n",
      " |      \n",
      " |      >>> c = a\n",
      " |      >>> a.resize((1, 1))\n",
      " |      Traceback (most recent call last):\n",
      " |      ...\n",
      " |      ValueError: cannot resize an array that references or is referenced ...\n",
      " |      \n",
      " |      Unless `refcheck` is False:\n",
      " |      \n",
      " |      >>> a.resize((1, 1), refcheck=False)\n",
      " |      >>> a\n",
      " |      array([[0]])\n",
      " |      >>> c\n",
      " |      array([[0]])\n",
      " |  \n",
      " |  round(...)\n",
      " |      a.round(decimals=0, out=None)\n",
      " |      \n",
      " |      Return `a` with each element rounded to the given number of decimals.\n",
      " |      \n",
      " |      Refer to `numpy.around` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.around : equivalent function\n",
      " |  \n",
      " |  searchsorted(...)\n",
      " |      a.searchsorted(v, side='left', sorter=None)\n",
      " |      \n",
      " |      Find indices where elements of v should be inserted in a to maintain order.\n",
      " |      \n",
      " |      For full documentation, see `numpy.searchsorted`\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.searchsorted : equivalent function\n",
      " |  \n",
      " |  setfield(...)\n",
      " |      a.setfield(val, dtype, offset=0)\n",
      " |      \n",
      " |      Put a value into a specified place in a field defined by a data-type.\n",
      " |      \n",
      " |      Place `val` into `a`'s field defined by `dtype` and beginning `offset`\n",
      " |      bytes into the field.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      val : object\n",
      " |          Value to be placed in field.\n",
      " |      dtype : dtype object\n",
      " |          Data-type of the field in which to place `val`.\n",
      " |      offset : int, optional\n",
      " |          The number of bytes into the field at which to place `val`.\n",
      " |      \n",
      " |      Returns\n",
      " |      -------\n",
      " |      None\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      getfield\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.eye(3)\n",
      " |      >>> x.getfield(np.float64)\n",
      " |      array([[1.,  0.,  0.],\n",
      " |             [0.,  1.,  0.],\n",
      " |             [0.,  0.,  1.]])\n",
      " |      >>> x.setfield(3, np.int32)\n",
      " |      >>> x.getfield(np.int32)\n",
      " |      array([[3, 3, 3],\n",
      " |             [3, 3, 3],\n",
      " |             [3, 3, 3]], dtype=int32)\n",
      " |      >>> x\n",
      " |      array([[1.0e+000, 1.5e-323, 1.5e-323],\n",
      " |             [1.5e-323, 1.0e+000, 1.5e-323],\n",
      " |             [1.5e-323, 1.5e-323, 1.0e+000]])\n",
      " |      >>> x.setfield(np.eye(3), np.int32)\n",
      " |      >>> x\n",
      " |      array([[1.,  0.,  0.],\n",
      " |             [0.,  1.,  0.],\n",
      " |             [0.,  0.,  1.]])\n",
      " |  \n",
      " |  setflags(...)\n",
      " |      a.setflags(write=None, align=None, uic=None)\n",
      " |      \n",
      " |      Set array flags WRITEABLE, ALIGNED, (WRITEBACKIFCOPY and UPDATEIFCOPY),\n",
      " |      respectively.\n",
      " |      \n",
      " |      These Boolean-valued flags affect how numpy interprets the memory\n",
      " |      area used by `a` (see Notes below). The ALIGNED flag can only\n",
      " |      be set to True if the data is actually aligned according to the type.\n",
      " |      The WRITEBACKIFCOPY and (deprecated) UPDATEIFCOPY flags can never be set\n",
      " |      to True. The flag WRITEABLE can only be set to True if the array owns its\n",
      " |      own memory, or the ultimate owner of the memory exposes a writeable buffer\n",
      " |      interface, or is a string. (The exception for string is made so that\n",
      " |      unpickling can be done without copying memory.)\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      write : bool, optional\n",
      " |          Describes whether or not `a` can be written to.\n",
      " |      align : bool, optional\n",
      " |          Describes whether or not `a` is aligned properly for its type.\n",
      " |      uic : bool, optional\n",
      " |          Describes whether or not `a` is a copy of another \"base\" array.\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      Array flags provide information about how the memory area used\n",
      " |      for the array is to be interpreted. There are 7 Boolean flags\n",
      " |      in use, only four of which can be changed by the user:\n",
      " |      WRITEBACKIFCOPY, UPDATEIFCOPY, WRITEABLE, and ALIGNED.\n",
      " |      \n",
      " |      WRITEABLE (W) the data area can be written to;\n",
      " |      \n",
      " |      ALIGNED (A) the data and strides are aligned appropriately for the hardware\n",
      " |      (as determined by the compiler);\n",
      " |      \n",
      " |      UPDATEIFCOPY (U) (deprecated), replaced by WRITEBACKIFCOPY;\n",
      " |      \n",
      " |      WRITEBACKIFCOPY (X) this array is a copy of some other array (referenced\n",
      " |      by .base). When the C-API function PyArray_ResolveWritebackIfCopy is\n",
      " |      called, the base array will be updated with the contents of this array.\n",
      " |      \n",
      " |      All flags can be accessed using the single (upper case) letter as well\n",
      " |      as the full name.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> y = np.array([[3, 1, 7],\n",
      " |      ...               [2, 0, 0],\n",
      " |      ...               [8, 5, 9]])\n",
      " |      >>> y\n",
      " |      array([[3, 1, 7],\n",
      " |             [2, 0, 0],\n",
      " |             [8, 5, 9]])\n",
      " |      >>> y.flags\n",
      " |        C_CONTIGUOUS : True\n",
      " |        F_CONTIGUOUS : False\n",
      " |        OWNDATA : True\n",
      " |        WRITEABLE : True\n",
      " |        ALIGNED : True\n",
      " |        WRITEBACKIFCOPY : False\n",
      " |        UPDATEIFCOPY : False\n",
      " |      >>> y.setflags(write=0, align=0)\n",
      " |      >>> y.flags\n",
      " |        C_CONTIGUOUS : True\n",
      " |        F_CONTIGUOUS : False\n",
      " |        OWNDATA : True\n",
      " |        WRITEABLE : False\n",
      " |        ALIGNED : False\n",
      " |        WRITEBACKIFCOPY : False\n",
      " |        UPDATEIFCOPY : False\n",
      " |      >>> y.setflags(uic=1)\n",
      " |      Traceback (most recent call last):\n",
      " |        File \"<stdin>\", line 1, in <module>\n",
      " |      ValueError: cannot set WRITEBACKIFCOPY flag to True\n",
      " |  \n",
      " |  sort(...)\n",
      " |      a.sort(axis=-1, kind=None, order=None)\n",
      " |      \n",
      " |      Sort an array in-place. Refer to `numpy.sort` for full documentation.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      axis : int, optional\n",
      " |          Axis along which to sort. Default is -1, which means sort along the\n",
      " |          last axis.\n",
      " |      kind : {'quicksort', 'mergesort', 'heapsort', 'stable'}, optional\n",
      " |          Sorting algorithm. The default is 'quicksort'. Note that both 'stable'\n",
      " |          and 'mergesort' use timsort under the covers and, in general, the\n",
      " |          actual implementation will vary with datatype. The 'mergesort' option\n",
      " |          is retained for backwards compatibility.\n",
      " |      \n",
      " |          .. versionchanged:: 1.15.0.\n",
      " |             The 'stable' option was added.\n",
      " |      \n",
      " |      order : str or list of str, optional\n",
      " |          When `a` is an array with fields defined, this argument specifies\n",
      " |          which fields to compare first, second, etc.  A single field can\n",
      " |          be specified as a string, and not all fields need be specified,\n",
      " |          but unspecified fields will still be used, in the order in which\n",
      " |          they come up in the dtype, to break ties.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.sort : Return a sorted copy of an array.\n",
      " |      numpy.argsort : Indirect sort.\n",
      " |      numpy.lexsort : Indirect stable sort on multiple keys.\n",
      " |      numpy.searchsorted : Find elements in sorted array.\n",
      " |      numpy.partition: Partial sort.\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      See `numpy.sort` for notes on the different sorting algorithms.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> a = np.array([[1,4], [3,1]])\n",
      " |      >>> a.sort(axis=1)\n",
      " |      >>> a\n",
      " |      array([[1, 4],\n",
      " |             [1, 3]])\n",
      " |      >>> a.sort(axis=0)\n",
      " |      >>> a\n",
      " |      array([[1, 3],\n",
      " |             [1, 4]])\n",
      " |      \n",
      " |      Use the `order` keyword to specify a field to use when sorting a\n",
      " |      structured array:\n",
      " |      \n",
      " |      >>> a = np.array([('a', 2), ('c', 1)], dtype=[('x', 'S1'), ('y', int)])\n",
      " |      >>> a.sort(order='y')\n",
      " |      >>> a\n",
      " |      array([(b'c', 1), (b'a', 2)],\n",
      " |            dtype=[('x', 'S1'), ('y', '<i8')])\n",
      " |  \n",
      " |  squeeze(...)\n",
      " |      a.squeeze(axis=None)\n",
      " |      \n",
      " |      Remove single-dimensional entries from the shape of `a`.\n",
      " |      \n",
      " |      Refer to `numpy.squeeze` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.squeeze : equivalent function\n",
      " |  \n",
      " |  std(...)\n",
      " |      a.std(axis=None, dtype=None, out=None, ddof=0, keepdims=False)\n",
      " |      \n",
      " |      Returns the standard deviation of the array elements along given axis.\n",
      " |      \n",
      " |      Refer to `numpy.std` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.std : equivalent function\n",
      " |  \n",
      " |  sum(...)\n",
      " |      a.sum(axis=None, dtype=None, out=None, keepdims=False, initial=0, where=True)\n",
      " |      \n",
      " |      Return the sum of the array elements over the given axis.\n",
      " |      \n",
      " |      Refer to `numpy.sum` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.sum : equivalent function\n",
      " |  \n",
      " |  swapaxes(...)\n",
      " |      a.swapaxes(axis1, axis2)\n",
      " |      \n",
      " |      Return a view of the array with `axis1` and `axis2` interchanged.\n",
      " |      \n",
      " |      Refer to `numpy.swapaxes` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.swapaxes : equivalent function\n",
      " |  \n",
      " |  take(...)\n",
      " |      a.take(indices, axis=None, out=None, mode='raise')\n",
      " |      \n",
      " |      Return an array formed from the elements of `a` at the given indices.\n",
      " |      \n",
      " |      Refer to `numpy.take` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.take : equivalent function\n",
      " |  \n",
      " |  tobytes(...)\n",
      " |      a.tobytes(order='C')\n",
      " |      \n",
      " |      Construct Python bytes containing the raw data bytes in the array.\n",
      " |      \n",
      " |      Constructs Python bytes showing a copy of the raw contents of\n",
      " |      data memory. The bytes object can be produced in either 'C' or 'Fortran',\n",
      " |      or 'Any' order (the default is 'C'-order). 'Any' order means C-order\n",
      " |      unless the F_CONTIGUOUS flag in the array is set, in which case it\n",
      " |      means 'Fortran' order.\n",
      " |      \n",
      " |      .. versionadded:: 1.9.0\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      order : {'C', 'F', None}, optional\n",
      " |          Order of the data for multidimensional arrays:\n",
      " |          C, Fortran, or the same as for the original array.\n",
      " |      \n",
      " |      Returns\n",
      " |      -------\n",
      " |      s : bytes\n",
      " |          Python bytes exhibiting a copy of `a`'s raw data.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.array([[0, 1], [2, 3]], dtype='<u2')\n",
      " |      >>> x.tobytes()\n",
      " |      b'\\x00\\x00\\x01\\x00\\x02\\x00\\x03\\x00'\n",
      " |      >>> x.tobytes('C') == x.tobytes()\n",
      " |      True\n",
      " |      >>> x.tobytes('F')\n",
      " |      b'\\x00\\x00\\x02\\x00\\x01\\x00\\x03\\x00'\n",
      " |  \n",
      " |  tofile(...)\n",
      " |      a.tofile(fid, sep=\"\", format=\"%s\")\n",
      " |      \n",
      " |      Write array to a file as text or binary (default).\n",
      " |      \n",
      " |      Data is always written in 'C' order, independent of the order of `a`.\n",
      " |      The data produced by this method can be recovered using the function\n",
      " |      fromfile().\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      fid : file or str or Path\n",
      " |          An open file object, or a string containing a filename.\n",
      " |      \n",
      " |          .. versionchanged:: 1.17.0\n",
      " |              `pathlib.Path` objects are now accepted.\n",
      " |      \n",
      " |      sep : str\n",
      " |          Separator between array items for text output.\n",
      " |          If \"\" (empty), a binary file is written, equivalent to\n",
      " |          ``file.write(a.tobytes())``.\n",
      " |      format : str\n",
      " |          Format string for text file output.\n",
      " |          Each entry in the array is formatted to text by first converting\n",
      " |          it to the closest Python type, and then using \"format\" % item.\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      This is a convenience function for quick storage of array data.\n",
      " |      Information on endianness and precision is lost, so this method is not a\n",
      " |      good choice for files intended to archive data or transport data between\n",
      " |      machines with different endianness. Some of these problems can be overcome\n",
      " |      by outputting the data as text files, at the expense of speed and file\n",
      " |      size.\n",
      " |      \n",
      " |      When fid is a file object, array contents are directly written to the\n",
      " |      file, bypassing the file object's ``write`` method. As a result, tofile\n",
      " |      cannot be used with files objects supporting compression (e.g., GzipFile)\n",
      " |      or file-like objects that do not support ``fileno()`` (e.g., BytesIO).\n",
      " |  \n",
      " |  tolist(...)\n",
      " |      a.tolist()\n",
      " |      \n",
      " |      Return the array as an ``a.ndim``-levels deep nested list of Python scalars.\n",
      " |      \n",
      " |      Return a copy of the array data as a (nested) Python list.\n",
      " |      Data items are converted to the nearest compatible builtin Python type, via\n",
      " |      the `~numpy.ndarray.item` function.\n",
      " |      \n",
      " |      If ``a.ndim`` is 0, then since the depth of the nested list is 0, it will\n",
      " |      not be a list at all, but a simple Python scalar.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      none\n",
      " |      \n",
      " |      Returns\n",
      " |      -------\n",
      " |      y : object, or list of object, or list of list of object, or ...\n",
      " |          The possibly nested list of array elements.\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      The array may be recreated via ``a = np.array(a.tolist())``, although this\n",
      " |      may sometimes lose precision.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      For a 1D array, ``a.tolist()`` is almost the same as ``list(a)``, \n",
      " |      except that ``tolist`` changes numpy scalars to Python scalars:\n",
      " |      \n",
      " |      >>> a = np.uint32([1, 2])\n",
      " |      >>> a_list = list(a)\n",
      " |      >>> a_list\n",
      " |      [1, 2]\n",
      " |      >>> type(a_list[0])\n",
      " |      <class 'numpy.uint32'>\n",
      " |      >>> a_tolist = a.tolist()\n",
      " |      >>> a_tolist\n",
      " |      [1, 2]\n",
      " |      >>> type(a_tolist[0])\n",
      " |      <class 'int'>\n",
      " |      \n",
      " |      Additionally, for a 2D array, ``tolist`` applies recursively:\n",
      " |      \n",
      " |      >>> a = np.array([[1, 2], [3, 4]])\n",
      " |      >>> list(a)\n",
      " |      [array([1, 2]), array([3, 4])]\n",
      " |      >>> a.tolist()\n",
      " |      [[1, 2], [3, 4]]\n",
      " |      \n",
      " |      The base case for this recursion is a 0D array:\n",
      " |      \n",
      " |      >>> a = np.array(1)\n",
      " |      >>> list(a)\n",
      " |      Traceback (most recent call last):\n",
      " |        ...\n",
      " |      TypeError: iteration over a 0-d array\n",
      " |      >>> a.tolist()\n",
      " |      1\n",
      " |  \n",
      " |  tostring(...)\n",
      " |      a.tostring(order='C')\n",
      " |      \n",
      " |      Construct Python bytes containing the raw data bytes in the array.\n",
      " |      \n",
      " |      Constructs Python bytes showing a copy of the raw contents of\n",
      " |      data memory. The bytes object can be produced in either 'C' or 'Fortran',\n",
      " |      or 'Any' order (the default is 'C'-order). 'Any' order means C-order\n",
      " |      unless the F_CONTIGUOUS flag in the array is set, in which case it\n",
      " |      means 'Fortran' order.\n",
      " |      \n",
      " |      This function is a compatibility alias for tobytes. Despite its name it returns bytes not strings.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      order : {'C', 'F', None}, optional\n",
      " |          Order of the data for multidimensional arrays:\n",
      " |          C, Fortran, or the same as for the original array.\n",
      " |      \n",
      " |      Returns\n",
      " |      -------\n",
      " |      s : bytes\n",
      " |          Python bytes exhibiting a copy of `a`'s raw data.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.array([[0, 1], [2, 3]], dtype='<u2')\n",
      " |      >>> x.tobytes()\n",
      " |      b'\\x00\\x00\\x01\\x00\\x02\\x00\\x03\\x00'\n",
      " |      >>> x.tobytes('C') == x.tobytes()\n",
      " |      True\n",
      " |      >>> x.tobytes('F')\n",
      " |      b'\\x00\\x00\\x02\\x00\\x01\\x00\\x03\\x00'\n",
      " |  \n",
      " |  trace(...)\n",
      " |      a.trace(offset=0, axis1=0, axis2=1, dtype=None, out=None)\n",
      " |      \n",
      " |      Return the sum along diagonals of the array.\n",
      " |      \n",
      " |      Refer to `numpy.trace` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.trace : equivalent function\n",
      " |  \n",
      " |  transpose(...)\n",
      " |      a.transpose(*axes)\n",
      " |      \n",
      " |      Returns a view of the array with axes transposed.\n",
      " |      \n",
      " |      For a 1-D array this has no effect, as a transposed vector is simply the\n",
      " |      same vector. To convert a 1-D array into a 2D column vector, an additional\n",
      " |      dimension must be added. `np.atleast2d(a).T` achieves this, as does\n",
      " |      `a[:, np.newaxis]`.\n",
      " |      For a 2-D array, this is a standard matrix transpose.\n",
      " |      For an n-D array, if axes are given, their order indicates how the\n",
      " |      axes are permuted (see Examples). If axes are not provided and\n",
      " |      ``a.shape = (i[0], i[1], ... i[n-2], i[n-1])``, then\n",
      " |      ``a.transpose().shape = (i[n-1], i[n-2], ... i[1], i[0])``.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      axes : None, tuple of ints, or `n` ints\n",
      " |      \n",
      " |       * None or no argument: reverses the order of the axes.\n",
      " |      \n",
      " |       * tuple of ints: `i` in the `j`-th place in the tuple means `a`'s\n",
      " |         `i`-th axis becomes `a.transpose()`'s `j`-th axis.\n",
      " |      \n",
      " |       * `n` ints: same as an n-tuple of the same ints (this form is\n",
      " |         intended simply as a \"convenience\" alternative to the tuple form)\n",
      " |      \n",
      " |      Returns\n",
      " |      -------\n",
      " |      out : ndarray\n",
      " |          View of `a`, with axes suitably permuted.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      ndarray.T : Array property returning the array transposed.\n",
      " |      ndarray.reshape : Give a new shape to an array without changing its data.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> a = np.array([[1, 2], [3, 4]])\n",
      " |      >>> a\n",
      " |      array([[1, 2],\n",
      " |             [3, 4]])\n",
      " |      >>> a.transpose()\n",
      " |      array([[1, 3],\n",
      " |             [2, 4]])\n",
      " |      >>> a.transpose((1, 0))\n",
      " |      array([[1, 3],\n",
      " |             [2, 4]])\n",
      " |      >>> a.transpose(1, 0)\n",
      " |      array([[1, 3],\n",
      " |             [2, 4]])\n",
      " |  \n",
      " |  var(...)\n",
      " |      a.var(axis=None, dtype=None, out=None, ddof=0, keepdims=False)\n",
      " |      \n",
      " |      Returns the variance of the array elements, along given axis.\n",
      " |      \n",
      " |      Refer to `numpy.var` for full documentation.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.var : equivalent function\n",
      " |  \n",
      " |  view(...)\n",
      " |      a.view(dtype=None, type=None)\n",
      " |      \n",
      " |      New view of array with the same data.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      dtype : data-type or ndarray sub-class, optional\n",
      " |          Data-type descriptor of the returned view, e.g., float32 or int16. The\n",
      " |          default, None, results in the view having the same data-type as `a`.\n",
      " |          This argument can also be specified as an ndarray sub-class, which\n",
      " |          then specifies the type of the returned object (this is equivalent to\n",
      " |          setting the ``type`` parameter).\n",
      " |      type : Python type, optional\n",
      " |          Type of the returned view, e.g., ndarray or matrix.  Again, the\n",
      " |          default None results in type preservation.\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      ``a.view()`` is used two different ways:\n",
      " |      \n",
      " |      ``a.view(some_dtype)`` or ``a.view(dtype=some_dtype)`` constructs a view\n",
      " |      of the array's memory with a different data-type.  This can cause a\n",
      " |      reinterpretation of the bytes of memory.\n",
      " |      \n",
      " |      ``a.view(ndarray_subclass)`` or ``a.view(type=ndarray_subclass)`` just\n",
      " |      returns an instance of `ndarray_subclass` that looks at the same array\n",
      " |      (same shape, dtype, etc.)  This does not cause a reinterpretation of the\n",
      " |      memory.\n",
      " |      \n",
      " |      For ``a.view(some_dtype)``, if ``some_dtype`` has a different number of\n",
      " |      bytes per entry than the previous dtype (for example, converting a\n",
      " |      regular array to a structured array), then the behavior of the view\n",
      " |      cannot be predicted just from the superficial appearance of ``a`` (shown\n",
      " |      by ``print(a)``). It also depends on exactly how ``a`` is stored in\n",
      " |      memory. Therefore if ``a`` is C-ordered versus fortran-ordered, versus\n",
      " |      defined as a slice or transpose, etc., the view may give different\n",
      " |      results.\n",
      " |      \n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.array([(1, 2)], dtype=[('a', np.int8), ('b', np.int8)])\n",
      " |      \n",
      " |      Viewing array data using a different type and dtype:\n",
      " |      \n",
      " |      >>> y = x.view(dtype=np.int16, type=np.matrix)\n",
      " |      >>> y\n",
      " |      matrix([[513]], dtype=int16)\n",
      " |      >>> print(type(y))\n",
      " |      <class 'numpy.matrix'>\n",
      " |      \n",
      " |      Creating a view on a structured array so it can be used in calculations\n",
      " |      \n",
      " |      >>> x = np.array([(1, 2),(3,4)], dtype=[('a', np.int8), ('b', np.int8)])\n",
      " |      >>> xv = x.view(dtype=np.int8).reshape(-1,2)\n",
      " |      >>> xv\n",
      " |      array([[1, 2],\n",
      " |             [3, 4]], dtype=int8)\n",
      " |      >>> xv.mean(0)\n",
      " |      array([2.,  3.])\n",
      " |      \n",
      " |      Making changes to the view changes the underlying array\n",
      " |      \n",
      " |      >>> xv[0,1] = 20\n",
      " |      >>> x\n",
      " |      array([(1, 20), (3,  4)], dtype=[('a', 'i1'), ('b', 'i1')])\n",
      " |      \n",
      " |      Using a view to convert an array to a recarray:\n",
      " |      \n",
      " |      >>> z = x.view(np.recarray)\n",
      " |      >>> z.a\n",
      " |      array([1, 3], dtype=int8)\n",
      " |      \n",
      " |      Views share data:\n",
      " |      \n",
      " |      >>> x[0] = (9, 10)\n",
      " |      >>> z[0]\n",
      " |      (9, 10)\n",
      " |      \n",
      " |      Views that change the dtype size (bytes per entry) should normally be\n",
      " |      avoided on arrays defined by slices, transposes, fortran-ordering, etc.:\n",
      " |      \n",
      " |      >>> x = np.array([[1,2,3],[4,5,6]], dtype=np.int16)\n",
      " |      >>> y = x[:, 0:2]\n",
      " |      >>> y\n",
      " |      array([[1, 2],\n",
      " |             [4, 5]], dtype=int16)\n",
      " |      >>> y.view(dtype=[('width', np.int16), ('length', np.int16)])\n",
      " |      Traceback (most recent call last):\n",
      " |          ...\n",
      " |      ValueError: To change to a dtype of a different size, the array must be C-contiguous\n",
      " |      >>> z = y.copy()\n",
      " |      >>> z.view(dtype=[('width', np.int16), ('length', np.int16)])\n",
      " |      array([[(1, 2)],\n",
      " |             [(4, 5)]], dtype=[('width', '<i2'), ('length', '<i2')])\n",
      " |  \n",
      " |  ----------------------------------------------------------------------\n",
      " |  Static methods defined here:\n",
      " |  \n",
      " |  __new__(*args, **kwargs) from builtins.type\n",
      " |      Create and return a new object.  See help(type) for accurate signature.\n",
      " |  \n",
      " |  ----------------------------------------------------------------------\n",
      " |  Data descriptors defined here:\n",
      " |  \n",
      " |  T\n",
      " |      The transposed array.\n",
      " |      \n",
      " |      Same as ``self.transpose()``.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.array([[1.,2.],[3.,4.]])\n",
      " |      >>> x\n",
      " |      array([[ 1.,  2.],\n",
      " |             [ 3.,  4.]])\n",
      " |      >>> x.T\n",
      " |      array([[ 1.,  3.],\n",
      " |             [ 2.,  4.]])\n",
      " |      >>> x = np.array([1.,2.,3.,4.])\n",
      " |      >>> x\n",
      " |      array([ 1.,  2.,  3.,  4.])\n",
      " |      >>> x.T\n",
      " |      array([ 1.,  2.,  3.,  4.])\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      transpose\n",
      " |  \n",
      " |  __array_finalize__\n",
      " |      None.\n",
      " |  \n",
      " |  __array_interface__\n",
      " |      Array protocol: Python side.\n",
      " |  \n",
      " |  __array_priority__\n",
      " |      Array priority.\n",
      " |  \n",
      " |  __array_struct__\n",
      " |      Array protocol: C-struct side.\n",
      " |  \n",
      " |  base\n",
      " |      Base object if memory is from some other object.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      The base of an array that owns its memory is None:\n",
      " |      \n",
      " |      >>> x = np.array([1,2,3,4])\n",
      " |      >>> x.base is None\n",
      " |      True\n",
      " |      \n",
      " |      Slicing creates a view, whose memory is shared with x:\n",
      " |      \n",
      " |      >>> y = x[2:]\n",
      " |      >>> y.base is x\n",
      " |      True\n",
      " |  \n",
      " |  ctypes\n",
      " |      An object to simplify the interaction of the array with the ctypes\n",
      " |      module.\n",
      " |      \n",
      " |      This attribute creates an object that makes it easier to use arrays\n",
      " |      when calling shared libraries with the ctypes module. The returned\n",
      " |      object has, among others, data, shape, and strides attributes (see\n",
      " |      Notes below) which themselves return ctypes objects that can be used\n",
      " |      as arguments to a shared library.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      None\n",
      " |      \n",
      " |      Returns\n",
      " |      -------\n",
      " |      c : Python object\n",
      " |          Possessing attributes data, shape, strides, etc.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.ctypeslib\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      Below are the public attributes of this object which were documented\n",
      " |      in \"Guide to NumPy\" (we have omitted undocumented public attributes,\n",
      " |      as well as documented private attributes):\n",
      " |      \n",
      " |      .. autoattribute:: numpy.core._internal._ctypes.data\n",
      " |          :noindex:\n",
      " |      \n",
      " |      .. autoattribute:: numpy.core._internal._ctypes.shape\n",
      " |          :noindex:\n",
      " |      \n",
      " |      .. autoattribute:: numpy.core._internal._ctypes.strides\n",
      " |          :noindex:\n",
      " |      \n",
      " |      .. automethod:: numpy.core._internal._ctypes.data_as\n",
      " |          :noindex:\n",
      " |      \n",
      " |      .. automethod:: numpy.core._internal._ctypes.shape_as\n",
      " |          :noindex:\n",
      " |      \n",
      " |      .. automethod:: numpy.core._internal._ctypes.strides_as\n",
      " |          :noindex:\n",
      " |      \n",
      " |      If the ctypes module is not available, then the ctypes attribute\n",
      " |      of array objects still returns something useful, but ctypes objects\n",
      " |      are not returned and errors may be raised instead. In particular,\n",
      " |      the object will still have the ``as_parameter`` attribute which will\n",
      " |      return an integer equal to the data attribute.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> import ctypes\n",
      " |      >>> x\n",
      " |      array([[0, 1],\n",
      " |             [2, 3]])\n",
      " |      >>> x.ctypes.data\n",
      " |      30439712\n",
      " |      >>> x.ctypes.data_as(ctypes.POINTER(ctypes.c_long))\n",
      " |      <ctypes.LP_c_long object at 0x01F01300>\n",
      " |      >>> x.ctypes.data_as(ctypes.POINTER(ctypes.c_long)).contents\n",
      " |      c_long(0)\n",
      " |      >>> x.ctypes.data_as(ctypes.POINTER(ctypes.c_longlong)).contents\n",
      " |      c_longlong(4294967296L)\n",
      " |      >>> x.ctypes.shape\n",
      " |      <numpy.core._internal.c_long_Array_2 object at 0x01FFD580>\n",
      " |      >>> x.ctypes.shape_as(ctypes.c_long)\n",
      " |      <numpy.core._internal.c_long_Array_2 object at 0x01FCE620>\n",
      " |      >>> x.ctypes.strides\n",
      " |      <numpy.core._internal.c_long_Array_2 object at 0x01FCE620>\n",
      " |      >>> x.ctypes.strides_as(ctypes.c_longlong)\n",
      " |      <numpy.core._internal.c_longlong_Array_2 object at 0x01F01300>\n",
      " |  \n",
      " |  data\n",
      " |      Python buffer object pointing to the start of the array's data.\n",
      " |  \n",
      " |  dtype\n",
      " |      Data-type of the array's elements.\n",
      " |      \n",
      " |      Parameters\n",
      " |      ----------\n",
      " |      None\n",
      " |      \n",
      " |      Returns\n",
      " |      -------\n",
      " |      d : numpy dtype object\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.dtype\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x\n",
      " |      array([[0, 1],\n",
      " |             [2, 3]])\n",
      " |      >>> x.dtype\n",
      " |      dtype('int32')\n",
      " |      >>> type(x.dtype)\n",
      " |      <type 'numpy.dtype'>\n",
      " |  \n",
      " |  flags\n",
      " |      Information about the memory layout of the array.\n",
      " |      \n",
      " |      Attributes\n",
      " |      ----------\n",
      " |      C_CONTIGUOUS (C)\n",
      " |          The data is in a single, C-style contiguous segment.\n",
      " |      F_CONTIGUOUS (F)\n",
      " |          The data is in a single, Fortran-style contiguous segment.\n",
      " |      OWNDATA (O)\n",
      " |          The array owns the memory it uses or borrows it from another object.\n",
      " |      WRITEABLE (W)\n",
      " |          The data area can be written to.  Setting this to False locks\n",
      " |          the data, making it read-only.  A view (slice, etc.) inherits WRITEABLE\n",
      " |          from its base array at creation time, but a view of a writeable\n",
      " |          array may be subsequently locked while the base array remains writeable.\n",
      " |          (The opposite is not true, in that a view of a locked array may not\n",
      " |          be made writeable.  However, currently, locking a base object does not\n",
      " |          lock any views that already reference it, so under that circumstance it\n",
      " |          is possible to alter the contents of a locked array via a previously\n",
      " |          created writeable view onto it.)  Attempting to change a non-writeable\n",
      " |          array raises a RuntimeError exception.\n",
      " |      ALIGNED (A)\n",
      " |          The data and all elements are aligned appropriately for the hardware.\n",
      " |      WRITEBACKIFCOPY (X)\n",
      " |          This array is a copy of some other array. The C-API function\n",
      " |          PyArray_ResolveWritebackIfCopy must be called before deallocating\n",
      " |          to the base array will be updated with the contents of this array.\n",
      " |      UPDATEIFCOPY (U)\n",
      " |          (Deprecated, use WRITEBACKIFCOPY) This array is a copy of some other array.\n",
      " |          When this array is\n",
      " |          deallocated, the base array will be updated with the contents of\n",
      " |          this array.\n",
      " |      FNC\n",
      " |          F_CONTIGUOUS and not C_CONTIGUOUS.\n",
      " |      FORC\n",
      " |          F_CONTIGUOUS or C_CONTIGUOUS (one-segment test).\n",
      " |      BEHAVED (B)\n",
      " |          ALIGNED and WRITEABLE.\n",
      " |      CARRAY (CA)\n",
      " |          BEHAVED and C_CONTIGUOUS.\n",
      " |      FARRAY (FA)\n",
      " |          BEHAVED and F_CONTIGUOUS and not C_CONTIGUOUS.\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      The `flags` object can be accessed dictionary-like (as in ``a.flags['WRITEABLE']``),\n",
      " |      or by using lowercased attribute names (as in ``a.flags.writeable``). Short flag\n",
      " |      names are only supported in dictionary access.\n",
      " |      \n",
      " |      Only the WRITEBACKIFCOPY, UPDATEIFCOPY, WRITEABLE, and ALIGNED flags can be\n",
      " |      changed by the user, via direct assignment to the attribute or dictionary\n",
      " |      entry, or by calling `ndarray.setflags`.\n",
      " |      \n",
      " |      The array flags cannot be set arbitrarily:\n",
      " |      \n",
      " |      - UPDATEIFCOPY can only be set ``False``.\n",
      " |      - WRITEBACKIFCOPY can only be set ``False``.\n",
      " |      - ALIGNED can only be set ``True`` if the data is truly aligned.\n",
      " |      - WRITEABLE can only be set ``True`` if the array owns its own memory\n",
      " |        or the ultimate owner of the memory exposes a writeable buffer\n",
      " |        interface or is a string.\n",
      " |      \n",
      " |      Arrays can be both C-style and Fortran-style contiguous simultaneously.\n",
      " |      This is clear for 1-dimensional arrays, but can also be true for higher\n",
      " |      dimensional arrays.\n",
      " |      \n",
      " |      Even for contiguous arrays a stride for a given dimension\n",
      " |      ``arr.strides[dim]`` may be *arbitrary* if ``arr.shape[dim] == 1``\n",
      " |      or the array has no elements.\n",
      " |      It does *not* generally hold that ``self.strides[-1] == self.itemsize``\n",
      " |      for C-style contiguous arrays or ``self.strides[0] == self.itemsize`` for\n",
      " |      Fortran-style contiguous arrays is true.\n",
      " |  \n",
      " |  flat\n",
      " |      A 1-D iterator over the array.\n",
      " |      \n",
      " |      This is a `numpy.flatiter` instance, which acts similarly to, but is not\n",
      " |      a subclass of, Python's built-in iterator object.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      flatten : Return a copy of the array collapsed into one dimension.\n",
      " |      \n",
      " |      flatiter\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.arange(1, 7).reshape(2, 3)\n",
      " |      >>> x\n",
      " |      array([[1, 2, 3],\n",
      " |             [4, 5, 6]])\n",
      " |      >>> x.flat[3]\n",
      " |      4\n",
      " |      >>> x.T\n",
      " |      array([[1, 4],\n",
      " |             [2, 5],\n",
      " |             [3, 6]])\n",
      " |      >>> x.T.flat[3]\n",
      " |      5\n",
      " |      >>> type(x.flat)\n",
      " |      <class 'numpy.flatiter'>\n",
      " |      \n",
      " |      An assignment example:\n",
      " |      \n",
      " |      >>> x.flat = 3; x\n",
      " |      array([[3, 3, 3],\n",
      " |             [3, 3, 3]])\n",
      " |      >>> x.flat[[1,4]] = 1; x\n",
      " |      array([[3, 1, 3],\n",
      " |             [3, 1, 3]])\n",
      " |  \n",
      " |  imag\n",
      " |      The imaginary part of the array.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.sqrt([1+0j, 0+1j])\n",
      " |      >>> x.imag\n",
      " |      array([ 0.        ,  0.70710678])\n",
      " |      >>> x.imag.dtype\n",
      " |      dtype('float64')\n",
      " |  \n",
      " |  itemsize\n",
      " |      Length of one array element in bytes.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.array([1,2,3], dtype=np.float64)\n",
      " |      >>> x.itemsize\n",
      " |      8\n",
      " |      >>> x = np.array([1,2,3], dtype=np.complex128)\n",
      " |      >>> x.itemsize\n",
      " |      16\n",
      " |  \n",
      " |  nbytes\n",
      " |      Total bytes consumed by the elements of the array.\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      Does not include memory consumed by non-element attributes of the\n",
      " |      array object.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.zeros((3,5,2), dtype=np.complex128)\n",
      " |      >>> x.nbytes\n",
      " |      480\n",
      " |      >>> np.prod(x.shape) * x.itemsize\n",
      " |      480\n",
      " |  \n",
      " |  ndim\n",
      " |      Number of array dimensions.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.array([1, 2, 3])\n",
      " |      >>> x.ndim\n",
      " |      1\n",
      " |      >>> y = np.zeros((2, 3, 4))\n",
      " |      >>> y.ndim\n",
      " |      3\n",
      " |  \n",
      " |  real\n",
      " |      The real part of the array.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.sqrt([1+0j, 0+1j])\n",
      " |      >>> x.real\n",
      " |      array([ 1.        ,  0.70710678])\n",
      " |      >>> x.real.dtype\n",
      " |      dtype('float64')\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.real : equivalent function\n",
      " |  \n",
      " |  shape\n",
      " |      Tuple of array dimensions.\n",
      " |      \n",
      " |      The shape property is usually used to get the current shape of an array,\n",
      " |      but may also be used to reshape the array in-place by assigning a tuple of\n",
      " |      array dimensions to it.  As with `numpy.reshape`, one of the new shape\n",
      " |      dimensions can be -1, in which case its value is inferred from the size of\n",
      " |      the array and the remaining dimensions. Reshaping an array in-place will\n",
      " |      fail if a copy is required.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.array([1, 2, 3, 4])\n",
      " |      >>> x.shape\n",
      " |      (4,)\n",
      " |      >>> y = np.zeros((2, 3, 4))\n",
      " |      >>> y.shape\n",
      " |      (2, 3, 4)\n",
      " |      >>> y.shape = (3, 8)\n",
      " |      >>> y\n",
      " |      array([[ 0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.],\n",
      " |             [ 0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.],\n",
      " |             [ 0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.]])\n",
      " |      >>> y.shape = (3, 6)\n",
      " |      Traceback (most recent call last):\n",
      " |        File \"<stdin>\", line 1, in <module>\n",
      " |      ValueError: total size of new array must be unchanged\n",
      " |      >>> np.zeros((4,2))[::2].shape = (-1,)\n",
      " |      Traceback (most recent call last):\n",
      " |        File \"<stdin>\", line 1, in <module>\n",
      " |      AttributeError: incompatible shape for a non-contiguous array\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.reshape : similar function\n",
      " |      ndarray.reshape : similar method\n",
      " |  \n",
      " |  size\n",
      " |      Number of elements in the array.\n",
      " |      \n",
      " |      Equal to ``np.prod(a.shape)``, i.e., the product of the array's\n",
      " |      dimensions.\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      `a.size` returns a standard arbitrary precision Python integer. This\n",
      " |      may not be the case with other methods of obtaining the same value\n",
      " |      (like the suggested ``np.prod(a.shape)``, which returns an instance\n",
      " |      of ``np.int_``), and may be relevant if the value is used further in\n",
      " |      calculations that may overflow a fixed size integer type.\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> x = np.zeros((3, 5, 2), dtype=np.complex128)\n",
      " |      >>> x.size\n",
      " |      30\n",
      " |      >>> np.prod(x.shape)\n",
      " |      30\n",
      " |  \n",
      " |  strides\n",
      " |      Tuple of bytes to step in each dimension when traversing an array.\n",
      " |      \n",
      " |      The byte offset of element ``(i[0], i[1], ..., i[n])`` in an array `a`\n",
      " |      is::\n",
      " |      \n",
      " |          offset = sum(np.array(i) * a.strides)\n",
      " |      \n",
      " |      A more detailed explanation of strides can be found in the\n",
      " |      \"ndarray.rst\" file in the NumPy reference guide.\n",
      " |      \n",
      " |      Notes\n",
      " |      -----\n",
      " |      Imagine an array of 32-bit integers (each 4 bytes)::\n",
      " |      \n",
      " |        x = np.array([[0, 1, 2, 3, 4],\n",
      " |                      [5, 6, 7, 8, 9]], dtype=np.int32)\n",
      " |      \n",
      " |      This array is stored in memory as 40 bytes, one after the other\n",
      " |      (known as a contiguous block of memory).  The strides of an array tell\n",
      " |      us how many bytes we have to skip in memory to move to the next position\n",
      " |      along a certain axis.  For example, we have to skip 4 bytes (1 value) to\n",
      " |      move to the next column, but 20 bytes (5 values) to get to the same\n",
      " |      position in the next row.  As such, the strides for the array `x` will be\n",
      " |      ``(20, 4)``.\n",
      " |      \n",
      " |      See Also\n",
      " |      --------\n",
      " |      numpy.lib.stride_tricks.as_strided\n",
      " |      \n",
      " |      Examples\n",
      " |      --------\n",
      " |      >>> y = np.reshape(np.arange(2*3*4), (2,3,4))\n",
      " |      >>> y\n",
      " |      array([[[ 0,  1,  2,  3],\n",
      " |              [ 4,  5,  6,  7],\n",
      " |              [ 8,  9, 10, 11]],\n",
      " |             [[12, 13, 14, 15],\n",
      " |              [16, 17, 18, 19],\n",
      " |              [20, 21, 22, 23]]])\n",
      " |      >>> y.strides\n",
      " |      (48, 16, 4)\n",
      " |      >>> y[1,1,1]\n",
      " |      17\n",
      " |      >>> offset=sum(y.strides * np.array((1,1,1)))\n",
      " |      >>> offset/y.itemsize\n",
      " |      17\n",
      " |      \n",
      " |      >>> x = np.reshape(np.arange(5*6*7*8), (5,6,7,8)).transpose(2,3,1,0)\n",
      " |      >>> x.strides\n",
      " |      (32, 4, 224, 1344)\n",
      " |      >>> i = np.array([3,5,2,2])\n",
      " |      >>> offset = sum(i * x.strides)\n",
      " |      >>> x[3,5,2,2]\n",
      " |      813\n",
      " |      >>> offset / x.itemsize\n",
      " |      813\n",
      " |  \n",
      " |  ----------------------------------------------------------------------\n",
      " |  Data and other attributes defined here:\n",
      " |  \n",
      " |  __hash__ = None\n",
      "\n"
     ]
    }
   ],
   "source": [
    "# NOTE(review): help(np.eye(10)) documents the *returned ndarray object*,\n",
    "# not the np.eye function itself — that is why the output above is the full\n",
    "# ndarray reference. Use help(np.eye) for the function docs, and consider\n",
    "# clearing this cell's very large output before sharing the notebook.\n",
    "help(np.eye(10))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 4. Optional "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "##### Do you want to build a CNN from scratch?"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "In this optional part, you will build a CNN step by step."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "# Fix the global RNG seed so the checks below are reproducible.\n",
    "np.random.seed(1)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "##### 4.1 Implement a convolution step"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Hint: the `np.sum` function might be useful."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [],
   "source": [
    "def conv_single_step(a_prev_slice, W, b):\n",
    "    \"\"\"\n",
    "    Apply one filter defined by parameters W on a single slice (a_prev_slice)\n",
    "    of the output activation of the previous layer.\n",
    "\n",
    "    Arguments:\n",
    "    a_prev_slice: slice of input data, shape (f, f, n_C_prev)\n",
    "    W: weight parameters contained in a window, shape (f, f, n_C_prev)\n",
    "    b: bias parameters contained in a window, shape (1, 1, 1)\n",
    "\n",
    "    Returns:\n",
    "    Z: a scalar value, the result of convolving the sliding window (W, b)\n",
    "       on a slice of the input data\n",
    "    \"\"\"\n",
    "    # Element-wise product between the slice and the filter weights.\n",
    "    s = a_prev_slice * W\n",
    "\n",
    "    # Sum over all entries of s.\n",
    "    Z = np.sum(s)\n",
    "\n",
    "    # Add the bias b, cast to a float so that Z stays a scalar value.\n",
    "    Z = Z + float(b)\n",
    "\n",
    "    return Z"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sanity check for conv_single_step with a fixed seed.\n",
    "# A correct implementation prints Z = -6.99908945068 (see the table below).\n",
    "np.random.seed(1)\n",
    "a_slice_prev = np.random.randn(4, 4, 3)\n",
    "W = np.random.randn(4, 4, 3)\n",
    "b = np.random.randn(1, 1, 1)\n",
    "\n",
    "Z = conv_single_step(a_slice_prev, W, b)\n",
    "print(\"Z =\", Z)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Use the code above to test your code. If your code is correct, you should be able to get the output shown below."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "<table>\n",
    "    <tr>\n",
    "        <td>\n",
    "            Z\n",
    "        </td>\n",
    "        <td>\n",
    "            -6.99908945068\n",
    "        </td>\n",
    "    </tr>\n",
    "\n",
    "</table>"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "##### 4.2 Padding"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Hint: the `np.pad` function might be useful."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def zero_pad(X, pad):\n",
    "    \"\"\"\n",
    "    Pad with zeros all images of the dataset X. The padding is applied to the\n",
    "    height and width of an image, as illustrated in Figure 1.\n",
    "    \n",
    "    Argument:\n",
    "    X: python numpy array of shape (m, n_H, n_W, n_C) representing a batch of m images\n",
    "    pad: integer, amount of padding around each image on vertical and horizontal dimensions\n",
    "    \n",
    "    Returns:\n",
    "    X_pad: padded image of shape (m, n_H + 2*pad, n_W + 2*pad, n_C)\n",
    "    \"\"\"\n",
    "    # Pad only the height (axis 1) and width (axis 2); the batch (axis 0) and\n",
    "    # channel (axis 3) dimensions are left untouched.\n",
    "    X_pad = np.pad(X, ((0, 0), (pad, pad), (pad, pad), (0, 0)),\n",
    "                   mode='constant', constant_values=(0, 0))\n",
    "    \n",
    "    return X_pad"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sanity check for zero_pad: pad=2 should grow 3x3 spatial dims to 7x7,\n",
    "# and any padded border row (e.g. x_pad[1,1]) must be all zeros.\n",
    "np.random.seed(1)\n",
    "x = np.random.randn(4, 3, 3, 2)\n",
    "x_pad = zero_pad(x, 2)\n",
    "print (\"x.shape =\\n\", x.shape)\n",
    "print (\"x_pad.shape =\\n\", x_pad.shape)\n",
    "print (\"x[1,1] =\\n\", x[1,1])\n",
    "print (\"x_pad[1,1] =\\n\", x_pad[1,1])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Use the code above to test your code. If your code is correct, you should be able to get the output shown as below."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "```\n",
    "x.shape =\n",
    " (4, 3, 3, 2)\n",
    "x_pad.shape =\n",
    " (4, 7, 7, 2)\n",
    "x[1,1] =\n",
    " [[ 0.90085595 -0.68372786]\n",
    " [-0.12289023 -0.93576943]\n",
    " [-0.26788808  0.53035547]]\n",
    "x_pad[1,1] =\n",
    " [[ 0.  0.]\n",
    " [ 0.  0.]\n",
    " [ 0.  0.]\n",
    " [ 0.  0.]\n",
    " [ 0.  0.]\n",
    " [ 0.  0.]\n",
    " [ 0.  0.]]\n",
    "```"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "##### 4.3 Implement a forward propagation in CNN."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Hints: The formulas to calculate the output shapes are :"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "$$ n_H = \\lfloor \\frac{n_{H_{prev}} - f + 2 \\times pad}{stride} \\rfloor +1 $$\n",
    "$$ n_W = \\lfloor \\frac{n_{W_{prev}} - f + 2 \\times pad}{stride} \\rfloor +1 $$\n",
    "$$ n_C = \\text{number of filters used in the convolution}$$"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def conv_forward(A_prev, W, b, hparameters):\n",
    "    \"\"\"\n",
    "    Implements the forward propagation for a convolution function\n",
    "    \n",
    "    Arguments:\n",
    "    A_prev: output activations of the previous layer, \n",
    "        numpy array of shape (m, n_H_prev, n_W_prev, n_C_prev)\n",
    "    W: Weights, numpy array of shape (f, f, n_C_prev, n_C)\n",
    "    b: Biases, numpy array of shape (1, 1, 1, n_C)\n",
    "    hparameters: python dictionary containing \"stride\" and \"pad\"\n",
    "        \n",
    "    Returns:\n",
    "    Z: conv output, numpy array of shape (m, n_H, n_W, n_C)\n",
    "    cache: cache of values needed for the conv_backward() function\n",
    "    \"\"\"\n",
    "    \n",
    "    # Get dimensions from A_prev's shape  \n",
    "    (m, n_H_prev, n_W_prev, n_C_prev) = A_prev.shape\n",
    "    \n",
    "    # Get dimensions from W's shape \n",
    "    (f, f, n_C_prev, n_C) = W.shape\n",
    "    \n",
    "    # Get information from \"hparameters\" \n",
    "    stride = hparameters[\"stride\"]\n",
    "    pad = hparameters[\"pad\"]\n",
    "    \n",
    "    # Compute the dimensions of the CONV output volume using the formula given above. \n",
    "    # int() applies the 'floor' operation. \n",
    "    n_H = int((n_H_prev - f + 2 * pad) / stride) + 1\n",
    "    n_W = int((n_W_prev - f + 2 * pad) / stride) + 1\n",
    "    \n",
    "    # Initialize the output volume Z with zeros. \n",
    "    Z = np.zeros((m, n_H, n_W, n_C))\n",
    "    \n",
    "    # Create A_prev_pad by padding A_prev\n",
    "    A_prev_pad = zero_pad(A_prev, pad)\n",
    "    \n",
    "    for i in range(m):                  # loop over the batch of training examples\n",
    "        a_prev_pad = A_prev_pad[i]      # Select ith training example's padded activation\n",
    "        for h in range(n_H):            # loop over vertical axis of the output volume\n",
    "            # Find the vertical start and end of the current \"slice\" \n",
    "            vert_start = h * stride\n",
    "            vert_end = vert_start + f\n",
    "            \n",
    "            for w in range(n_W):        # loop over horizontal axis of the output volume\n",
    "                # Find the horizontal start and end of the current \"slice\"\n",
    "                horiz_start = w * stride\n",
    "                horiz_end = horiz_start + f\n",
    "                \n",
    "                for c in range(n_C):    # loop over channels (= #filters) of the output volume\n",
    "                                        \n",
    "                    # Use the corners to define the (3D) slice of a_prev_pad (all input channels). \n",
    "                    a_slice_prev = a_prev_pad[vert_start:vert_end, horiz_start:horiz_end, :]\n",
    "                    \n",
    "                    # Convolve the (3D) slice with the correct filter W and bias b, to get back one output neuron. \n",
    "                    weights = W[:, :, :, c]\n",
    "                    biases = b[:, :, :, c]\n",
    "                    Z[i, h, w, c] = np.sum(a_slice_prev * weights) + float(biases)\n",
    "                                        \n",
    "    \n",
    "    # Making sure your output shape is correct\n",
    "    assert(Z.shape == (m, n_H, n_W, n_C))\n",
    "    \n",
    "    # Save information in \"cache\" for the backprop\n",
    "    cache = (A_prev, W, b, hparameters)\n",
    "    \n",
    "    return Z, cache"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sanity check for conv_forward: with pad=1, stride=2 on a (10,5,7,4) input and\n",
    "# 8 filters of size 3, the output Z should have shape (10, 3, 4, 8).\n",
    "np.random.seed(1)\n",
    "A_prev = np.random.randn(10,5,7,4)\n",
    "W = np.random.randn(3,3,4,8)\n",
    "b = np.random.randn(1,1,1,8)\n",
    "hparameters = {\"pad\" : 1,\n",
    "               \"stride\": 2}\n",
    "\n",
    "Z, cache_conv = conv_forward(A_prev, W, b, hparameters)\n",
    "print(\"Z's mean =\\n\", np.mean(Z))\n",
    "print(\"Z[3,2,1] =\\n\", Z[3,2,1])\n",
    "print(\"cache_conv[0][1][2][3] =\\n\", cache_conv[0][1][2][3])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Use the code above to test your code. If your code is correct, you should be able to get the output shown as below."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "```\n",
    "Z's mean =\n",
    " 0.692360880758\n",
    "Z[3,2,1] =\n",
    " [ -1.28912231   2.27650251   6.61941931   0.95527176   8.25132576\n",
    "   2.31329639  13.00689405   2.34576051]\n",
    "cache_conv[0][1][2][3] = [-1.1191154   1.9560789  -0.3264995  -1.34267579]\n",
    "```"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "##### 4.4 Pooling layers"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Hints: As there's no padding, the formulas binding the output shape of the pooling to the input shape are:\n",
    "\n",
    "$$ n_H = \\lfloor \\frac{n_{H_{prev}} - f}{stride} \\rfloor +1 $$\n",
    "\n",
    "$$ n_W = \\lfloor \\frac{n_{W_{prev}} - f}{stride} \\rfloor +1 $$\n",
    "\n",
    "$$ n_C = n_{C_{prev}}$$"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def pool_forward(A_prev, hparameters, mode = \"max\"):\n",
    "    \"\"\"\n",
    "    Implements the forward pass of the pooling layer\n",
    "    \n",
    "    Arguments:\n",
    "    A_prev: Input data, numpy array of shape (m, n_H_prev, n_W_prev, n_C_prev)\n",
    "    hparameters: python dictionary containing \"f\" and \"stride\"\n",
    "    mode: the pooling mode you would like to use, defined as a string (\"max\" or \"average\")\n",
    "    \n",
    "    Returns:\n",
    "    A: output of the pool layer, a numpy array of shape (m, n_H, n_W, n_C)\n",
    "    cache: cache used in the backward pass of the pooling layer, contains the input and hparameters \n",
    "    \"\"\"\n",
    "    \n",
    "    # Get dimensions from the input shape\n",
    "    (m, n_H_prev, n_W_prev, n_C_prev) = A_prev.shape\n",
    "    \n",
    "    # Get hyperparameters from \"hparameters\"\n",
    "    f = hparameters[\"f\"]\n",
    "    stride = hparameters[\"stride\"]\n",
    "    \n",
    "    # Define the dimensions of the output (no padding in pooling)\n",
    "    n_H = int(1 + (n_H_prev - f) / stride)\n",
    "    n_W = int(1 + (n_W_prev - f) / stride)\n",
    "    n_C = n_C_prev\n",
    "    \n",
    "    # Initialize output matrix A\n",
    "    A = np.zeros((m, n_H, n_W, n_C))              \n",
    "    \n",
    "    for i in range(m):                            # loop over the training examples\n",
    "        for h in range(n_H):                      # loop on the vertical axis of the output volume\n",
    "            # Find the vertical start and end of the current \"slice\"\n",
    "            vert_start = h * stride\n",
    "            vert_end = vert_start + f\n",
    "            \n",
    "            for w in range(n_W):                  # loop on the horizontal axis of the output volume\n",
    "                # Find the horizontal start and end of the current \"slice\"\n",
    "                horiz_start = w * stride\n",
    "                horiz_end = horiz_start + f\n",
    "                \n",
    "                for c in range (n_C):             # loop over the channels of the output volume\n",
    "                    \n",
    "                    # Use the corners to define the current 2D slice on the ith training example of A_prev, channel c.\n",
    "                    a_prev_slice = A_prev[i, vert_start:vert_end, horiz_start:horiz_end, c]\n",
    "                    \n",
    "                    # Compute the pooling operation on the slice. \n",
    "                    # An if statement differentiates the modes (np.max / np.mean).\n",
    "                    if mode == \"max\":\n",
    "                        A[i, h, w, c] = np.max(a_prev_slice)\n",
    "                    elif mode == \"average\":\n",
    "                        A[i, h, w, c] = np.mean(a_prev_slice)\n",
    "    \n",
    "    # Store the input and hparameters in \"cache\" for pool_backward()\n",
    "    cache = (A_prev, hparameters)\n",
    "    \n",
    "    # Making sure your output shape is correct\n",
    "    assert(A.shape == (m, n_H, n_W, n_C))\n",
    "    \n",
    "    return A, cache"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sanity check for pool_forward: f=3, stride=1 on a (2,5,5,3) input\n",
    "# should produce A of shape (2, 3, 3, 3) for both max and average modes.\n",
    "np.random.seed(1)\n",
    "A_prev = np.random.randn(2, 5, 5, 3)\n",
    "hparameters = {\"stride\" : 1, \"f\": 3}\n",
    "\n",
    "A, cache = pool_forward(A_prev, hparameters)\n",
    "print(\"mode = max\")\n",
    "print(\"A.shape = \" + str(A.shape))\n",
    "print(\"A =\\n\", A)\n",
    "print()\n",
    "A, cache = pool_forward(A_prev, hparameters, mode = \"average\")\n",
    "print(\"mode = average\")\n",
    "print(\"A.shape = \" + str(A.shape))\n",
    "print(\"A =\\n\", A)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Use the code above to test your code. If your code is correct, you should be able to get the output shown as below."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "```\n",
    "mode = max\n",
    "A.shape = (2, 3, 3, 3)\n",
    "A =\n",
    " [[[[ 1.74481176  0.90159072  1.65980218]\n",
    "   [ 1.74481176  1.46210794  1.65980218]\n",
    "   [ 1.74481176  1.6924546   1.65980218]]\n",
    "\n",
    "  [[ 1.14472371  0.90159072  2.10025514]\n",
    "   [ 1.14472371  0.90159072  1.65980218]\n",
    "   [ 1.14472371  1.6924546   1.65980218]]\n",
    "\n",
    "  [[ 1.13162939  1.51981682  2.18557541]\n",
    "   [ 1.13162939  1.51981682  2.18557541]\n",
    "   [ 1.13162939  1.6924546   2.18557541]]]\n",
    "\n",
    "\n",
    " [[[ 1.19891788  0.84616065  0.82797464]\n",
    "   [ 0.69803203  0.84616065  1.2245077 ]\n",
    "   [ 0.69803203  1.12141771  1.2245077 ]]\n",
    "\n",
    "  [[ 1.96710175  0.84616065  1.27375593]\n",
    "   [ 1.96710175  0.84616065  1.23616403]\n",
    "   [ 1.62765075  1.12141771  1.2245077 ]]\n",
    "\n",
    "  [[ 1.96710175  0.86888616  1.27375593]\n",
    "   [ 1.96710175  0.86888616  1.23616403]\n",
    "   [ 1.62765075  1.12141771  0.79280687]]]]\n",
    "\n",
    "mode = average\n",
    "A.shape = (2, 3, 3, 3)\n",
    "A =\n",
    " [[[[ -3.01046719e-02  -3.24021315e-03  -3.36298859e-01]\n",
    "   [  1.43310483e-01   1.93146751e-01  -4.44905196e-01]\n",
    "   [  1.28934436e-01   2.22428468e-01   1.25067597e-01]]\n",
    "\n",
    "  [[ -3.81801899e-01   1.59993515e-02   1.70562706e-01]\n",
    "   [  4.73707165e-02   2.59244658e-02   9.20338402e-02]\n",
    "   [  3.97048605e-02   1.57189094e-01   3.45302489e-01]]\n",
    "\n",
    "  [[ -3.82680519e-01   2.32579951e-01   6.25997903e-01]\n",
    "   [ -2.47157416e-01  -3.48524998e-04   3.50539717e-01]\n",
    "   [ -9.52551510e-02   2.68511000e-01   4.66056368e-01]]]\n",
    "\n",
    "\n",
    " [[[ -1.73134159e-01   3.23771981e-01  -3.43175716e-01]\n",
    "   [  3.80634669e-02   7.26706274e-02  -2.30268958e-01]\n",
    "   [  2.03009393e-02   1.41414785e-01  -1.23158476e-02]]\n",
    "\n",
    "  [[  4.44976963e-01  -2.61694592e-03  -3.10403073e-01]\n",
    "   [  5.08114737e-01  -2.34937338e-01  -2.39611830e-01]\n",
    "   [  1.18726772e-01   1.72552294e-01  -2.21121966e-01]]\n",
    "\n",
    "  [[  4.29449255e-01   8.44699612e-02  -2.72909051e-01]\n",
    "   [  6.76351685e-01  -1.20138225e-01  -2.44076712e-01]\n",
    "   [  1.50774518e-01   2.89111751e-01   1.23238536e-03]]]]\n",
    "```"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "####  Congratulations! You have finished Assignment 6. You now understand how convolutional neural networks work."
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
