{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import h5py\n",
    "import matplotlib.pyplot as plt\n",
    "import testCases\n",
    "from dnn_utils import sigmoid, sigmoid_backward, relu, relu_backward\n",
    "import lr_utils"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Fix the global NumPy random seed so results are reproducible\n",
    "np.random.seed(1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Parameter initialization\n",
    "def initialize_parameters(n_x, n_h, n_y):\n",
    "    \"\"\"\n",
    "    Initialize the parameters of a two-layer network.\n",
    "    Arguments:\n",
    "        n_x: number of units in the input layer\n",
    "        n_h: number of units in the hidden layer\n",
    "        n_y: number of units in the output layer\n",
    "    \n",
    "    Returns:\n",
    "        parameters: dict containing\n",
    "            W1: weight matrix of shape (n_h, n_x)\n",
    "            b1: bias vector of shape (n_h, 1)\n",
    "            W2: weight matrix of shape (n_y, n_h)\n",
    "            b2: bias vector of shape (n_y, 1)\n",
    "    \"\"\"\n",
    "    # Small random weights break symmetry; biases start at zero\n",
    "    W1 = np.random.randn(n_h, n_x) * 0.01\n",
    "    b1 = np.zeros((n_h, 1))\n",
    "    W2 = np.random.randn(n_y, n_h) * 0.01\n",
    "    b2 = np.zeros((n_y, 1))\n",
    "    \n",
    "    # Sanity-check the shapes before returning\n",
    "    assert(W1.shape == (n_h, n_x))\n",
    "    assert(b1.shape == (n_h, 1))\n",
    "    assert(W2.shape == (n_y, n_h))\n",
    "    assert(b2.shape == (n_y, 1))\n",
    "    \n",
    "    parameters = {\"W1\": W1,\n",
    "                  \"b1\": b1,\n",
    "                  \"W2\": W2,\n",
    "                  \"b2\": b2}\n",
    "    \n",
    "    return parameters"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "==============测试initialize_parameters==============\n",
      "W1 = [[ 0.01624345 -0.00611756 -0.00528172 -0.01072969  0.00865408 -0.02301539\n",
      "   0.01744812 -0.00761207  0.00319039]\n",
      " [-0.0024937   0.01462108 -0.02060141 -0.00322417 -0.00384054  0.01133769\n",
      "  -0.01099891 -0.00172428 -0.00877858]\n",
      " [ 0.00042214  0.00582815 -0.01100619  0.01144724  0.00901591  0.00502494\n",
      "   0.00900856 -0.00683728 -0.0012289 ]\n",
      " [-0.00935769 -0.00267888  0.00530355 -0.00691661 -0.00396754 -0.00687173\n",
      "  -0.00845206 -0.00671246 -0.00012665]\n",
      " [-0.0111731   0.00234416  0.01659802  0.00742044 -0.00191836 -0.00887629\n",
      "  -0.00747158  0.01692455  0.00050808]\n",
      " [-0.00636996  0.00190915  0.02100255  0.00120159  0.00617203  0.0030017\n",
      "  -0.0035225  -0.01142518 -0.00349343]\n",
      " [-0.00208894  0.00586623  0.00838983  0.00931102  0.00285587  0.00885141\n",
      "  -0.00754398  0.01252868  0.0051293 ]\n",
      " [-0.00298093  0.00488518 -0.00075572  0.01131629  0.01519817  0.02185575\n",
      "  -0.01396496 -0.01444114 -0.00504466]]\n",
      "b1 = [[ 0.]\n",
      " [ 0.]\n",
      " [ 0.]\n",
      " [ 0.]\n",
      " [ 0.]\n",
      " [ 0.]\n",
      " [ 0.]\n",
      " [ 0.]]\n",
      "W2 = [[ 0.00160037  0.00876169  0.00315635 -0.02022201 -0.00306204  0.00827975\n",
      "   0.00230095  0.00762011]\n",
      " [-0.00222328 -0.00200758  0.00186561  0.00410052  0.001983    0.00119009\n",
      "  -0.00670662  0.00377564]\n",
      " [ 0.00121821  0.01129484  0.01198918  0.00185156 -0.00375285 -0.0063873\n",
      "   0.00423494  0.0007734 ]\n",
      " [-0.00343854  0.00043597 -0.00620001  0.00698032 -0.00447129  0.01224508\n",
      "   0.00403492  0.00593579]\n",
      " [-0.01094912  0.00169382  0.00740556 -0.00953701 -0.00266219  0.00032615\n",
      "  -0.01373117  0.00315159]\n",
      " [ 0.00846161 -0.00859516  0.00350546 -0.01312283 -0.00038696 -0.01615772\n",
      "   0.01121418  0.00408901]\n",
      " [-0.00024617 -0.00775162  0.01273756  0.01967102 -0.01857982  0.01236164\n",
      "   0.01627651  0.00338012]]\n",
      "b2 = [[ 0.]\n",
      " [ 0.]\n",
      " [ 0.]\n",
      " [ 0.]\n",
      " [ 0.]\n",
      " [ 0.]\n",
      " [ 0.]]\n"
     ]
    }
   ],
   "source": [
    "# Smoke test for initialize_parameters with a 9-8-7 layout\n",
    "print(\"==============测试initialize_parameters==============\")\n",
    "parameters = initialize_parameters(9, 8, 7)\n",
    "print(\"W1 = \" + str(parameters[\"W1\"]))\n",
    "print(\"b1 = \" + str(parameters[\"b1\"]))\n",
    "print(\"W2 = \" + str(parameters[\"W2\"]))\n",
    "print(\"b2 = \" + str(parameters[\"b2\"]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def initialize_parameters_deep(layers_dims):\n",
    "    \"\"\"\n",
    "    Initialize the parameters of an L-layer network.\n",
    "    Arguments:\n",
    "        layers_dims: list with the number of units in each layer of the network\n",
    "    Returns:\n",
    "        parameters: dict with parameters \"W1\", \"b1\", ..., \"WL\", \"bL\"\n",
    "            Wl: weight matrix of shape (layers_dims[l], layers_dims[l-1])\n",
    "            bl: bias vector of shape (layers_dims[l], 1)\n",
    "    \"\"\"\n",
    "    # Fixed seed so the test cell below is deterministic\n",
    "    np.random.seed(3)\n",
    "    parameters = {}\n",
    "    L = len(layers_dims)\n",
    "    \n",
    "    for l in range(1, L):\n",
    "        # Weights scaled by 1/sqrt(fan_in) (Xavier-style) instead of a fixed 0.01\n",
    "        parameters[\"W\" + str(l)] = np.random.randn(layers_dims[l], layers_dims[l - 1]) / np.sqrt(layers_dims[l - 1])\n",
    "        parameters[\"b\" + str(l)] = np.zeros((layers_dims[l], 1))\n",
    "        \n",
    "        # Sanity-check the shapes\n",
    "        assert(parameters[\"W\" + str(l)].shape == (layers_dims[l], layers_dims[l-1]))\n",
    "        assert(parameters[\"b\" + str(l)].shape == (layers_dims[l], 1))\n",
    "    \n",
    "    return parameters"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "==============测试initialize_parameters_deep==============\n",
      "W1 = [[ 0.79989897  0.19521314  0.04315498 -0.83337927 -0.12405178]\n",
      " [-0.15865304 -0.03700312 -0.28040323 -0.01959608 -0.21341839]\n",
      " [-0.58757818  0.39561516  0.39413741  0.76454432  0.02237573]\n",
      " [-0.18097724 -0.24389238 -0.69160568  0.43932807 -0.49241241]]\n",
      "b1 = [[ 0.]\n",
      " [ 0.]\n",
      " [ 0.]\n",
      " [ 0.]]\n",
      "W2 = [[-0.59252326 -0.10282495  0.74307418  0.11835813]\n",
      " [-0.51189257 -0.3564966   0.31262248 -0.08025668]\n",
      " [-0.38441818 -0.11501536  0.37252813  0.98805539]]\n",
      "b2 = [[ 0.]\n",
      " [ 0.]\n",
      " [ 0.]]\n"
     ]
    }
   ],
   "source": [
    "# Test initialize_parameters_deep on a 5-4-3 network\n",
    "print(\"==============测试initialize_parameters_deep==============\")\n",
    "layers_dims = [5,4,3]\n",
    "parameters = initialize_parameters_deep(layers_dims)\n",
    "print(\"W1 = \" + str(parameters[\"W1\"]))\n",
    "print(\"b1 = \" + str(parameters[\"b1\"]))\n",
    "print(\"W2 = \" + str(parameters[\"W2\"]))\n",
    "print(\"b2 = \" + str(parameters[\"b2\"]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Linear part of forward propagation\n",
    "def linear_forward(A, W, b):\n",
    "    \"\"\"\n",
    "    Arguments:\n",
    "        A: activations from the previous layer, shape (size of previous layer, number of examples)\n",
    "        W: weight matrix, numpy array of shape (size of current layer, size of previous layer)\n",
    "        b: bias vector, numpy array of shape (size of current layer, 1)\n",
    "    Returns:\n",
    "        Z: input of the activation function, also called the pre-activation value\n",
    "        cache: tuple (A, W, b) stored for computing the backward pass efficiently\n",
    "    \"\"\"\n",
    "    Z = np.dot(W, A) + b\n",
    "    assert(Z.shape == (W.shape[0], A.shape[1]))\n",
    "    cache = (A, W, b)\n",
    "    \n",
    "    return Z, cache"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "==============测试linear_forward==============\n",
      "Z = [[ 3.26295337 -1.23429987]]\n"
     ]
    }
   ],
   "source": [
    "# Test linear_forward\n",
    "print(\"==============测试linear_forward==============\")\n",
    "A,W,b = testCases.linear_forward_test_case()\n",
    "Z,linear_cache = linear_forward(A,W,b)\n",
    "print(\"Z = \" + str(Z))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def linear_activation_forward(A_prev, W, b, activation):\n",
    "    \"\"\"\n",
    "    Forward propagation for the LINEAR->ACTIVATION layer.\n",
    "    \n",
    "    Arguments:\n",
    "        A_prev: activations from the previous layer (or the input data),\n",
    "                shape (size of previous layer, number of examples)\n",
    "        W: weight matrix, numpy array of shape (size of current layer, size of previous layer)\n",
    "        b: bias vector, numpy array of shape (size of current layer, 1)\n",
    "        activation: activation to use in this layer, string, \"sigmoid\" or \"relu\"\n",
    "    Returns:\n",
    "        A: output of the activation function (post-activation value)\n",
    "        cache: tuple (linear_cache, activation_cache) stored for the backward pass\n",
    "    Raises:\n",
    "        ValueError: if activation is neither \"sigmoid\" nor \"relu\"\n",
    "    \"\"\"\n",
    "    if(activation == \"sigmoid\"):\n",
    "        Z, linear_cache = linear_forward(A_prev, W, b)\n",
    "        A, activation_cache = sigmoid(Z)\n",
    "    elif(activation == \"relu\"):\n",
    "        Z, linear_cache = linear_forward(A_prev, W, b)\n",
    "        A, activation_cache = relu(Z)\n",
    "    else:\n",
    "        # Fail fast: without this, an unknown activation raised a confusing\n",
    "        # UnboundLocalError on A further down\n",
    "        raise ValueError('activation must be \"sigmoid\" or \"relu\", got: ' + str(activation))\n",
    "    \n",
    "    assert(A.shape == (W.shape[0], A_prev.shape[1]))\n",
    "    cache = (linear_cache, activation_cache)\n",
    "    \n",
    "    return A, cache"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "==============测试linear_activation_forward==============\n",
      "sigmoid，A = [[ 0.96890023  0.11013289]]\n",
      "ReLU，A = [[ 3.43896131  0.        ]]\n"
     ]
    }
   ],
   "source": [
    "# Test linear_activation_forward with both activations\n",
    "print(\"==============测试linear_activation_forward==============\")\n",
    "A_prev, W,b = testCases.linear_activation_forward_test_case()\n",
    "\n",
    "A, linear_activation_cache = linear_activation_forward(A_prev, W, b, activation = \"sigmoid\")\n",
    "print(\"sigmoid，A = \" + str(A))\n",
    "\n",
    "A, linear_activation_cache = linear_activation_forward(A_prev, W, b, activation = \"relu\")\n",
    "print(\"ReLU，A = \" + str(A))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def L_model_forward(X, parameters):\n",
    "    \"\"\"\n",
    "    Forward propagation for the [LINEAR->RELU] * (L-1) -> LINEAR->SIGMOID model,\n",
    "    i.e. the full forward pass of the multi-layer network.\n",
    "    \n",
    "    Arguments:\n",
    "        X: input data, numpy array of shape (input size, number of examples)\n",
    "        parameters: output of initialize_parameters_deep()\n",
    "        \n",
    "    Returns:\n",
    "        AL: last post-activation value (the output layer's sigmoid activations)\n",
    "        caches: list of caches:\n",
    "            every cache of linear_activation_forward() with \"relu\" (L-1 of them, indices 0..L-2)\n",
    "            the cache of linear_activation_forward() with \"sigmoid\" (one, index L-1)\n",
    "    \"\"\"\n",
    "    caches = []\n",
    "    A = X\n",
    "    # parameters holds one W and one b per layer, hence // 2\n",
    "    L = len(parameters) // 2\n",
    "    for i in range(1, L):\n",
    "        A_prev = A\n",
    "        A, cache = linear_activation_forward(A_prev, parameters[\"W\" + str(i)], parameters[\"b\" + str(i)], activation=\"relu\")\n",
    "        caches.append(cache)\n",
    "    \n",
    "    # Output layer uses sigmoid (binary classification)\n",
    "    AL, cache = linear_activation_forward(A, parameters[\"W\" + str(L)], parameters[\"b\" + str(L)], activation=\"sigmoid\")\n",
    "    caches.append(cache)\n",
    "    \n",
    "    assert(AL.shape == (1, X.shape[1]))\n",
    "    \n",
    "    return AL, caches"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "==============测试L_model_forward==============\n",
      "AL = [[ 0.17007265  0.2524272 ]]\n",
      "caches 的长度为 = 2\n"
     ]
    }
   ],
   "source": [
    "# Test L_model_forward\n",
    "print(\"==============测试L_model_forward==============\")\n",
    "X,parameters = testCases.L_model_forward_test_case()\n",
    "AL,caches = L_model_forward(X,parameters)\n",
    "print(\"AL = \" + str(AL))\n",
    "print(\"caches 的长度为 = \" + str(len(caches)))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def compute_cost(AL, Y):\n",
    "    \"\"\"\n",
    "    Compute the cross-entropy cost.\n",
    "    \n",
    "    Arguments:\n",
    "        AL: probability vector of label predictions, shape (1, number of examples);\n",
    "            assumes values lie strictly in (0, 1) — np.log yields -inf/nan otherwise (no clipping here)\n",
    "        Y: true label vector, shape (1, number of examples)\n",
    "    \n",
    "    Returns:\n",
    "        cost: cross-entropy cost, a scalar\n",
    "    \"\"\"\n",
    "    m = Y.shape[1]\n",
    "    cost = -np.sum(np.multiply(np.log(AL), Y) + np.multiply(np.log(1 - AL), 1 - Y)) / m\n",
    "    \n",
    "    # squeeze turns e.g. [[cost]] into a plain scalar\n",
    "    cost = np.squeeze(cost)\n",
    "    \n",
    "    assert(cost.shape == ())\n",
    "    \n",
    "    return cost"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "==============测试compute_cost==============\n",
      "cost = 0.414931599615\n"
     ]
    }
   ],
   "source": [
    "# Test compute_cost\n",
    "print(\"==============测试compute_cost==============\")\n",
    "Y,AL = testCases.compute_cost_test_case()\n",
    "print(\"cost = \" + str(compute_cost(AL, Y)))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def linear_backward(dZ, cache):\n",
    "    \"\"\"\n",
    "    Linear portion of backward propagation for a single layer (layer l).\n",
    "    \n",
    "    Arguments:\n",
    "        dZ: gradient of the cost with respect to this layer's linear output\n",
    "        cache: tuple (A_prev, W, b) stored during this layer's forward pass\n",
    "        \n",
    "    Returns:\n",
    "        dA_prev: gradient of the cost w.r.t. the previous layer's activation, same shape as A_prev\n",
    "        dW: gradient of the cost w.r.t. W, same shape as W\n",
    "        db: gradient of the cost w.r.t. b, same shape as b\n",
    "    \"\"\"\n",
    "    A_prev, W, b = cache\n",
    "    m = A_prev.shape[1]\n",
    "    \n",
    "    # dW and db are averaged over the m examples; dA_prev is not\n",
    "    dW = np.dot(dZ, A_prev.T) / m\n",
    "    db = np.sum(dZ, axis=1, keepdims=True) / m\n",
    "    dA_prev = np.dot(W.T, dZ)\n",
    "    \n",
    "    # Each gradient must match the shape of the value it differentiates\n",
    "    assert(dA_prev.shape == A_prev.shape)\n",
    "    assert(dW.shape == W.shape)\n",
    "    assert(db.shape == b.shape)\n",
    "    \n",
    "    return dA_prev, dW, db"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "==============测试linear_backward==============\n",
      "dA_prev = [[ 0.51822968 -0.19517421]\n",
      " [-0.40506361  0.15255393]\n",
      " [ 2.37496825 -0.89445391]]\n",
      "dW = [[-0.10076895  1.40685096  1.64992505]]\n",
      "db = [[ 0.50629448]]\n"
     ]
    }
   ],
   "source": [
    "# Test linear_backward\n",
    "print(\"==============测试linear_backward==============\")\n",
    "dZ, linear_cache = testCases.linear_backward_test_case()\n",
    "\n",
    "dA_prev, dW, db = linear_backward(dZ, linear_cache)\n",
    "print (\"dA_prev = \"+ str(dA_prev))\n",
    "print (\"dW = \" + str(dW))\n",
    "print (\"db = \" + str(db))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def linear_activation_backward(dA, cache, activation = \"relu\"):\n",
    "    \"\"\"\n",
    "    Backward propagation for the LINEAR->ACTIVATION layer.\n",
    "    \n",
    "    Arguments:\n",
    "        dA: post-activation gradient for the current layer l\n",
    "        cache: tuple (linear_cache, activation_cache) stored during the forward pass\n",
    "        activation: activation used in this layer, string, \"sigmoid\" or \"relu\"\n",
    "        \n",
    "    Returns:\n",
    "        dA_prev: gradient of the cost w.r.t. the previous layer's activation, same shape as A_prev\n",
    "        dW: gradient of the cost w.r.t. W, same shape as W\n",
    "        db: gradient of the cost w.r.t. b, same shape as b\n",
    "    Raises:\n",
    "        ValueError: if activation is neither \"sigmoid\" nor \"relu\"\n",
    "    \"\"\"\n",
    "    linear_cache, activation_cache = cache\n",
    "    if activation == \"relu\":\n",
    "        dZ = relu_backward(dA, activation_cache)\n",
    "        dA_prev, dW, db = linear_backward(dZ, linear_cache)\n",
    "    elif activation == \"sigmoid\":\n",
    "        dZ = sigmoid_backward(dA, activation_cache)\n",
    "        dA_prev, dW, db = linear_backward(dZ, linear_cache)\n",
    "    else:\n",
    "        # Fail fast: without this, an unknown activation raised a confusing\n",
    "        # UnboundLocalError on dA_prev at the return below\n",
    "        raise ValueError('activation must be \"sigmoid\" or \"relu\", got: ' + str(activation))\n",
    "    \n",
    "    return dA_prev, dW, db"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "==============测试linear_activation_backward==============\n",
      "sigmoid:\n",
      "dA_prev = [[ 0.11017994  0.01105339]\n",
      " [ 0.09466817  0.00949723]\n",
      " [-0.05743092 -0.00576154]]\n",
      "dW = [[ 0.10266786  0.09778551 -0.01968084]]\n",
      "db = [[-0.05729622]]\n",
      "\n",
      "relu:\n",
      "dA_prev = [[ 0.44090989 -0.        ]\n",
      " [ 0.37883606 -0.        ]\n",
      " [-0.2298228   0.        ]]\n",
      "dW = [[ 0.44513824  0.37371418 -0.10478989]]\n",
      "db = [[-0.20837892]]\n"
     ]
    }
   ],
   "source": [
    "# Test linear_activation_backward with both activations\n",
    "print(\"==============测试linear_activation_backward==============\")\n",
    "AL, linear_activation_cache = testCases.linear_activation_backward_test_case()\n",
    "\n",
    "dA_prev, dW, db = linear_activation_backward(AL, linear_activation_cache, activation = \"sigmoid\")\n",
    "print (\"sigmoid:\")\n",
    "print (\"dA_prev = \"+ str(dA_prev))\n",
    "print (\"dW = \" + str(dW))\n",
    "print (\"db = \" + str(db) + \"\\n\")\n",
    "\n",
    "dA_prev, dW, db = linear_activation_backward(AL, linear_activation_cache, activation = \"relu\")\n",
    "print (\"relu:\")\n",
    "print (\"dA_prev = \"+ str(dA_prev))\n",
    "print (\"dW = \" + str(dW))\n",
    "print (\"db = \" + str(db))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def L_model_backward(AL, Y, caches):\n",
    "    \"\"\"\n",
    "    Backward propagation for the [LINEAR->RELU] * (L-1) -> LINEAR->SIGMOID model,\n",
    "    i.e. the full backward pass of the multi-layer network.\n",
    "     \n",
    "    Arguments:\n",
    "        AL: probability vector, output of the forward pass (L_model_forward())\n",
    "        Y: true label vector, shape (1, number of examples)\n",
    "        caches: list of caches:\n",
    "            caches of linear_activation_forward() with \"relu\" (all layers except the output)\n",
    "            cache of linear_activation_forward() with \"sigmoid\" (the output layer)\n",
    "   \n",
    "    Returns:\n",
    "        grads: dictionary with the gradients\n",
    "            grads[\"dA\" + str(l)] = ...\n",
    "            grads[\"dW\" + str(l)] = ...\n",
    "            grads[\"db\" + str(l)] = ...\n",
    "        NOTE(review): the \"dA\" keys are shifted by one relative to the usual convention —\n",
    "        grads[\"dA\" + str(l)] appears to hold the gradient w.r.t. A_{l-1} (e.g. \"dA1\" is the\n",
    "        gradient w.r.t. the input X when L == 2). Internally consistent, but confirm before reuse.\n",
    "    \"\"\"\n",
    "    grads = {}\n",
    "    L = len(caches)\n",
    "    m = AL.shape[1]\n",
    "    Y = Y.reshape(AL.shape)\n",
    "    # Derivative of the cross-entropy cost with respect to AL\n",
    "    dAL = -(np.divide(Y, AL) - np.divide(1 - Y, 1 - AL))\n",
    "    \n",
    "    # Output (sigmoid) layer\n",
    "    current_cache = caches[L-1]\n",
    "    grads[\"dA\" + str(L)], grads[\"dW\" + str(L)], grads[\"db\" + str(L)] = linear_activation_backward(dAL, current_cache, \"sigmoid\")\n",
    "    \n",
    "    # Hidden (relu) layers, walked from the last hidden layer down to the first\n",
    "    for l in reversed(range(L-1)):\n",
    "        current_cache = caches[l]\n",
    "        grads[\"dA\" + str(l+1)], grads[\"dW\" + str(l+1)], grads[\"db\" + str(l+1)] = linear_activation_backward(grads[\"dA\" + str(l+2)], current_cache, \"relu\")\n",
    "        \n",
    "    return grads"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "==============测试L_model_backward==============\n",
      "dW1 = [[ 0.41010002  0.07807203  0.13798444  0.10502167]\n",
      " [ 0.          0.          0.          0.        ]\n",
      " [ 0.05283652  0.01005865  0.01777766  0.0135308 ]]\n",
      "db1 = [[-0.22007063]\n",
      " [ 0.        ]\n",
      " [-0.02835349]]\n",
      "dA1 = [[ 0.          0.52257901]\n",
      " [ 0.         -0.3269206 ]\n",
      " [ 0.         -0.32070404]\n",
      " [ 0.         -0.74079187]]\n"
     ]
    }
   ],
   "source": [
    "# Test L_model_backward\n",
    "print(\"==============测试L_model_backward==============\")\n",
    "AL, Y_assess, caches = testCases.L_model_backward_test_case()\n",
    "grads = L_model_backward(AL, Y_assess, caches)\n",
    "print (\"dW1 = \"+ str(grads[\"dW1\"]))\n",
    "print (\"db1 = \"+ str(grads[\"db1\"]))\n",
    "print (\"dA1 = \"+ str(grads[\"dA1\"]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def update_parameters(parameters, grads, learning_rate):\n",
    "    \"\"\"\n",
    "    Update parameters with one step of gradient descent.\n",
    "    \n",
    "    Arguments:\n",
    "        parameters: dictionary of parameters (mutated in place and also returned)\n",
    "        grads: dictionary of gradients, output of L_model_backward\n",
    "        learning_rate: gradient-descent step size\n",
    "    \n",
    "    Returns:\n",
    "        parameters: dictionary with the updated parameters\n",
    "            parameters[\"W\" + str(l)] = ...\n",
    "            parameters[\"b\" + str(l)] = ...\n",
    "    \"\"\"\n",
    "    L = len(parameters) // 2 # number of layers: one W and one b per layer\n",
    "    for l in range(L):\n",
    "        parameters[\"W\" + str(l+1)] = parameters[\"W\" + str(l+1)] - learning_rate * grads[\"dW\" + str(l+1)]\n",
    "        parameters[\"b\" + str(l+1)] = parameters[\"b\" + str(l+1)] - learning_rate * grads[\"db\" + str(l+1)]\n",
    "    \n",
    "    return parameters"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "==============测试update_parameters==============\n",
      "W1 = [[-0.59562069 -0.09991781 -2.14584584  1.82662008]\n",
      " [-1.76569676 -0.80627147  0.51115557 -1.18258802]\n",
      " [-1.0535704  -0.86128581  0.68284052  2.20374577]]\n",
      "b1 = [[-0.04659241]\n",
      " [-1.28888275]\n",
      " [ 0.53405496]]\n",
      "W2 = [[-0.55569196  0.0354055   1.32964895]]\n",
      "b2 = [[-0.84610769]]\n"
     ]
    }
   ],
   "source": [
    "# Test update_parameters with learning rate 0.1\n",
    "print(\"==============测试update_parameters==============\")\n",
    "parameters, grads = testCases.update_parameters_test_case()\n",
    "parameters = update_parameters(parameters, grads, 0.1)\n",
    "\n",
    "print (\"W1 = \"+ str(parameters[\"W1\"]))\n",
    "print (\"b1 = \"+ str(parameters[\"b1\"]))\n",
    "print (\"W2 = \"+ str(parameters[\"W2\"]))\n",
    "print (\"b2 = \"+ str(parameters[\"b2\"]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Build the two-layer neural network\n",
    "def two_layer_model(X, Y, layers_dims, learning_rate=0.0075, num_iterations=3000, print_cost=False, isPlot=True):\n",
    "    \"\"\"\n",
    "    Implement a two-layer neural network: LINEAR->RELU -> LINEAR->SIGMOID.\n",
    "    Arguments:\n",
    "        X - input data, shape (n_x, number of examples)\n",
    "        Y - label vector (0 = non-cat, 1 = cat), shape (1, number of examples)\n",
    "        layers_dims - layer sizes as (n_x, n_h, n_y)  # fixed: docstring previously said (n_y,n_h,n_y)\n",
    "        learning_rate - gradient-descent step size\n",
    "        num_iterations - number of training iterations\n",
    "        print_cost - if True, print the cost every 100 iterations\n",
    "        isPlot - if True, plot the recorded cost curve after training\n",
    "    Returns:\n",
    "        parameters - dictionary containing W1, b1, W2 and b2\n",
    "    \"\"\"\n",
    "    np.random.seed(1)\n",
    "    grads = {}\n",
    "    costs = []\n",
    "    (n_x, n_h, n_y) = layers_dims\n",
    "    \n",
    "    # Initialize the parameters\n",
    "    parameters = initialize_parameters(n_x, n_h, n_y)\n",
    "    \n",
    "    W1 = parameters[\"W1\"]\n",
    "    b1 = parameters[\"b1\"]\n",
    "    W2 = parameters[\"W2\"]\n",
    "    b2 = parameters[\"b2\"]\n",
    "    \n",
    "    # Gradient-descent loop\n",
    "    for i in range(0, num_iterations):\n",
    "        # Forward propagation\n",
    "        A1, cache1 = linear_activation_forward(X, W1, b1, \"relu\")\n",
    "        A2, cache2 = linear_activation_forward(A1, W2, b2, \"sigmoid\")\n",
    "        \n",
    "        # Compute the cost\n",
    "        cost = compute_cost(A2, Y)\n",
    "        \n",
    "        # Backward propagation\n",
    "        ## Derivative of the cost with respect to A2\n",
    "        dA2 = -(np.divide(Y, A2) - np.divide(1 - Y, 1 - A2))\n",
    "        \n",
    "        ## Propagate back through both layers\n",
    "        dA1, dW2, db2 = linear_activation_backward(dA2, cache2, \"sigmoid\")\n",
    "        dA0, dW1, db1 = linear_activation_backward(dA1, cache1, \"relu\")\n",
    "        \n",
    "        ## Store the gradients\n",
    "        grads[\"dW2\"] = dW2\n",
    "        grads[\"db2\"] = db2\n",
    "        grads[\"dW1\"] = dW1\n",
    "        grads[\"db1\"] = db1\n",
    "        \n",
    "        # Update the parameters\n",
    "        parameters = update_parameters(parameters, grads, learning_rate)\n",
    "        W1 = parameters[\"W1\"]\n",
    "        b1 = parameters[\"b1\"]\n",
    "        W2 = parameters[\"W2\"]\n",
    "        b2 = parameters[\"b2\"]\n",
    "        \n",
    "        # Record (and optionally print) the cost every 100 iterations\n",
    "        if i%100 == 0:\n",
    "            costs.append(cost)\n",
    "            if print_cost:\n",
    "                print(\"第\", i, \"次迭代，成本值为\", np.squeeze(cost))\n",
    "                \n",
    "    # After training, plot the cost curve if requested\n",
    "    if isPlot:\n",
    "        plt.plot(np.squeeze(costs))\n",
    "        plt.ylabel('cost')\n",
    "        plt.xlabel('iterations (per hundreds)')  # fixed typo: was 'iteartions(pre tens)'; costs are sampled every 100 iterations\n",
    "        plt.title(\"learning rate =\" + str(learning_rate))\n",
    "        plt.show()\n",
    "    \n",
    "    return parameters"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Load the cat / non-cat dataset via the course helper\n",
    "train_set_x_orig, train_set_y, test_set_x_orig, test_set_y, classes = lr_utils.load_dataset()\n",
    "\n",
    "# Flatten each image into a single column: result shape (features, number of examples)\n",
    "train_x_flatten = train_set_x_orig.reshape(train_set_x_orig.shape[0], -1).T \n",
    "test_x_flatten = test_set_x_orig.reshape(test_set_x_orig.shape[0], -1).T\n",
    "\n",
    "# Scale pixel values from [0, 255] down to [0, 1]\n",
    "train_x = train_x_flatten / 255\n",
    "train_y = train_set_y\n",
    "test_x = test_x_flatten / 255\n",
    "test_y = test_set_y"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "第 0 次迭代，成本值为 0.69304973566\n",
      "第 100 次迭代，成本值为 0.646432095343\n",
      "第 200 次迭代，成本值为 0.632514064791\n",
      "第 300 次迭代，成本值为 0.601502492035\n",
      "第 400 次迭代，成本值为 0.560196631161\n",
      "第 500 次迭代，成本值为 0.515830477276\n",
      "第 600 次迭代，成本值为 0.475490131394\n",
      "第 700 次迭代，成本值为 0.433916315123\n",
      "第 800 次迭代，成本值为 0.40079775362\n",
      "第 900 次迭代，成本值为 0.358070501132\n",
      "第 1000 次迭代，成本值为 0.339428153837\n",
      "第 1100 次迭代，成本值为 0.30527536362\n",
      "第 1200 次迭代，成本值为 0.274913772821\n",
      "第 1300 次迭代，成本值为 0.246817682106\n",
      "第 1400 次迭代，成本值为 0.198507350375\n",
      "第 1500 次迭代，成本值为 0.174483181126\n",
      "第 1600 次迭代，成本值为 0.170807629781\n",
      "第 1700 次迭代，成本值为 0.113065245622\n",
      "第 1800 次迭代，成本值为 0.0962942684594\n",
      "第 1900 次迭代，成本值为 0.0834261795973\n",
      "第 2000 次迭代，成本值为 0.0743907870432\n",
      "第 2100 次迭代，成本值为 0.0663074813227\n",
      "第 2200 次迭代，成本值为 0.0591932950104\n",
      "第 2300 次迭代，成本值为 0.0533614034856\n",
      "第 2400 次迭代，成本值为 0.0485547856288\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYYAAAEWCAYAAABi5jCmAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XeYVOXZx/HvvZ2yBdillwUBlSIoSxGxpBnRKHZB7AUx\n1hTfaPImMc0kJqaoxAj2BvaOr5rELm0pUkRwRXpb+i59d+/3jzmY2XUXljJ7Znd+n+uai5nnPOfM\nfXYu5jfnOc3cHRERkT2Swi5ARETii4JBREQqUTCIiEglCgYREalEwSAiIpUoGEREpBIFgxwSZrbY\nzL4d0nuXmlmXMN5bpCFSMEi95+5N3X1R2HUAmJmbWdc6eJ98M3vHzLaZ2Wd7C2WL+KOZrQ8efzQz\nq82yzOynQfDueWw3swozyw2mP2Jmu6r0SY7t2kusKRgkrsXTl4yZpYRdQ5TxwEygBfAz4Dkzy6uh\n7yjgTKAPcBRwOnBNbZbl7ncEwdvU3ZsCfwTedfd1UfPfGd3H3csP3WpKGBQMcsiZWZKZ3WpmXwS/\nUJ8xs+ZR0581s9VmttnM3jeznlHTHjGz+8xsopltBb4RtI0xs9fNrMTMppjZYVHzfPUrvRZ9Tzaz\nBcF7/8PM3jOzq2pYj9vN7Dkze8LMtgCXmdkAM5tkZpvMbJWZ3WtmaUH/94NZPwl+OV8QtH/PzGYF\n83xsZkcd5N+3O3AM8Et33+7uzwOzgXNqmOVS4C53X+7uK4A/A5ft77KCrYxLgEcPpn6JfwoGiYUb\niPxCPRFoC2wExkRNfwPoBrQEZgBPVpn/QuB3QCbwYdA2HPgV0AwoCqbXpNq+wfDHc8BtRH4dLwAG\n72NdhgXz5AR1lgM/AHKBY4FvAd8HcPcTgnn6BL+cnzazo4GHiPxCbwHcD7xiZunVvZmZzQ4CpLrH\nP4JuPYFF7l4SNesnQXt1egbTq+u7P8s6nshn9nyV9u+b2QYzm25mNYWT1CMKBomF0cDPgl+oO4Hb\ngXP3DMW4+0PuXhI1rY+ZZUfN/7K7f+TuFe6+I2h70d2nunsZkS/ovnt5/5r6ngrMc/cXgml3A6v3\nsS6T3P2loJbt7j7d3Se7e5m7LybyRX/iXuYfBdzv7lPcvdzdHwV2AoOq6+zuR7l7Tg2P7wfdmgKb\nq8y6hUiQVqdq/y1A02ALYH+WdSnwnLuXRrXdzX9D/ufAI2Z2XA11SD0RT2Om0nB0Al40s4qotnKg\nlZmtJvIL/jwgD9jTJ5f/fkEtq2aZ0V/g24h8odWkpr5to5ft7m5my/e+KpVrCYZe/gIUAI2J/B+a\nvpf5OwGXmtkNUW1pQS0HqhTIqtKWDZRU07e6/tlAabD+tVqWmTUm8pkNi2539xlRLyea2ZPA2cBH\ntVgPiVPaYpBYWAYMrfJrNyMY376QyJfLt4l8AeUH81jU/LG65O8qoP2eF8Ev5vY1d6+2lvuAz4Bu\n7p4F/JTKtVe1DPhdlb9FY3cfX11nM5tX5Qif6Mc/g27zgC5mFv2rvk/QXp15wfTq+tZ2WWcBG4B3\n97KuEPl77e3vIfWAgkFi4Z/A78ysE4CZ5ZnZnl+amUSGUtYT+cV9Rx3W9TrQ28zODIa1rgNa7+cy\nMokMtZSa2RHAtVWmrwGiz6kYB4w2s4EW0cTMTqvyRfwVd+9Z5Qif6MfooM9CYBbwSzPLMLOzgd58\nfex/j8eAH5pZOzNrB/wIeGQ/l3Up8JhXuU6/mZ1rZk2DAw5OBi4CXqmhDqknFAwSC38n8uXwlpmV\nAJOBgcG0x4AlwArg02BanQgOsTwPuJNIMPUACokEVW39mMhWTwmRL/2nq0y/HXg02Fl8vrsXAlcD\n9xLZCV9EcETQQRpOZDhrI/B74Fx3LwYws+ODIaI97gdeBeYEj9eCtn0uK1heO+CbRD67qm4i8llu\nAv4EXO3u7x6C9ZMQmW7UI4nKzJKA5c
BId38n7HpE4oW2GCShmNl3zSwnOFx0z/6BOttqEakPFAyS\naI4FvgDWETkD+Ex33x5uSSLxRUNJIiJSibYYRESkknp3gltubq7n5+eHXYaISL0yffr0de5e04UW\nK6l3wZCfn09hYWHYZYiI1CtmtqS2fTWUJCIilSgYRESkEgWDiIhUEtNgMLNTgpuiFJnZrdVMvyW4\ngcksM5trZuUWdUMXERGpezELBovcknEMMJTINWlGmFmP6D7u/id37+vufYncPOU9d98Qq5pERGTf\nYrnFMAAocvdF7r4LmECVa7lXMYLIvWdFRCREsQyGdlS+ycnyoO1rgpuAnEINlw02s1FmVmhmhcXF\nxdV1ERGRQyRedj6fDnxU0zCSu4919wJ3L8jLq9X5GV9TXLKT21+Zx66yin13FhFJYLEMhhVAh6jX\n7YO26gwnxsNIU7/cwCMfL+aW5z6hokLXhxIRqUksg2Ea0M3MOptZGpEv/6/d2Sm4CfyJwMsxrIXT\njmrDLd89nJdnreSPb34Wy7cSEanXYnZJDHcvM7PrgTeBZOAhd59nZntuT7jn/rVnAW+5+9ZY1bLH\n9086jFWbt3P/e4tom92ISwfnx/otRUTqnZheK8ndJwITq7T9s8rrRwjuPxtrZsavzujFmi07uf3V\nebTKSueUXm3q4q1FROqNeNn5XGeSk4y7hx9N3w453DRhFoWLddqEiEi0hAsGgEZpyTx4aX/a5jTi\nykcLKVpbuu+ZREQSREIGA0DzJmk8evkAUpONSx+aytotO8IuSUQkLiRsMAB0bNGYhy8bwMZtu7j8\nkWmU7iwLuyQRkdAldDAA9G6fzZiRx/DZ6hKufWI6u8t1ApyIJLaEDwaAbxzekt+f3ZsPPl/HT56f\njbtOgBORxFXvbu0ZK+cXdGDVph389V8LaZvdiB9/9/CwSxIRCYWCIcqN3+rK6i3bufedItrkZDBy\nYKewSxIRqXMKhihmxm+GRU6A+/lLc2mZmcF3erQKuywRkTqlfQxVpCQnce+FR9O7fQ43jJ+hE+BE\nJOEoGKrROC2FBy8toFVWBuf+cxJnjvmIBz5YxMpN28MuTUQk5qy+HYFTUFDghYWFdfJexSU7eXb6\nMl6fvYp5K7cA0K9TM07r3YZTe7ehdXZGndQhInKwzGy6uxfUqq+CoXa+XLeViXNW8drsVcxftQUz\n6N+pOacd1YahvVrTMkshISLxS8EQY0VrS5k4ZxWvz17FgjUlmMGA/OZ8r09bTunZmrzM9FDrExGp\nSsFQhz5fU8LrwZZE0dpSkgxGDuzEz047kozU5LDLExEBFAyhcHcWrinlySlLeGzSEnq0yWLMyGPo\nnNsk7NJERPYrGHRU0iFiZhzeOpNfD+vFQ5cVsHLzdk6/50Ne/WRl2KWJiOwXBUMMfPOIVky88Xi6\nt2rKDeNn8rMX57Bjd3nYZYmI1IqCIUba5jTi6WuO5ZoTu/DklKWc/Y+P+XJdzG9rLSJy0BQMMZSa\nnMRtQ4/U0JKI1CsKhjqgoSURqU8UDHVEQ0siUl/ENBjM7BQzW2BmRWZ2aw19TjKzWWY2z8zei2U9\nYdPQkojUBzELBjNLBsYAQ4EewAgz61GlTw7wD+AMd+8JnBereuJJdUNLu8p0S1ERiQ+x3GIYABS5\n+yJ33wVMAIZV6XMh8IK7LwVw97UxrCeufDW0dEJkaOmG8TMUDiISF2IZDO2AZVGvlwdt0boDzczs\nXTObbmaXVLcgMxtlZoVmVlhcXByjcuteanISt516JL86oydvzlujcBCRuBD2zucUoB9wGvBd4Odm\n1r1qJ3cf6+4F7l6Ql5dX1zXG3KWD878Kh+ufUjiISLhiGQwrgA5Rr9sHbdGWA2+6+1Z3Xwe8D/SJ\nYU1xa084vPWpwkFEwhXLYJgGdDOzzmaWBgwHXqnS52VgiJmlmFljYCAwP4Y1xTWFg4jEg5gFg7uX\nAd
cDbxL5sn/G3eeZ2WgzGx30mQ/8HzAbmAo84O5zY1VTfaBwEJGw6bLbceqxSYv5xcvz+E6PVoy5\n8BjSUsLeHSQi9Zkuu90AXHJsPr8e1pO3P13DddpyEJE6pGCIYwoHEQmDgiHOKRxEpK4pGOqB6HD4\n/pMKBxGJLQVDPbEnHP41X+EgIrGlYKhHosPhuqdmsLtc4SAih56CoZ6JHlb6n+dmU1FRvw43FpH4\nlxJ2AbL/Ljk2n5IdZfzpzQVkZaRw+xk9MbOwyxKRBkLBUE99/6TD2Lx9N2PfX0R2o1R+ePLhYZck\nIg2EgqGeMjNuG3oEm7ft5u7/FJHVKJWrju8Sdlki0gAoGOoxM+OOs3tTsnM3v319PlmNUjm/oMO+\nZxQR2QsFQz2XnGT89YK+lOwo5NbnZ5OVkcIpvdqEXZaI1GM6KqkBSE9J5v6L+9G3Qw43jp/Fh5+v\nC7skEanHFAwNROO0FB6+bABd8pow6vFCZizdGHZJIlJPKRgakOzGqTx25QDyMtO5/OFpLFhdEnZJ\nIlIPKRgamJaZGTxx5UAyUpO4+MEpLF2/LeySRKSeUTA0QB2aN+bxKweyq7yCkQ9OZs2WHWGXJCL1\niIKhgereKpNHLh/AhtJdXPzgFDZt2xV2SSJSTygYGrC+HXIYd0kBi9dv47KHp7F1Z1nYJYlIPaBg\naOAGd83l3hFHM2fFZi5/ZBqlCgcR2QcFQwI4uWdr/npBX6Yv2cilD01ly47dYZckInEspsFgZqeY\n2QIzKzKzW6uZfpKZbTazWcHjF7GsJ5Gd0act9444mk+WbeLiB6eyeZvCQUSqF7NgMLNkYAwwFOgB\njDCzHtV0/cDd+waPX8eqHoGhvdtw30X9mL9yCxc+MJmNW7VDWkS+LpZbDAOAIndf5O67gAnAsBi+\nn9TCd3q04v5L+vH52lJGjJvMutKdYZckInEmlsHQDlgW9Xp50FbVYDObbWZvmFnP6hZkZqPMrNDM\nCouLi2NRa0L5xuEteejS/ixev5URYyeztkTnOYjIf4W983kG0NHdjwLuAV6qrpO7j3X3AncvyMvL\nq9MCG6oh3XJ55PIBrNi0neH3T2b1ZoWDiETEMhhWANE3B2gftH3F3be4e2nwfCKQama5MaxJogzq\n0oLHrhjA2pKdXDB2Eis2bQ+7JBGJA7EMhmlANzPrbGZpwHDglegOZtbagpsVm9mAoJ71MaxJqijI\nb87jVw5gw9ZdXHD/JJZt0LWVRBJdzILB3cuA64E3gfnAM+4+z8xGm9nooNu5wFwz+wS4Gxju7h6r\nmqR6R3dsxlNXDaJkRxnn3z+Jxeu2hl2SiITI6tv3cEFBgRcWFoZdRoP06cotXPTgFFKSjKeuHkTX\nlk3DLklEDhEzm+7uBbXpG/bOZ4kjPdpmMWHUICocho+drPs5iCQoBYNU0r1VJhNGDSLJYMS4ycxf\ntSXskkSkjikY5Gu6tmzK09ccS1pyEiMfmMJnqxUOIolEwSDV6pzbhAmjBpGWnMSF46ZoWEkkgSgY\npEb5uU0YP2oQqcnGheMms3CNwkEkESgYZK865zZh/NWDSE6KhMPnCgeRBk/BIPvUJa9psEPaGKFw\nEGnwFAxSK13ymjJ+1CDMjBHjplC0VuEg0lApGKTWDstryvirB2EGw8dOoWhtadgliUgMKBhkv3Rt\nGQkHiJzn8EWxwkGkoVEwyH7r2rIpE0YNxN0ZMVbhINLQKBjkgHRtmcn4qwdREYTDIoWDSIOhYJAD\n1q1VJk9dPYjyCmfEuMl8qauyijQICgY5KN2DcCgrj2w56JLdIvWfgkEO2uGtI+Gwq7yC8++fxIyl\nG8MuSUQOgoJBDonDW0euypqemsTw+yczYerSsEsSkQOkYJBDpnurTF69fggDuzTn1hfm8L8vzWFX\nWUXYZYnIflIwyCGV0ziNRy4fwDUnduGJyUu5cNxk1pbsCLssEdkP
CgY55JKTjNuGHsk9I45m3sot\nnH7Ph8zUfgeRekPBIDFzep+2PH/tYNJSkrjg/sk8M21Z2CWJSC0oGCSmerTN4pXrIvsd/uf52drv\nIFIPKBgk5po1SePhy/pzzQmR/Q4jH9B+B5F4FtNgMLNTzGyBmRWZ2a176dffzMrM7NxY1iPhSUlO\n4rZTj+TuEUczZ8VmzrjnI2Yt2xR2WSJSjZgFg5klA2OAoUAPYISZ9aih3x+Bt2JVi8SPM/q05YVr\njyMl2Tj/n5O030EkDsVyi2EAUOTui9x9FzABGFZNvxuA54G1MaxF4kiPtlm8ev0Q+nduxv88P5vf\nvzEfdw+7LBEJxDIY2gHRPweXB21fMbN2wFnAfXtbkJmNMrNCMyssLi4+5IVK3WvWJI1HLx/AyIEd\nuf+9Rfzl7YVhlyQigVoFg5mdV5u2A/A34CfuvtfDVNx9rLsXuHtBXl7eIXhbiQcpyUn8ZlgvRgzo\nwD3/KeKef38edkkiAqTUst9twLO1aIu2AugQ9bp90BatAJhgZgC5wKlmVubuL9WyLqnnkpKM353Z\nm527K7jr7YWkpyYx6oTDwi5LJKHtNRjMbChwKtDOzO6OmpQFlO1j2dOAbmbWmUggDAcujO7g7p2j\n3usR4DWFQuJJSjLuPPcodpZXcMfEz0hPSebSwflhlyWSsPa1xbASKATOAKZHtZcAP9jbjO5eZmbX\nA28CycBD7j7PzEYH0/95wFVLg5OSnMTfLujLrrIKfvnKPNJTkhg+oGPYZYkkJKvN0SBmluruu4Pn\nzYAO7j471sVVp6CgwAsLC8N4a6kDO8vKuebx6by3sJi7zuvD2ce0D7skkQbBzKa7e0Ft+tb2qKS3\nzSzLzJoDM4BxZvbXA65QpAbpKcn886J+HNulBT9+9hNem70y7JJEEk5tgyHb3bcAZwOPuftA4Fux\nK0sSWUZqMg9cWkC/Ts24acIs3pq3OuySRBJKbYMhxczaAOcDr8WwHhEAGqel8NBl/endLpvrnprB\nOwt0/qNIXaltMPyayE7kL9x9mpl1AXTQucRUZkYqj14xgO6tMhn9+HQ+KloXdkkiCaFWweDuz7r7\nUe5+bfB6kbufE9vSRCC7USqPXzmQ/BZNuOrRQqZ+uSHskkQavNqe+dzezF40s7XB43kz0+EiUiea\nN0njiasG0iYng8sfnqq7wYnEWG2Hkh4GXgHaBo9XgzaROpGXmc5TVw0iNzOdSx6ayuOTFrN9V3nY\nZYk0SLUNhjx3f9jdy4LHI4AuWiR1qnV2Bk9dPYiuLZvy85fnMfgP/+autxZQXLIz7NJEGpTaBsN6\nM7vIzJKDx0XA+lgWJlKddjmNeOHawTw7+lj65zfn3neKOO4P/+Enz83m8zUlYZcn0iDU9sznTsA9\nwLGAAx8DN7h7nd9lRWc+S7Qv123lwQ8X8dz05ezYXcE3Ds/j6uO7cOxhLQguzigi7N+Zz7UNhkeB\nm919Y/C6OfBnd7/ioCo9AAoGqc6Grbt4YvISHpu0mHWlu+jZNourj+/CaUe1ITVZtzYXiUUwzHT3\no/fVVhcUDLI3O3aX8/KsFYz74EuK1pbSJjuDy4/LZ/iAjmRlpIZdnkhoYnGtpKTg4nl73qA5tb+X\ng0idyUhN5oL+HXnr5hN46LIC8ls04Y6Jn3Hine8w6QvtFhOpjdoGw13AJDP7jZn9hsg+hjtjV5bI\nwUlKMr55RCvGjxrEq9cPoUXTdC5+cApPTlkSdmkica+2Zz4/RuQCemuCx9nu/ngsCxM5VHq3z+aF\n7w9mSLdcfvbiXH7x8lx2l+/1brIiCa3Ww0Hu/inwaQxrEYmZrIxUHry0P3/8v88Y+/4iviguZcyF\nx5DTOC3s0kTijg7XkISRnGT89NQj+fN5fZj25UbOHPMRRWtLwy5LJO4oGCThnNuvPeNHDaR0Zxln\njflIl/QWqULBIAmpX6fmvHz9
ENo3b8yVj0zjgQ8WUZtDt0USgYJBEla7nEY8f+2xfLdna377+nxu\neW42O8t0YT4RBYMktMZpKYy58Bhu/FY3npu+nAvHTdFF+SThKRgk4SUlGT/8TnfGXHgM81ZuZti9\nHzJv5eawyxIJTUyDwcxOMbMFZlZkZrdWM32Ymc02s1lmVmhmQ2JZj8jenHZUG54bPRgHzr1vEq/N\nXhl2SSKhiFkwmFkyMAYYCvQARphZjyrd/g30cfe+wBXAA7GqR6Q2erXL5uXrj+PINplc/9RMfvbi\nHHbs1n4HSSyx3GIYABQF94feBUwAhkV3cPdS/++hIE2IXNJbJFQtMzN4+ppjuebELjw5ZSnD7v1I\n93qQhBLLYGgHRN+vYXnQVomZnWVmnwGvE9lq+BozGxUMNRUWFxfHpFiRaKnJSdw29EgevWIA60p3\ncvq9H/L0tKU6pFUSQug7n939RXc/AjgT+E0Nfca6e4G7F+Tl6Y6iUndO7J7HGzcdT79OzfjJ83O4\nccIstuzYHXZZIjEVy2BYAXSIet0+aKuWu78PdDGz3BjWJLLfWmZl8PgVA7nlu4czcc4qvnf3h3yy\nbFPYZYnETCyDYRrQzcw6m1kaMBx4JbqDmXW14P6LZnYMkI7uJS1xKCnJuO4bXXnmmkGUVzjn3Pcx\n495fREWFhpak4YlZMLh7GXA98CYwH3jG3eeZ2WgzGx10OweYa2aziBzBdIFrEFfiWL9OzZl44/F8\n+8hW/G7ifK54dBrrSnVCnDQstbq1ZzzRrT0lHrg7T0xZym9e+5ScRqn87YK+DO6qUVCJX7G4taeI\nRDEzLh7UiZevO47MjBRGPjiFu95aQJluACQNgIJB5CAc2SaLV28Ywnn92nPPf4o4576PKVqrcx6k\nflMwiBykxmkp3HluH/4x8hiWbtjGqXd/yAMfLKJcO6alnlIwiBwip/Zuw1s/OJETu+fx29fnM2Ls\nZJas3xp2WSL7TcEgcgjlZaYz9uJ+3HVeH+av3sLQv3/AE5OX6IxpqVcUDCKHmJlxTr/2vHnzCfTr\n1Iz/fWkulzw0lZWbtoddmkitKBhEYqRtTiMeu2IAvz2zF9OXbOS7f3uf56cv19aDxD0Fg0gMmRkX\nDerEGzcdz5Gts/jRs58w6vHpukucxDUFg0gd6NSiCeNHDeJ/TzuS9xYWc/Jf3+P12avCLkukWgoG\nkTqSnGRcdXwXJt44hA7NG3PdUzO4ecJMduukOIkzCgaROta1ZSYvXDuYm7/djZdmreTnL83VfgeJ\nKylhFyCSiFKSk7j5293ZXV7BmHe+ID+3CaNPPCzsskQABYNIqH70ncNZsn4bf3jjMzo1b8zQ3m3C\nLklEQ0kiYUpKMv58Xh+O6ZjDzU/PYpZuACRxQMEgErKM1GTGXVJAy6x0rnq0kOUbt4VdkiQ4BYNI\nHGjRNJ2HL+vPzrJyrnhkmu4rLaFSMIjEia4tM7n/on4sKt7KdU/O0GGsEhoFg0gcGdw1lzvO6s0H\nn6/jl6/M02GsEgodlSQSZ87v34Ev12/lvne/oHOLJlx9QpewS5IEo2AQiUO3nHw4S9dv44435tOh\neWNO6dU67JIkgWgoSSQOJSUZd53fhz7tc7j56Zl8osNYpQ7FNBjM7BQzW2BmRWZ2azXTR5rZbDOb\nY2Yfm1mfWNYjUp/sOYw1t2k6Vz1WyArdz0HqSMyCwcySgTHAUKAHMMLMelTp9iVworv3Bn4DjI1V\nPSL1UV5m5DDWHbvLueLhaZToMFapA7HcYhgAFLn7InffBUwAhkV3cPeP3X1j8HIy0D6G9YjUS91a\nZXLfyH58UVzK9U/NpEyHsUqMxTIY2gHLol4vD9pqciXwRnUTzGyUmRWaWWFxcfEhLFGkfhjSLZff\nntmL9xYWc/1TM1mzZUfYJUkDFhdHJZnZN4gEw5Dqprv7WIJhpoKCAh3YLQlp+ICObN6+m7veWs
j7\nnxdz/Te7csVxnclITQ67NGlgYrnFsALoEPW6fdBWiZkdBTwADHP39TGsR6Teu+bEw3j7hydwXNdc\n7vy/BZz81/d5a95qnQgnh1Qsg2Ea0M3MOptZGjAceCW6g5l1BF4ALnb3hTGsRaTB6NSiCeMuKeDx\nKweQnpLEqMenc8lDU/l8TUnYpUkDEbNgcPcy4HrgTWA+8Iy7zzOz0WY2Ouj2C6AF8A8zm2VmhbGq\nR6ShOb5bHhNvOp5fnt6DT5Zt4pS/f8Dtr8xj8zYduSQHx+rbJmhBQYEXFio/RKKtL93JX95eyPip\nS8lulMqPv3s4w/t3JDnJwi5N4oSZTXf3gtr01ZnPIg1Ai6bp/O6s3rx6wxC6tcrkZy/O5Xv3fMiU\nRdptJ/tPWwwiDYy7M3HOau6YOJ8Vm7ZzWu82nNA9l9ym6eQ2TadF0zRym6braKYEsz9bDHFxuKqI\nHDpmxmlHteGbR7Rk7PuLuO+9Il6fs+pr/TLTU74KiejAyM1M58jWmRTkNw+heokH2mIQaeB2lpVT\nXLKTdaW7WF+6k3WlkeeRtp2sL90VtO1kY9SO67EX9+Pknrqqa0OhLQYR+Up6SjLtmzWmfbPG++y7\nu7yC9aW7uOqxafzk+dn06ZBDq6yMOqhS4ol2PovIV1KTk2idncHfhx/Njt0V/PCZWVRU1K9RBTl4\nCgYR+ZrD8pryi9N78FHReh74cFHY5UgdUzCISLWG9+/AKT1b86c3FzB3xeawy5E6pGAQkWqZGX84\npzctmqRz4/iZbNtVFnZJUkcUDCJSo5zGafzlgj58uX4rv3nt07DLkTqiYBCRvRp8WC6jTzyM8VOX\n8UY150NIw6NgEJF9+sG3u3NU+2xufWEOqzbr3tMNnYJBRPYpLSWJvw8/mt3lFfzw6U8o1yGsDZqC\nQURqpXNuE24/oyeTFq3n/ve/CLsciSEFg4jU2nn92nNa7zb85a2FfLJsU9jlSIwoGESk1syMO87q\nTcvMdG6aMJOtO3UIa0OkYBCR/ZLdOJW/XNCXJRu2cfsr88IuR2JAwSAi+21QlxZcd1JXnp2+nNdn\n6xDWhkbBICIH5KZvd6Nvhxxue2E2KzbpENaGRMEgIgckNTmJvw/vS3mF84MJs3QIawOiYBCRA9ap\nRRN+PawXUxdv4Ocvz2Xtlh1hlySHgG7UIyIH5exj2jFz2UaemLyUZ6Yt4zs9WjFyYCcGH9aCpCQL\nuzw5ADHdYjCzU8xsgZkVmdmt1Uw/wswmmdlOM/txLGsRkdgwM357Zm/e+fFJXDGkM5MXreeiB6fw\nzbveZexKdlCUAAAMVklEQVT7X7Bh666wS5T9FLN7PptZMrAQ+A6wHJgGjHD3T6P6tAQ6AWcCG939\nz/taru75LBLfduwu5815q3ly8lKmLt5AWnISQ3u3ZuTATvTPb4aZtiLCEC/3fB4AFLn7oqCoCcAw\n4KtgcPe1wFozOy2GdYhIHcpITWZY33YM69uOhWtKeGrKUp6fsZyXZ62kW8umjBzYkbOOaU92o9Sw\nS5UaxHIoqR2wLOr18qBtv5nZKDMrNLPC4uLiQ1KciMRe91aZ3H5GT6b89Fvcec5RNE5L5vZXP2Xg\nHf/ilmc/4aOidewurwi7TKmiXux8dvexwFiIDCWFXI6I7KfGaSmc378D5/fvwNwVm3lyylJenrWC\nZ6cvJysjhW8e0ZKTe7bmhO55NE2vF19LDVosP4EVQIeo1+2DNhFJYL3aZfP7s3vzi+/14IPPi3nr\n0zX8e/4aXpq1krSUJI47rAUn92zNt45sScvMjLDLTUixDIZpQDcz60wkEIYDF8bw/USkHmmUlszJ\nPVtzcs/WlJVXULhkI29/uoa3Pl3NOy/MwQyO7pAT6dOjFV3ymoZdcsKI2VFJAGZ2KvA3IBl4yN1/\nZ2ajAdz9n2bWGigEsoAKoBTo4e5balqmjkoSadjcnQVrSn
hrXiQk5q6IfB0clteEb/doRf9Ozenb\nMYfcpukhV1q/7M9RSTENhlhQMIgklhWbtvOvYEtiyqINlAWX3ujYvDFHd8yhb4ccju7YjB5tskhL\n0cUcaqJgEJEGacfucuau2MzMpZuYuWwjM5duYtXmyGU40lKS6NU2i6M7NuPojpGwaJudofMmAgoG\nEUkYqzZvZ9bSTcxctomZSzcye/lmdpZFDoFtmZlOnw459GqbTa92WfRql02rrMTcoR0vJ7iJiMRc\nm+xGtOndiKG92wCwu7yCz1aVMHPZRmYs2cjsFZv51/w17PkNnJeZTq+2kZDoGQRGu5xG2rKIomAQ\nkQYlNTmJ3u2z6d0+m0uOzQegdGcZ81dtYe6KzcxdsYV5Kzfz/ufrvrpUeE7jVHq1zaZnuyx6tc3m\nyDaZ5LdoQkpyYu6zUDCISIPXND2F/vnN6Z/f/Ku2HbvL+Wx1CXNXbGbeykhgPPzhYnYFZ2KnJSfR\nJa8Jh7fOpHurTI4I/m2X06jBXzVWwSAiCSkjNZm+HSJHNe2xq6yChWtKWLimhAVrSli4uoTCxRt5\nedbKr/o0SUumW6tMDm+VSffWwb+tmpKXmd5ghqMUDCIigbSUJHq1y6ZXu+xK7Vt27ObzNaWRwFgd\nCY5/f7aGpwv/ezm4xmnJdGrRhPwWjcnPjfwbed2Elpnp9WorQ8EgIrIPWRmp9OvUjH6dmlVqX1e6\nk4VrSvh8TSmL129lyfptLFhTwr/mr2F3+X+P+MxITaJT8yZ0CkKjU4vGdGzemLY5jWib3YhGacl1\nvUp7pWAQETlAuU3TyW2azuDDciu1l1c4KzdtZ/H6rSxev40l6yL/frluK+8uLGZXWeUryjZvkkab\n7Aza5jSiXU4j2uZk0Ca70Vev8zLTSa7DLQ4Fg4jIIZacZHRo3pgOzRtzfLfK0yoqnNVbdrB0wzZW\nbd7Oyk07WLlpOys3bWfp+m1M/mI9JTvLKs2TkmS0ysrgssH5XH1Cl5jXr2AQEalDSUkWGULKaVRj\nny07drNqT2Bs3h4Exw5aZtXN9aEUDCIicSYrI5Ws1qkc3jozlPdPzLM3RESkRgoGERGpRMEgIiKV\nKBhERKQSBYOIiFSiYBARkUoUDCIiUomCQUREKql3t/Y0s2JgyQHOngusO4Tl1DeJvP6JvO6Q2Ouv\ndY/o5O55tZmp3gXDwTCzwtre87QhSuT1T+R1h8Ref637/q+7hpJERKQSBYOIiFSSaMEwNuwCQpbI\n65/I6w6Jvf5a9/2UUPsYRERk3xJti0FERPZBwSAiIpUkTDCY2SlmtsDMiszs1rDrqUtmttjM5pjZ\nLDMrDLueWDOzh8xsrZnNjWprbmZvm9nnwb/N9raM+qqGdb/dzFYEn/8sMzs1zBpjxcw6mNk7Zvap\nmc0zs5uC9kT57Gta//3+/BNiH4OZJQMLge8Ay4FpwAh3/zTUwuqImS0GCtw9IU7yMbMTgFLgMXfv\nFbTdCWxw9z8EPwyauftPwqwzFmpY99uBUnf/c5i1xZqZtQHauPsMM8sEpgNnApeRGJ99Tet/Pvv5\n+SfKFsMAoMjdF7n7LmACMCzkmiRG3P19YEOV5mHAo8HzR4n8h2lwalj3hODuq9x9RvC8BJgPtCNx\nPvua1n+/JUowtAOWRb1ezgH+weopB/5lZtPNbFTYxYSklbuvCp6vBlqFWUwIbjCz2cFQU4McSolm\nZvnA0cAUEvCzr7L+sJ+ff6IEQ6Ib4u59gaHAdcFwQ8LyyPhpwx9D/a/7gC5AX2AVcFe45cSWmTUF\nngdudvct0dMS4bOvZv33+/NPlGBYAXSIet0+aEsI7r4i+Hct8CKRobVEsyYYg90zFrs25HrqjLuv\ncfdyd68AxtGAP38zSyXypfiku78QNCfMZ1/d+h/I558owTAN6GZmnc0sDRgOvBJyTXXCzJoEO6Iw\nsybAycDcvc/VIL0CXB
o8vxR4OcRa6tSeL8XAWTTQz9/MDHgQmO/uf4malBCffU3rfyCff0IclQQQ\nHKL1NyAZeMjdfxdySXXCzLoQ2UoASAGeaujrbmbjgZOIXHJ4DfBL4CXgGaAjkcu2n+/uDW4nbQ3r\nfhKRYQQHFgPXRI25NxhmNgT4AJgDVATNPyUyzp4In31N6z+C/fz8EyYYRESkdhJlKElERGpJwSAi\nIpUoGEREpBIFg4iIVKJgEBGRShQMElfM7OPg33wzu/AQL/un1b3XIX6Pm83skhgsN8fMvh+D5X7P\nzH59qJcr9ZsOV5W4ZGYnAT929+8dgmUZYMAWd296sMvby/ukADOAY9y9rLbz1KZvcO2b1/ZcMfVQ\nCf42M4Dj3H3boVy21F/aYpC4YmalwdM/AMcH14//gZklm9mfzGxacDGwa4L+Tc3s32Y2I7jnxLCg\nPT+4/8ZjRM70fBBoFCzvyej3sog/mdncYBkXBO0nmdm7ZvacmX1mZk8GX6SY2R+C697PNrM9lzP+\nJjBjzxd9MO/fg/eca2YDgvbbzexxM/sIeLymdaviD8BhwbL+FCznlqh5fhW13vPNbJxFrsn/lpk1\nCqbdGFXzBPjq2kHvAgcdwNKAuLseesTNg8h14yFytu5rUe2jgP8NnqcDhUBnImdzZwXtuUARka2D\nfCJnfw6quuxq3usc4G0iZ8W3ApYCbYIaNhO5tlYSMAkYArQAFvDfLe6c4N9fATdELf9dYFzw/ARg\nbvD8diLXym+0t3WrUmv+nvmD1ycTudG7BbW9FrxHPlAG9A36PQNcFDxfCaRH1xw8HwncE/Znr0f8\nPLTFIPXFycAlZjaLyCUOWgDdiHwx3mFms4F/Ebmc+p7LKi9x98m1WPYQYLxHLjS2BngP6B9Mm+ru\nyz1yAbJZRL54NwM7gAfN7GxgzxBMG6C4yrLHw1f3Scgys5yg/RV3376PddvX3+NkYCaRoaAjoub5\n0t1nBc+nBzUDzAaeNLOLiITHHmuBtvt4P0kgKWEXIFJLRuTX+JuVGs0uA/KAfu6+2yJ3q8sIJm89\nBO+7M+p5OZDi7mXBsNC3gHOB64kMI22Peu89qu7E2/M6urZq120fDPi9u99fqTGyL6JqzY2C56cR\n2ao4HfiZmfX2yLBXRlC7CKB9DBK/SoDMqNdvAtcGlxXGzLoHV4vNBtYGofANoNNelrl7z/xVfABc\nEIz15xH58pxa00Iscr37bHefCPwA6BNMmg90rdJ9z/6KIcBmd99czSJrWrdo1f09rghqwczamVnL\nvdScBHRw93eAnxD5u+3ZEd+dBnrFVTkw2mKQeDUbKDezT4BHgL8TGRKZEewALiZyi8YngVfNbA6R\nsfnP9rLMscBsM5vh7iOj2l8EjgU+IfKL/n/cfbWZHVHDcjKBl80sg8gv9x8G7W8Aj1fpu8PMZgKp\nwBU1LO+BGtbtK+6+3sw+MrO5wBvufouZHQlMCvaHlwIXEdlCqE4y8ISZZQc13+3um4Jp3wBuq2E+\nSUA6XFXkEDKzF4kEy+dm9i6RQ24LQy6rRmbWisil2L8Vdi0SPzSUJHJo3UpkJ3R90RH4UdhFSHzR\nFoOIiFSiLQYREalEwSAiIpUoGEREpBIFg4iIVKJgEBGRSv4f75+WdNwM1goAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x1ce4bb12dd8>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Network dimensions: 12288 = 64*64*3 flattened image pixels (see reshape below)\n",
    "n_x = 12288\n",
    "n_h = 7\n",
    "n_y = 1\n",
    "layers_dims = (n_x,n_h,n_y)\n",
    "\n",
    "# Train the two-layer model on the flattened, normalized training set\n",
    "parameters = two_layer_model(train_x, train_set_y, layers_dims = (n_x, n_h, n_y), num_iterations = 2500, print_cost=True,isPlot=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [],
   "source": [
    "def predict(X, y, parameters):\n",
    "    \"\"\"\n",
    "    Predict with the trained L-layer neural network and print the accuracy.\n",
    "    \n",
    "    Arguments:\n",
    "        X: input data, one flattened example per column\n",
    "        y: true labels, shape (1, number of examples) -- used only to compute accuracy\n",
    "        parameters: trained model parameters (W1, b1, ..., WL, bL)\n",
    "    \n",
    "    Returns:\n",
    "        p: 0/1 predictions for X, shape (1, number of examples)\n",
    "    \"\"\"\n",
    "    \n",
    "    m = X.shape[1]\n",
    "    n = len(parameters) // 2  # number of layers (two entries, W and b, per layer)\n",
    "    p = np.zeros((1, m))\n",
    "    \n",
    "    # Forward pass: probas holds the sigmoid output of the final layer\n",
    "    probas, caches = L_model_forward(X, parameters)\n",
    "    \n",
    "    # Threshold the probabilities at 0.5 to obtain hard 0/1 predictions\n",
    "    for i in range(0, probas.shape[1]):\n",
    "        if probas[0, i] > 0.5:\n",
    "            p[0, i] = 1\n",
    "        else:\n",
    "            p[0, i] = 0\n",
    "            \n",
    "    # Accuracy = fraction of examples where prediction matches label\n",
    "    print(\"准确度为：\" + str(float(np.sum((p==y))/m)))\n",
    "    \n",
    "    return p"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "准确度为：1.0\n",
      "准确度为：0.72\n"
     ]
    }
   ],
   "source": [
    "predictions_train = predict(train_x, train_y, parameters)\n",
    "predictions_test = predict(test_x, test_y, parameters)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def L_layer_model(X, Y, layers_dims, learning_rate=0.0075, num_iterations=3000, print_cost=False, isPlot=True):\n",
    "    \"\"\"\n",
    "    Implements an L-layer neural network: [LINEAR -> RELU] * (L-1) -> LINEAR -> SIGMOID.\n",
    "\n",
    "    Arguments:\n",
    "        X - input data, shape (n_x, number of examples)\n",
    "        Y - label vector (0 = non-cat, 1 = cat), shape (1, number of examples)\n",
    "        layers_dims - sizes of each layer, (n_x, n_h, ..., n_h, n_y)\n",
    "        learning_rate - step size for the gradient descent update\n",
    "        num_iterations - number of optimization iterations\n",
    "        print_cost - if True, print the cost every 100 iterations\n",
    "        isPlot - if True, plot the cost curve at the end\n",
    "\n",
    "    Returns:\n",
    "        parameters - parameters learnt by the model; they can then be used to predict\n",
    "    \"\"\"\n",
    "    np.random.seed(1)  # fixed seed so weight initialization is reproducible\n",
    "    costs = []\n",
    "    \n",
    "    # Initialize parameters for all layers\n",
    "    parameters = initialize_parameters_deep(layers_dims)\n",
    "    \n",
    "    for i in range(0, num_iterations):\n",
    "        # Forward propagation\n",
    "        AL, caches = L_model_forward(X, parameters)\n",
    "        \n",
    "        # Compute the cross-entropy cost\n",
    "        cost = compute_cost(AL, Y)\n",
    "        \n",
    "        # Backward propagation\n",
    "        grads = L_model_backward(AL, Y, caches)\n",
    "        \n",
    "        # Gradient descent parameter update\n",
    "        parameters = update_parameters(parameters, grads, learning_rate)\n",
    "        \n",
    "        # Record (and optionally print) the cost every 100 iterations\n",
    "        if i % 100 == 0:\n",
    "            costs.append(cost)\n",
    "            if print_cost:\n",
    "                print(\"第\", i, \"次迭代，成本值为\", np.squeeze(cost))\n",
    "        \n",
    "    # Plot the recorded costs (one point per 100 iterations)\n",
    "    if isPlot:\n",
    "        plt.plot(np.squeeze(costs))\n",
    "        plt.ylabel('cost')\n",
    "        plt.xlabel('iterations(per tens)')\n",
    "        plt.title(\"learning rate=\" + str(learning_rate))\n",
    "        plt.show()\n",
    "    return parameters"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Load the cat / non-cat dataset\n",
    "train_set_x_orig , train_set_y , test_set_x_orig , test_set_y , classes = lr_utils.load_dataset()\n",
    "\n",
    "# Flatten each image into a single column vector (features x examples)\n",
    "train_x_flatten = train_set_x_orig.reshape(train_set_x_orig.shape[0], -1).T \n",
    "test_x_flatten = test_set_x_orig.reshape(test_set_x_orig.shape[0], -1).T\n",
    "\n",
    "# Scale pixel values from [0, 255] to [0, 1]\n",
    "train_x = train_x_flatten / 255\n",
    "train_y = train_set_y\n",
    "test_x = test_x_flatten / 255\n",
    "test_y = test_set_y"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "第 0 次迭代，成本值为 0.715731513414\n",
      "第 100 次迭代，成本值为 0.674737759347\n",
      "第 200 次迭代，成本值为 0.660336543362\n",
      "第 300 次迭代，成本值为 0.646288780215\n",
      "第 400 次迭代，成本值为 0.629813121693\n",
      "第 500 次迭代，成本值为 0.606005622927\n",
      "第 600 次迭代，成本值为 0.569004126398\n",
      "第 700 次迭代，成本值为 0.519796535044\n",
      "第 800 次迭代，成本值为 0.464157167863\n",
      "第 900 次迭代，成本值为 0.408420300483\n",
      "第 1000 次迭代，成本值为 0.373154992161\n",
      "第 1100 次迭代，成本值为 0.30572374573\n",
      "第 1200 次迭代，成本值为 0.268101528477\n",
      "第 1300 次迭代，成本值为 0.238724748277\n",
      "第 1400 次迭代，成本值为 0.206322632579\n",
      "第 1500 次迭代，成本值为 0.179438869275\n",
      "第 1600 次迭代，成本值为 0.157987358188\n",
      "第 1700 次迭代，成本值为 0.142404130123\n",
      "第 1800 次迭代，成本值为 0.128651659979\n",
      "第 1900 次迭代，成本值为 0.112443149982\n",
      "第 2000 次迭代，成本值为 0.0850563103497\n",
      "第 2100 次迭代，成本值为 0.0575839119861\n",
      "第 2200 次迭代，成本值为 0.044567534547\n",
      "第 2300 次迭代，成本值为 0.038082751666\n",
      "第 2400 次迭代，成本值为 0.0344107490184\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYYAAAEWCAYAAABi5jCmAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xl4VPXZ//H3nYQECDsJO8iqCLIHFNyXKlgUN1Tq3qcK\nrq22fWr71F+1Pm1tre3jjrhWa0Xc0Yq41JVFCItAWJRFWSQQQJYEQrb798cc7CRNIEEmZ5L5vK5r\nLmbO+Z4z98lczGfO95zzPebuiIiI7JMUdgEiIhJfFAwiIlKOgkFERMpRMIiISDkKBhERKUfBICIi\n5SgY5JAysy/N7LSQ3jvfzLqH8d4i9YmCQeoNd2/i7qvDrgPAzNzMesZgva3M7BUzKzCzr8zsBwdo\nf7OZ5ZrZTjN7wszSqrMuM7skCNp9j93BNg0J5t9uZsUV2iiU6wkFg9QJZpYcdg37mFlKiG//IFAE\ntAUuAR42s76VNTSzM4BbgVOBw4DuwB3VWZe7PxsEbRN3bwJcB6wG5kct/3x0m3gJZfnuFAwSM2aW\nZGa3mtkqM9tqZlPMrFXU/BeCX7M7zOyj6C84M3vKzB42szfNrAA4OZj2oJn908x2mdmnZtYjaplv\nf6VXo+3pZrYieO+HzOxDM/tRFdtxu5m9aGZ/N7OdwJVmNszMZpnZdjPbaGYPmFlq0P6jYNHPgl/S\nFwXTR5vZwmCZmWbWv4Z/z3TgfOA2d89390+A14DLqljkCuBxd89x92+A3wJXfod1Pe0aKiEhKBgk\nlm4EzgFOBDoA3xD5lbrPNKAX0IbIL9FnKyz/A+B3QFPgk2DaxUR+9bYEVgbzq1JpWzPLAF4Efgm0\nBlYAIw6wLWOCZVoEdZYCNwMZwHAiv8qvA3D3E4JlBgS/pJ83s0HAE8D44D0fAabu69oxszeCwKjs\n8UawvsOBEnf/PKquz4BK9xiC6Z9VaNvWzFrXZF1mdhhwAvB0hVlnmdk2M8sxs2urqEHqIAWDxNIE\n4H/cfb277wVuBy7Y1xXj7k+4+66oeQPMrHnU8q+5+wx3L3P3wmDaK+4+x91LiHxBD9zP+1fV9kwg\nx91fDubdB+QeYFtmufurQS173H2eu8929xJ3/5LIF/2J+1n+GuARd//U3Uvd/W/AXuCY4G8x2t1b\nVPEYHayjCbCzwnp3EgnOyjQBdlRoS9C+Juu6HPjY3ddETZsCHAlkAlcD/8/MxlVRh9QxCgaJpcOA\nV/b98gWWEfml3dbMks3srqCbaSfwZbBMRtTy6ypZZ/QX+G4iX3BVqapth+h1B90j6w+wLeVqMbPD\ng1/5uUH9v69Qe0WHAT+N3hMAOge1VFc+0KzCtObArmq23xe6u2q4rsuBv0VPcPel7v51EHIzgXuB\nCw64BVInKBgkltYBoyr8+m3o7huIdBONAU4j8oXUNVjGopaPVX/2RqDTvhdmZtGvq1CxloeB5UAv\nd28G/IrytVe0Dvhdhb9FY3d/LqhhWoUzfKIf04J1fA6kmFmvqPUOAHKqeM+cYH50203uvrW66zKz\nY4mE14v72TaI/H32t/1ShygYJJYmAr8L+qgxs0wzGxPMa0qkK2Ur0JjIL+7a8k+gn5mdE3RrXQ+0\nq+E6mhLpesk3s95AxT72TUTOAtrnUWCCmR1tEelm9n0zawrg7qMqnOET/RgVtCkAXgZ+Gyx/HHA2\n8EwVNT4N/JeZ9TGzlsBtwFM1XNcVwEvuXm5PwszGmFnLYFuGAT8mcvBa6gEFg8TSvcBU4G0z2wXM\nBo4O5j0NfAVsAJYG82qFu28BxgJ/IhJMfYBsIkFVXT8jstezi8iX/vMV5t8O/C3oNrrQ3bOJ9MU/\nQOQg/EqCM4Rq6DqgEbAZ+AdwrbvnAJhZl2APowuAu78VbOP7RP7Wa4DfVGddwfoaAhdSoRspcHGw\nDbuIfJZ3BcdNpB4wnX0mic7MkogcY7
jE3d8Pux6RsGmPQRKSmZ1hZi2C00X3HR+otb0WkXimYJBE\nNRxYBWwBzgLOcfc94ZYkEh/UlSQiIuVoj0FERMoJczCwg5KRkeFdu3YNuwwRkTpl3rx5W9w9szpt\n61wwdO3alezs7LDLEBGpU8zsq+q2VVeSiIiUo2AQEZFyFAwiIlKOgkFERMpRMIiISDkKBhERKUfB\nICIi5SRMMGzN38sdr+dQWFwadikiInEtYYJh1uqtPDnjS3741Fzy95aEXY6ISNxKmGAY3b8Df71o\nAJ+u2calj33K9t1FYZckIhKXYhoMZjbSzFaY2Uozu7WS+T83s4XBY4mZlZpZq1jVc+6gTky8dAhL\nN+7kokdms3lnYazeSkSkzopZMJhZMvAgMIrIrRPHmVmf6Dbufre7D3T3gcAvgQ/dfVusagL4Xp+2\nPHXlUNZ9s5uxj8xi3bbdsXw7EZE6J5Z7DMOAle6+2t2LgMnAmP20Hwc8F8N6vjWiZwbP/uhotu8u\nZuzEWazcvOvAC4mIJIhYBkNHYF3U6/XBtP9gZo2BkcBLVcy/xsyyzSw7Ly/vkBQ3qEtLnh9/DKXu\njJ04i8XrdxyS9YqI1HXxcvD5LGBGVd1I7j7J3bPcPSszs1rDiVdL73bNeGH8cBqnpjDu0dl8unrr\nIVu3iEhdFctg2AB0jnrdKZhWmYuppW6kirpmpPPitcNp2yyNy5+Yw/vLN4dRhohI3IhlMMwFeplZ\nNzNLJfLlP7ViIzNrDpwIvBbDWvarffNGTBk/nF5tm3D109m8/tnXYZUiIhK6mAWDu5cANwDTgWXA\nFHfPMbMJZjYhqum5wNvuXhCrWqqjdZM0/nH1MQzu0pKbJi/guTlrwyxHRCQ05u5h11AjWVlZHstb\ne+4pKuXaZ+fxwYo8fnVmb645oUfM3ktEpLaY2Tx3z6pO23g5+Bw3GqUmM+myLEb3b8/v31zOL19e\nzJINO6hrASoicrBSwi4gHqWmJHHvxYPIaJLGM7O/4rk5a+nSqjGj+rXjzKPa079Tc8ws7DJFRGJC\nXUkH8E1BEW8vzeXNxbnMWLmFkjKnY4tGnNmvHaP6tWdQ5xYKCRGJezXpSlIw1MCO3cW8vTSXaUty\n+fiLPIpLnQ7NGzLyqPac2a8dg7u0JClJISEi8UfBUAt27CnmvWWbeHNxLh99nkdRaRltm6Ux6qj2\njDyqHUO7tiJZISEicULBUMt2FRbzr+WbeXPxRj5YkcfekjJap6dyet+2nNG3HSN6ZJCaouP8IhIe\nBUOICvaW8MGKPN7KyeVfyzZRUFRK04YpnHZkJCROPDyTRqnJYZcpIglGwRAnCotLmbFyC28tyeWd\nZZvYvruYRg2SOemITEYe1Y5TerehacMGYZcpIgmgJsGg01VjqGGDZE49si2nHtmWktIyPl2zjbeW\n5DI9J3IAOzU5iWN7to4cl+jXjmYKCRGJA9pjCEFZmbNg3XbeWrKRt3JyWbdtD2kpSYw8qh1jh3Rm\neI/WOnAtIoeUupLqEHdn0fodvDhvPa8t3MDOwhI6NG/IeYM7ccGQTnTNSA+7RBGpBxQMdVRhcSnv\nLtvEC9nr+fiLPMochnZtydghnTmzf3uapKnnT0QOjoKhHsjdUcjLC9bz4rz1rM4roFGDZEb1a8cF\nQzpxTLfWupBORGpEwVCPuDvz127nxXnreeOzr9m1t4ROLRsxblgXrhzRlXTtRYhINSgY6qk9RaW8\nvTSXKdnrmLFyKxlN0rj5e724KKszKcm6gE5EqqZgSADzvtrGH95cTvZX39A9M51fjOzN6X3aakA/\nEamU7seQAIYc1ooXJgxn0mVDMGD8M/O4YOIs5n21LezSRKSOUzDUYWbG6X3bMf0nJ/D7c/uxdttu\nzn94FuOfyWZVXn7Y5YlIHaWupHpkd1EJj328hkc+XEVhSRkXD+3Mj0/rRZumDcMuTURCpmMMCW5L\n/l
7uf+8Lnv10LakpSVx9fHeuOaG7zmASSWBxc4zBzEaa2QozW2lmt1bR5iQzW2hmOWb2YSzrSRQZ\nTdK4Y8xRvHvLiZx8RBvufe8LTrz7fd5cvDHs0kSkDohZMJhZMvAgMAroA4wzsz4V2rQAHgLOdve+\nwNhY1ZOIumak8+Alg3n1+mPp2LIx1z07n/ve+4K6tpcoIrUrlnsMw4CV7r7a3YuAycCYCm1+ALzs\n7msB3H1zDOtJWAM7t2DK+GM4b3BH/vLO59z8/EIKi0vDLktE4lQsg6EjsC7q9fpgWrTDgZZm9oGZ\nzTOzyytbkZldY2bZZpadl5cXo3Lrt7SUZO4ZO4Cfn3EEry78mksf+5St+XvDLktE4lDYp6umAEOA\n7wNnALeZ2eEVG7n7JHfPcveszMzM2q6x3jAzrj+5Jw9dMpjFG3ZwzkMz+GLTrrDLEpE4E8tg2AB0\njnrdKZgWbT0w3d0L3H0L8BEwIIY1CXBmv/ZMGT+cwuIyzntoJh9/ob0wEfm3WAbDXKCXmXUzs1Tg\nYmBqhTavAceZWYqZNQaOBpbFsCYJDOjcIjgo3Ygrn5zLM7O/CrskEYkTMQsGdy8BbgCmE/myn+Lu\nOWY2wcwmBG2WAW8Bi4A5wGPuviRWNUl5HVs04sVrR3DS4Znc9uoS7ng9h9IynbEkkuh0gZtQWub8\n/s1lPP7JGk7p3Yb7xg3STYFE6pm4ucBN6obkJOO20X3433OO4sPP87jg4Zls2L4n7LJEJCQKBvnW\npcccxlNXDWXD9j2MeWAGC9dtD7skEQmBgkHKOb5XJq9cN4JGqUlc9MgsZq7cEnZJIlLLFAzyH3q2\nacqr1x1L19bpjH9mHss27gy7JBGpRQoGqVTrJmk8edVQ0tNSuPLJOTrmIJJAFAxSpQ4tGvHUD4ey\nu6iUK56Yw47dxWGXJCK1QMEg+9W7XTMmXZbF2q27ufrpbA2+J5IAFAxyQMN7tObPFw5gzpfbuGXK\nQsp0EZxIvaZgkGo5e0AHfv39I3lzcS53/nOp7ukgUo/p8lapth8d352vtxfyxIw1dGjeiKtP6B52\nSSISAwoGqZFff/9INu0q5HdvLqNNszTGDKx4iw0RqesUDFIjSUnGPWMHkLdrLz974TMym6YxokdG\n2GWJyCGkYwxSYw0bJPPoZVl0y0hn/NPzWJ6rC+BE6hMFgxyU5o0b8NRVwyIXwD0xl691AZxIvaFg\nkIO27wK4gr0lXPmkLoATqS8UDPKd9G7XjEcuH8KaLQVc/YwugBOpDxQM8p2N6JHBn8cOYM6abfz0\nhc90jYNIHaezkuSQGDOwIxt3FHLXtOUc1aE5157UI+ySROQgaY9BDpnxJ3RndP/23D19OTN0HweR\nOkvBIIeMmfHH8/vTs00TbnxugYbqFqmjYhoMZjbSzFaY2Uozu7WS+SeZ2Q4zWxg8/l8s65HYS09L\nYeKlQyguKePav8/TwWiROihmwWBmycCDwCigDzDOzPpU0vRjdx8YPH4bq3qk9nTPbMI9Fw5g0fod\n/Oa1nLDLEZEaiuUewzBgpbuvdvciYDIwJobvJ3Hk9L7tuP7kHjyfvY7n5qwNuxwRqYFYBkNHYF3U\n6/XBtIpGmNkiM5tmZn0rW5GZXWNm2WaWnZeXF4taJQZu+d4RHN8rg9+8lsPCddvDLkdEqinsg8/z\ngS7u3h+4H3i1skbuPsnds9w9KzMzs1YLlIOXnGTcd/Eg2jRL47q/z2Nr/t6wSxKRaohlMGwAOke9\n7hRM+5a773T3/OD5m0ADM9NQnfVIy/RUJl46hK0FRdz43AJKSsvCLklEDiCWwTAX6GVm3cwsFbgY\nmBrdwMzamZkFz4cF9WyNYU0SgqM6Nud/zzmKmau2cvfbK8IuR0QOIGZXPrt7iZndAEwHkoEn3D3H\nzCYE8ycCFwDXmlkJsAe42DWeQr00NqszC9dt55EPVzOwUwtG9Wsf
dkkiUgWra9/DWVlZnp2dHXYZ\nchD2lpRy0SOz+WLTLl674Vh6tmkadkkiCcPM5rl7VnXahn3wWRJIWkoyD186mEapyYx/Zh67CjVM\nt0g8UjBIrWrfvBH3jxvMl1t38/MXFmkkVpE4pGCQWje8R2t+Oao3b+XkMvHD1WGXIyIVKBgkFP91\nXLdvR2L95AuNxCoSTxQMEorokVhvmqyRWEXiiYJBQrNvJNYijcQqElcUDBKq6JFY73hdI7GKxAMF\ng4TujL7tuO6kHjw3Zx3Pz9VIrCJhUzBIXPjp6ZGRWG97LYdF6zUSq0iYFAwSF5KTjHsvHkRmkzSu\n/ft8thUUhV2SSMJSMEjcaJWeysOXDiYvfy83PbeA0jJd/CYSBgWDxJX+nVpw55i+fLJyC/doJFaR\nUCgYJO5cNLQL44Z15qEPVjE9JzfsckQSjoJB4tLtZ/dlQKfm/HTKZ6zKyw+7HJGEomCQuJSWksxD\nlw4hNSWJCc/Mo2BvSdgliSQMBYPErY4tGnH/uEGsysvnv1/SSKwitUXBIHHt2J4Z/PyM3vxz0UYe\n/2RN2OWIJAQFg8S9CSd2Z2Tfdvxh2nJmrdItwUViTcEgcc/MuHtsf7q2bsyNz81n4w6NxCoSSwoG\nqROaNmzAI5cNYU9RKdc9O5+9JRqJVSRWYhoMZjbSzFaY2Uozu3U/7YaaWYmZXRDLeqRu69mmKXeP\nHcCCtdsZN2k2m3cWhl2SSL0Us2Aws2TgQWAU0AcYZ2Z9qmj3R+DtWNUi9ceZ/drz4A8Gs2zjLs56\n4BMWrP0m7JJE6p1qBYOZja3OtAqGASvdfbW7FwGTgTGVtLsReAnYXJ1aRL7fvz0vXzeC1JQkLnpk\nNlOy14Vdkki9Ut09hl9Wc1q0jkD0/9j1wbRvmVlH4Fzg4f2tyMyuMbNsM8vOy8urRrlS3x3ZvhlT\nrz+Ood1a8t8vLuL2qTkUl5aFXZZIvZCyv5lmNgo4E+hoZvdFzWoGHIpLUf8P+IW7l5lZlY3cfRIw\nCSArK0tXOQkALdNT+dtVw7hr2nIe+2QNy3N38uAPBtO6SVrYpYnUaQfaY/gayAYKgXlRj6nAGQdY\ndgPQOep1p2BatCxgspl9CVwAPGRm51SrchEgJTmJX4/uw18uHMD8tds5+4EZLNmwI+yyROo0q84w\nA2bWwN2Lg+ctgc7uvugAy6QAnwOnEgmEucAP3L3SG/ua2VPAG+7+4v7Wm5WV5dnZ2QesWRLPovXb\nGf/MPL7ZXcQfz+/PmIEdD7yQSIIws3nunlWdttU9xvCOmTUzs1bAfOBRM/vr/hZw9xLgBmA6sAyY\n4u45ZjbBzCZU831Fqq1/pxZMveE4+nVszo8nL+QP05bpZj8iB6G6ewwL3H2Qmf2IyN7Cb8xskbv3\nj32J5WmPQQ6kqKSM376Rw99nr+X4Xhk8MG4wzRs3CLsskVDFYo8hxczaAxcCbxx0ZSK1IDUlif89\npx9/OK8fs1dv5ewHP+HzTbvCLkukzqhuMPyWSJfQKnefa2bdgS9iV5bIdzduWBcmX3MMu4tKueDh\nmeTu0JXSItVRrWBw9xfcvb+7Xxu8Xu3u58e2NJHvbshhrZgyfjh7S8r49auLdU8HkWqo7pXPnczs\nFTPbHDxeMrNOsS5O5FDolpHOT08/nHeXbeaNRRvDLkck7lW3K+lJItcudAgerwfTROqEHx7bjf6d\nmnP71By+KSgKuxyRuFbdYMh09yfdvSR4PAVkxrAukUMqJTmJP57fnx17irnzjaVhlyMS16obDFvN\n7FIzSw4elwK6lZbUKUe2b8Z1J/Xg5QUbeH+FxmwUqUp1g+GHRE5VzQU2Ehm+4soY1SQSM9ef0pOe\nbZrwPy8vJn/voRjuS6T+qcnpqle4e6a7tyESFHfEriyR2EhLSeaP5/dn485C/vTW8rDLEYlL1Q2G\n/u7+7R1R3H0bMCg2JYnE1pDD
WnLliK48Pesr5qzZFnY5InGnusGQFAyeB0AwZtJ+h+wWiWc/O/0I\nOrVsxK0vLaKwWPePFolW3WC4B5hlZnea2Z3ATOBPsStLJLbS01L4w3n9WL2lgPve00X8ItGqe+Xz\n08B5wKbgcZ67PxPLwkRi7fhemYwd0olHPlqteziIRKnuHgPuvtTdHwgeOhFc6oVff78PrdJT+e8X\nF+nWoCKBageDSH3UvHED7hzTl6Ubd/Lox6vDLkckLigYJOGNPKo9o45qx/+9+wWr8/LDLkckdAoG\nEeCOMX1pmJLErS8tpkx3fZMEp2AQAdo0bchto/sw58ttPDtnbdjliIRKwSASuGBIJ47vlcFdby5j\nw/Y9YZcjEhoFg0jAzPj9uf0oc/ifV3RTH0lcMQ0GMxtpZivMbKWZ3VrJ/DFmtsjMFppZtpkdF8t6\nRA6kc6vG/PyMI/hgRR4vzFsfdjkioYhZMJhZMvAgMAroA4wzsz4Vmr0HDHD3gUQG5nssVvWIVNcV\nI7pydLdW3PrSIp6asSbsckRqXSz3GIYBK4P7QxcBk4Ex0Q3cPd//vb+eDmjfXUKXnGQ8edVQTj2y\nLbe/vpTbp+ZQqjOVJIHEMhg6AuuiXq8PppVjZuea2XLgn0T2Gv6DmV0TdDVl5+XlxaRYkWiNU1OY\neOkQfnRcN56a+SXjn8mmQPdvkAQR+sFnd3/F3XsD5wB3VtFmkrtnuXtWZqbuKCq1IznJ+PXoPtw5\npi//Wr6ZiybNYtPOwrDLEom5WAbDBqBz1OtOwbRKuftHQHczy4hhTSI1dtnwrjx+xVDW5BVw7oMz\nWLZxZ9glicRULINhLtDLzLqZWSpwMTA1uoGZ9TQzC54PBtLQvaQlDp3cuw1TJgynzGHsxFl8+Lm6\nNKX+ilkwuHsJcAMwHVgGTHH3HDObYGYTgmbnA0vMbCGRM5gucp08LnGqb4fmvHL9CLq0aswPn5rL\ns59+FXZJIjFhde17OCsry7Ozs8MuQxJY/t4SbvzHfN5fkcc1J3Tn1pG9SUqysMsS2S8zm+fuWdVp\nG/rBZ5G6pklaCo9ensXlww9j0keruf4f89lTpNuDSv2hYBA5CCnJSdxxdl9uG92Ht3JyufjR2eTt\n2ht2WSKHhIJB5CCZGf91XDcmXjqEFbk7OfehGSxavz3sskS+MwWDyHd0Rt92TBk/nJJS59yHZnLP\n2ysoKtFtQqXuUjCIHAL9O7Vg+s0ncO6gjtz/r5Wc/cAn5Hy9I+yyRA6KgkHkEGneqAF/HjuAx6/I\nYmtBEWMemMG9735Bcan2HqRuUTCIHGKnHtmWd24+gbMGdOCv737OOQ/OYHmurpaWukPBIBIDLRqn\n8teLBvLIZUPYtLOQs+7/hAffX0mJ9h6kDlAwiMTQGX3b8fbNJzLyqPbcPX0F5z08ky827Qq7LJH9\nUjCIxFir9FTuHzeIhy4ZzPpv9vD9+z5h4oerdI8HiVsKBpFacma/9rx98wmc0rsNd01bzgUTZ7Iq\nLz/sskT+g4JBpBZlNEnj4UsHc9+4QazZUsCZ937M5DlrqWtjlkn9pmAQqWVmxtkDOvD2zScwrFsr\nbn15MTdNXsiuwuKwSxMBFAwioWnTtCF/u2oYPz/jCN5cvJHR93/Ckg26KE7Cp2AQCVFSknH9yT2Z\nfM0xFJWUcd5DM3lqxhp1LUmoFAwicWBo11a8edPxHN8rg9tfX8r4Z+axY7e6liQcCgaRONEyPZXH\nrsjittF9eH/FZs6872PmffVN2GVJAlIwiMSRfUN5vzhhBElJcOEjs5j44SrKdM2D1CIFg0gcGtC5\nBf+86XhG9m3HXdOWc9VTc9marxsBSe1QMIjEqWYNG/DADwbxu3OPYtbqrYy692NmrdoadlmSAGIa\nDGY20sxWmNlKM7u1kvmXmNkiM1tsZjPNbEAs6xGpa8yMS44+jFevO5YmDVO45LHZ/Omt5RQW6x
7T\nEjsxCwYzSwYeBEYBfYBxZtanQrM1wInu3g+4E5gUq3pE6rI+HZrx+g3Hcf7gTjz0wSpO/+tHfLBi\nc9hlST0Vyz2GYcBKd1/t7kXAZGBMdAN3n+nu+067mA10imE9InVaeloKd48dwD+uPpqUZOPKJ+dy\n3bPzyN1RGHZpUs/EMhg6AuuiXq8PplXlv4Bplc0ws2vMLNvMsvPy8g5hiSJ1z4geGUz78fH87PTD\neW/ZZk695wMe/2SN7vUgh0xcHHw2s5OJBMMvKpvv7pPcPcvdszIzM2u3OJE4lJaSzA2n9OKdm09k\naLdW3PnGUs5+YAYL1uq6B/nuYhkMG4DOUa87BdPKMbP+wGPAGHfXKRciNdCldWOevHIoD18ymG0F\nRZz38Ex+9cpiXTUt30ksg2Eu0MvMuplZKnAxMDW6gZl1AV4GLnP3z2NYi0i9ZWaM6teed396Ij88\nthvPz13HKfd8wMvz12vMJTkoMQsGdy8BbgCmA8uAKe6eY2YTzGxC0Oz/Aa2Bh8xsoZllx6oekfqu\nSVoKt43uw9QbjqVL68bcMuUzxj06m5WbdStRqRmra78osrKyPDtb+SGyP2VlzuS56/jjW8vZXVTC\nxUO7MOGkHnRs0Sjs0iQkZjbP3bOq1VbBIFJ/bcnfyz1vf86L8yInCF4wpDPXndSDzq0ah1yZ1DYF\ng4iUs2H7HiZ+sIrn566j1J3zBnXk+pN70jUjPezSpJYoGESkUrk7Cpn44Sqem7OW4tIyxgyMBETP\nNk3CLk1iTMEgIvu1eVchj360mr/PXkthSSmj+3fgxlN6cnjbpmGXJjGiYBCRatmav5fHPlnD0zO/\npKColFFHteOGU3rSt0PzsEuTQ0zBICI18k1BEU/OWMOTM75k194STu3dhrFZnTm5dyZpKclhlyeH\ngIJBRA7Kjj3FPDXjS56Z/SVb8oto1jCFM/u1Z8zAjhzdrRVJSRZ2iXKQFAwi8p2UlJYxY9VWXluw\ngbdyctldVEqH5g05a2AHzhnYkSPbNwu7RKkhBYOIHDK7i0p4Z+kmXlv4NR9+nkdpmXNE26aMGdSB\nMQM76qK5OkLBICIxsTV/L/9cvJFXF2xg/trtAAzr1opzBnbkzH7taNE4NeQKpSoKBhGJua+2FvDa\nwq95deEGVucVkJxkHN2tFWf0bcfpfdvSvrn2JOKJgkFEao27s3jDDqYtyeXtnFxW5RUA0L9T80hI\n9GlLzzZNMNOB6zApGEQkNCs35/P20lym52zis3WR7qbuGel8r29bzujbjoGdWujsphAoGEQkLuTu\nKOSdpblhUK+DAAANgUlEQVS8vXQTs1ZtpaTMadM0je/1acvpfdsxokdrGiTHxY0k6z0Fg4jEnR27\ni3l/xWam5+TywYo89hSXktEklbMHdOT8IR11tXWMKRhEJK4VFpfy4ed5vDJ/A+8t30RxqdO7XVMu\nGNKJMQM7ktk0LewS6x0Fg4jUGd8UFPH6oq95af4GPlu3neQk48TDMzl/cCdOPbINDRtoSI5DQcEg\nInXSys27eGn+Bl6Zv4HcnYU0a5jCWQM6cP6QTgzq3EJnNn0HCgYRqdNKy5yZq7bw0rz1vJWTS2Fx\nGd0z0hmb1ZkrR3SlUar2ImpKwSAi9cauwmKmLc7lxfnrmbNmG51bNeJ/z+nHiYdnhl1anVKTYIjp\neWJmNtLMVpjZSjO7tZL5vc1slpntNbOfxbIWEambmjZswIVDOzNl/HAmX3MMDZKTuOKJOdz03AI2\n7yoMu7x6KWbBYGbJwIPAKKAPMM7M+lRotg24CfhzrOoQkfrjmO6tmfbj4/nJab14a0kup93zIf/4\ndC1lZXWr5yPexXKPYRiw0t1Xu3sRMBkYE93A3Te7+1ygOIZ1iEg9kpaSzE9OO5xpPzmePh2a8atX\nFnPhI7P4fNOusEurN2IZDB2BdVGv1wfTaszMrjGzbDPLzs
vLOyTFiUjd1iOzCc9dfQx3X9CflXn5\nnHnvx9w9fTmFxaVhl1bn1Ylr0d19krtnuXtWZqYOOIlIhJkxNqsz791yImcP7MCD76/ijP/7iE++\n2BJ2aXVaLINhA9A56nWnYJqIyCHVukkaf7lwIP/40dEkmXHp459y8/ML2ZK/N+zS6qRYBsNcoJeZ\ndTOzVOBiYGoM309EEtyInhlM+/Hx3HRKT95Y9DWn3vMhz89dS107LT9sMQsGdy8BbgCmA8uAKe6e\nY2YTzGwCgJm1M7P1wC3Ar81svZnpZrIictAaNkjmltOP4M2bjueItk35xUuLuWjSbFZuzg+7tDpD\nF7iJSL1VVuZMyV7H799cRmFxGdee1IPrTu5BWkriXTkdNxe4iYiEKSnJuHhYF9776UmMPKod9773\nBaPu/ZjZq7eGXVpcUzCISL2X2TSN+8YN4qmrhlJcWsbFk2bz8xc+45uCorBLi0sKBhFJGCcd0Ya3\nf3IiE07swcsLNnDaXz7klQXrdXC6AgWDiCSURqnJ3DqqN6/fcBydWjXm5uc/4/In5vDV1oKwS4sb\nCgYRSUh9OjTj5WtH8NsxfVmwdjun//UjHvpgJcWlZWGXFjoFg4gkrOQk4/LhXXn3lhM5+Yg2/Omt\nFYy+7xNenr+ebQl8/EGnq4qIBN5Zuok7Xs9h/Td7SDIY3KUlpxzZhlN7t+Xwtk3q9B3kdKMeEZGD\nVFbm5Hy9k/eWb+K9ZZtZvGEHAB1bNOLUI9twSu82HNO9dZ27F7WCQUTkENm0s5D3l2/m3WWb+WRl\nHoXFZTROTea4nhmcemQbTj6iDW2aNQy7zANSMIiIxEBhcSmzVm/lX8s2896yTXy9I3IHub4dmnFE\n26Z0y0inW2Z65N+MdBqnpoRc8b8pGEREYszdWZ67i38t38zMVVtYnVfAxh3lbzXarlnDb8Oie8a/\nA6Nzq8Y0SK7dc38UDCIiIdhdVMKXW3azZksBa7bks3pLAWu2FLA6r4Ade/59o8rkJKN984a0Tk+l\ndZM0WqWn0rpJauR1ehqtmqSSEfzbOj31kBzPqEkwxM9+johIHdc4NYU+HZrRp8N/DhL9TUHRt0Gx\nZks+G77Zw9aCIjbtLGTp1zvZVlBEURXXUKSnJtO6SRqXHXMYV5/QPdaboWAQEakNLdNTGZKeypDD\nWlY6393ZtbeEbflFbC3Yy9b8IrYWFLGtoIgt+XvZVlBEm2ZptVKrgkFEJA6YGc0aNqBZwwZ0zUgP\ntRZd+SwiIuUoGEREpBwFg4iIlKNgEBGRchQMIiJSTkyDwcxGmtkKM1tpZrdWMt/M7L5g/iIzGxzL\nekRE5MBiFgxmlgw8CIwC+gDjzKxPhWajgF7B4xrg4VjVIyIi1RPLPYZhwEp3X+3uRcBkYEyFNmOA\npz1iNtDCzNrHsCYRETmAWF7g1hFYF/V6PXB0Ndp0BDZGNzKza4jsUQDkm9mKg6wpA9hykMvWB4m8\n/Ym87ZDY269tjzisugvViSuf3X0SMOm7rsfMsqs7iFR9lMjbn8jbDom9/dr2mm97LLuSNgCdo153\nCqbVtI2IiNSiWAbDXKCXmXUzs1TgYmBqhTZTgcuDs5OOAXa4+8aKKxIRkdoTs64kdy8xsxuA6UAy\n8IS755jZhGD+ROBN4ExgJbAbuCpW9QS+c3dUHZfI25/I2w6Jvf3a9hqqczfqERGR2NKVzyIiUo6C\nQUREykmYYDjQ8Bz1mZl9aWaLzWyhmdX7G2ab2RNmttnMlkRNa2Vm75jZF8G/ld9Gq46rYttvN7MN\nwee/0MzODLPGWDGzzmb2vpktNbMcM/txMD1RPvuqtr/Gn39CHGMIhuf4HPgekYvo5gLj3H1pqIXV\nEjP7Eshy94S4yMfMTgDyiVxVf1Qw7U/ANne/K/hh0NLdfxFmnbFQxbbfDuS7+5/DrC3WglET2rv7\nfDNrCswDzgGuJDE++6
q2/0Jq+Pknyh5DdYbnkHrC3T8CtlWYPAb4W/D8b0T+w9Q7VWx7QnD3je4+\nP3i+C1hGZCSFRPnsq9r+GkuUYKhq6I1E4cC7ZjYvGF4kEbWNukYmF2gbZjEhuDEYwfiJ+tqVEs3M\nugKDgE9JwM++wvZDDT//RAmGRHecuw8kMprt9UF3Q8LySP9p/e9D/beHge7AQCLjkN0TbjmxZWZN\ngJeAn7j7zuh5ifDZV7L9Nf78EyUYEnroDXffEPy7GXiFSNdaotm0b+Te4N/NIddTa9x9k7uXunsZ\n8Cj1+PM3swZEvhSfdfeXg8kJ89lXtv0H8/knSjBUZ3iOesnM0oMDUZhZOnA6sGT/S9VLU4ErgudX\nAK+FWEutqjCU/bnU08/fzAx4HFjm7n+JmpUQn31V238wn39CnJUEEJyi9X/8e3iO34VcUq0ws+5E\n9hIgMgTKP+r7tpvZc8BJRIYc3gT8BngVmAJ0Ab4CLnT3eneQtoptP4lIN4IDXwLj6+OYZGZ2HPAx\nsBgoCyb/ikg/eyJ89lVt/zhq+PknTDCIiEj1JEpXkoiIVJOCQUREylEwiIhIOQoGEREpR8EgIiLl\nKBgkrpjZzODfrmb2g0O87l9V9l6H+D1+YmaXx2C9Lczsuhisd7SZ/fZQr1fqNp2uKnHJzE4Cfubu\no2uwTIq7l+xnfr67NzkU9VX1/sB8YPD+6qjuuqLXEYx988a+EVMPleCiqPnAse6++1CuW+ou7TFI\nXDGz/ODpXcDxwfjxN5tZspndbWZzg8HAxgftTzKzj81sKrA0mPZqMGBgzr5BA83sLqBRsL5no9/L\nIu42syUWuW/FRVHr/sDMXjSz5Wb2bPBFipndFYx7v8jM9g1nfAowf98XerDsvcF7LjGzYcH09GAw\nszlmtsDMxgTTrzSzqWb2L+C9Cn+au4AewbruDtr/POrvcUcwrauZLTOzR4Ptf9vMGgXzboqqeTJ8\nO3bQB0C1A1gSgLvroUfcPIiMGw+Rq3XfiJp+DfDr4HkakA10C9oVAN2i2rYK/m1E5PL/1tHrruS9\nzgfeIXJVfFtgLdA+WPcOImNrJQGzgOOA1sAK/r3H3SL49w7gxqj1fwA8Gjw/AVgSPP89cOm+ZYnc\nKySdyH0D1u+rv0KtXfctH7w+nciN3i2o7Y3gPboCJcDAoN2UqPf6GkiLrjl4fglwf9ifvR7x89Ae\ng9QVpwOXm9lCIkMctAZ6BfPmuPuaqLY3mdlnwGwigyf2Yv+OA57zyEBjm4APgaFR617vkQHIFhL5\n4t0BFAKPm9l5wL4umPZAXoV1Pwff3iehmZm1CLbl1mBbPgAaEhmuAeAdr95wDacHjwVEuoJ6R23n\nGndfGDyfF9QMsAh41swuJRIe+2wGOlTjPSVBpIRdgEg1GZFf49PLTYwciyio8Po0YLi77zazD4h8\n8R6svVHPS4EUdy8JuoVOBS4AbiDSjbSnkveqeBDPg205391XVNiWo6O35QAM+IO7P1JhHV0rqblR\n8Pz7RPYqzgL+x8z6eaTbq2FQuwigYwwSv3YBTaNeTweuDYYVxswOD0aLrag58E0QCr2BY6LmFe9b\nvoKPgYuC4xiZRL4851RVmEXGu2/u7m8CNwMDglnLgJ4Vmu87XnEcsMPddwTbcmPU8YpBVb1XlMr+\nHj8MasHMOppZm/3UnAR0dvf3gV8Q+TvtOxB/OPV0xFU5ONpjkHi1CCgNuoSeAu4l0iUyP/hCzaPy\nWzS+BUwws2VEjgPMjpo3CVhkZvPd/ZKo6a8Aw4HPiPyi/293zw2CpTJNgdfMrCGRX+63BNOnAc9U\naFtoZguABsAPg2l3Ehnpd1Hwhb2GAxz8dfetZjbDzJYA09z952Z2JDAryJd84FIiewiVSQb+bmbN\ng5rvc/ftwbyTgV/u7/0lseh0VZFDyMxeIRIsXwTdWD9z9+yQy6qSmbUlMhT7qWHXIvFD
XUkih9at\nRA5C1xVdgJ+GXYTEF+0xiIhIOdpjEBGRchQMIiJSjoJBRETKUTCIiEg5CgYRESnn/wPEZUN21uxQ\nIAAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x1ce4ed3ed30>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "layers_dims = [12288, 20, 7, 5, 1] #  5-layer model\n",
    "parameters = L_layer_model(train_x, train_y, layers_dims, num_iterations = 2500, print_cost = True,isPlot=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "准确度为：0.9952153110047847\n",
      "准确度为：0.78\n"
     ]
    }
   ],
   "source": [
    "pred_train = predict(train_x, train_y, parameters)\n",
    "pred_test = predict(test_x, test_y, parameters)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Display the images the model mislabeled\n",
    "def print_mislabeled_images(classes, X, y, p):\n",
    "    \"\"\"\n",
    "    Plot every test image whose prediction differs from its true label.\n",
    "    \n",
    "    Arguments:\n",
    "        classes: array of class names (byte strings, decoded for display)\n",
    "        X: dataset, one flattened 64x64x3 image per column\n",
    "        y: true labels, shape (1, number of examples)\n",
    "        p: predicted labels, shape (1, number of examples)\n",
    "    \"\"\"\n",
    "    \n",
    "    # y + p == 1 exactly when label and prediction disagree (0+1 or 1+0)\n",
    "    a = y + p\n",
    "    mislabeled_indices = np.asarray(np.where(a==1))\n",
    "    plt.rcParams['figure.figsize'] = (40.0, 40.0) # set default size of plots\n",
    "    num_images = len(mislabeled_indices[0])\n",
    "    for i in range(num_images):\n",
    "        # second row of np.where output holds the example (column) indices\n",
    "        index = mislabeled_indices[1][i]\n",
    "\n",
    "        plt.subplot(2, num_images, i + 1)\n",
    "        plt.imshow(X[:,index].reshape(64,64,3), interpolation='nearest')\n",
    "        plt.axis('off')\n",
    "        plt.title(\"Prediction: \" + classes[int(p[0,index])].decode(\"utf-8\") + \" \\n Class: \" + classes[y[0,index]].decode(\"utf-8\"))\n",
    "\n",
    "\n",
    "print_mislabeled_images(classes, test_x, test_y, pred_test)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
