{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "\n",
    "Warm-up: numpy\n",
    "--------------\n",
    "\n",
    "A fully-connected ReLU network with one hidden layer and no biases, trained to\n",
    "predict y from x using Euclidean error.\n",
    "\n",
    "This implementation uses numpy to manually compute the forward pass, loss, and\n",
    "backward pass.\n",
    "\n",
    "A numpy array is a generic n-dimensional array; it does not know anything about\n",
    "deep learning or gradients or computational graphs, and is just a way to perform\n",
    "generic numeric computations.\n",
    "\n",
    "Source Link: http://pytorch.org/tutorials/beginner/examples_tensor/two_layer_net_numpy.html\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "<h1 style=\"background-image: linear-gradient( 135deg, #ABDCFF 10%, #0396FF 100%);\"> Original Tutorial code"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Render matplotlib figures inline in the notebook output\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 35842896.9493\n",
      "1 36926496.1898\n",
      "2 39927512.8897\n",
      "3 37505190.5191\n",
      "4 26979563.0454\n",
      "5 14781183.8659\n",
      "6 6900877.24815\n",
      "7 3348573.92152\n",
      "8 1938770.58451\n",
      "9 1345128.42088\n",
      "10 1045031.01814\n",
      "11 858827.492635\n",
      "12 724960.665459\n",
      "13 620805.922668\n",
      "14 536506.847154\n",
      "15 466754.476944\n",
      "16 408271.611565\n",
      "17 358914.810717\n",
      "18 316919.43164\n",
      "19 280975.798864\n",
      "20 250039.266577\n",
      "21 223250.661898\n",
      "22 199924.895082\n",
      "23 179529.277034\n",
      "24 161640.251068\n",
      "25 145881.420247\n",
      "26 131951.06731\n",
      "27 119591.177854\n",
      "28 108609.427419\n",
      "29 98826.9647574\n",
      "30 90089.7980901\n",
      "31 82254.3828815\n",
      "32 75217.0933817\n",
      "33 68878.8287083\n",
      "34 63157.7077231\n",
      "35 57985.859831\n",
      "36 53304.7990595\n",
      "37 49057.3614446\n",
      "38 45198.9717883\n",
      "39 41686.7478221\n",
      "40 38482.1863749\n",
      "41 35558.424432\n",
      "42 32886.531268\n",
      "43 30441.9160859\n",
      "44 28203.4965169\n",
      "45 26150.4465715\n",
      "46 24265.1230081\n",
      "47 22532.5662279\n",
      "48 20938.8115925\n",
      "49 19472.9456718\n",
      "50 18123.9178633\n",
      "51 16880.4870371\n",
      "52 15733.0840412\n",
      "53 14672.2433359\n",
      "54 13690.9426765\n",
      "55 12782.9300764\n",
      "56 11941.4772086\n",
      "57 11161.4904719\n",
      "58 10438.1228888\n",
      "59 9767.1068225\n",
      "60 9143.93115688\n",
      "61 8564.50626167\n",
      "62 8026.79847429\n",
      "63 7525.9833521\n",
      "64 7059.59053251\n",
      "65 6625.20978525\n",
      "66 6220.51337375\n",
      "67 5843.12342746\n",
      "68 5490.88296924\n",
      "69 5162.00340491\n",
      "70 4854.54611653\n",
      "71 4567.21417499\n",
      "72 4298.71274197\n",
      "73 4047.55050362\n",
      "74 3812.30095673\n",
      "75 3592.25946907\n",
      "76 3386.22183667\n",
      "77 3193.09236158\n",
      "78 3011.97794725\n",
      "79 2842.03600438\n",
      "80 2682.61783856\n",
      "81 2532.9541266\n",
      "82 2392.56276902\n",
      "83 2260.63844613\n",
      "84 2136.65321727\n",
      "85 2020.05098781\n",
      "86 1910.34069317\n",
      "87 1807.13273312\n",
      "88 1709.96041133\n",
      "89 1618.45621876\n",
      "90 1532.32899598\n",
      "91 1451.34302418\n",
      "92 1374.9646858\n",
      "93 1302.94512109\n",
      "94 1235.08009235\n",
      "95 1171.12545921\n",
      "96 1110.90448506\n",
      "97 1054.00905503\n",
      "98 1000.27049276\n",
      "99 949.493875314\n",
      "100 901.492472412\n",
      "101 856.106142648\n",
      "102 813.175863365\n",
      "103 772.579056899\n",
      "104 734.163945159\n",
      "105 697.793423216\n",
      "106 663.35538542\n",
      "107 630.745498071\n",
      "108 599.847107491\n",
      "109 570.57993458\n",
      "110 542.838554686\n",
      "111 516.540249329\n",
      "112 491.604392073\n",
      "113 467.960839885\n",
      "114 445.528659868\n",
      "115 424.245858981\n",
      "116 404.043485986\n",
      "117 384.869133162\n",
      "118 366.665319808\n",
      "119 349.38427501\n",
      "120 332.965111893\n",
      "121 317.367800851\n",
      "122 302.546872378\n",
      "123 288.46399197\n",
      "124 275.075273578\n",
      "125 262.343903434\n",
      "126 250.238636241\n",
      "127 238.725718589\n",
      "128 227.773134145\n",
      "129 217.352274831\n",
      "130 207.435669666\n",
      "131 197.99806835\n",
      "132 189.015822787\n",
      "133 180.462193303\n",
      "134 172.318953248\n",
      "135 164.563509075\n",
      "136 157.176404224\n",
      "137 150.137192072\n",
      "138 143.43214527\n",
      "139 137.041491528\n",
      "140 130.95105274\n",
      "141 125.14475399\n",
      "142 119.609529596\n",
      "143 114.330960669\n",
      "144 109.297965498\n",
      "145 104.499019637\n",
      "146 99.9202942\n",
      "147 95.5514963778\n",
      "148 91.3825877483\n",
      "149 87.4036042757\n",
      "150 83.6064228362\n",
      "151 79.9822785504\n",
      "152 76.5225175428\n",
      "153 73.2197431008\n",
      "154 70.0646876134\n",
      "155 67.0521273953\n",
      "156 64.1749933881\n",
      "157 61.4263418453\n",
      "158 58.8006143026\n",
      "159 56.2919167993\n",
      "160 53.8956282761\n",
      "161 51.6051934414\n",
      "162 49.4158316019\n",
      "163 47.323238488\n",
      "164 45.3226702763\n",
      "165 43.4101071767\n",
      "166 41.5820047376\n",
      "167 39.8334680762\n",
      "168 38.1612422927\n",
      "169 36.5617504309\n",
      "170 35.0318581813\n",
      "171 33.5684322239\n",
      "172 32.1688190953\n",
      "173 30.8293572071\n",
      "174 29.5475591441\n",
      "175 28.3211232376\n",
      "176 27.1472088489\n",
      "177 26.0237523634\n",
      "178 24.9482800979\n",
      "179 23.9189457648\n",
      "180 22.9334284751\n",
      "181 21.989848572\n",
      "182 21.0863379954\n",
      "183 20.2210920117\n",
      "184 19.3925486292\n",
      "185 18.5989763775\n",
      "186 17.8391011624\n",
      "187 17.1111423497\n",
      "188 16.4136853644\n",
      "189 15.745548818\n",
      "190 15.1054261695\n",
      "191 14.4920940702\n",
      "192 13.9045247175\n",
      "193 13.341386926\n",
      "194 12.8016864715\n",
      "195 12.284427574\n",
      "196 11.7886454051\n",
      "197 11.313390769\n",
      "198 10.8578409088\n",
      "199 10.421141472\n",
      "200 10.0024562805\n",
      "201 9.60102829837\n",
      "202 9.21616542286\n",
      "203 8.84710219996\n",
      "204 8.49328093938\n",
      "205 8.1538817586\n",
      "206 7.8283661797\n",
      "207 7.51616525325\n",
      "208 7.21673047202\n",
      "209 6.92950403502\n",
      "210 6.65398498305\n",
      "211 6.38967762538\n",
      "212 6.13608592352\n",
      "213 5.89282465601\n",
      "214 5.65945619788\n",
      "215 5.43547036825\n",
      "216 5.22055846807\n",
      "217 5.01435370743\n",
      "218 4.81643745238\n",
      "219 4.62651779286\n",
      "220 4.44423586046\n",
      "221 4.26926967581\n",
      "222 4.10134220924\n",
      "223 3.94014675377\n",
      "224 3.78541554625\n",
      "225 3.63688742394\n",
      "226 3.49431231462\n",
      "227 3.35742479723\n",
      "228 3.22599717619\n",
      "229 3.09979921946\n",
      "230 2.97863885242\n",
      "231 2.86230120203\n",
      "232 2.7505849868\n",
      "233 2.64331111142\n",
      "234 2.54031559391\n",
      "235 2.44138569165\n",
      "236 2.34638362251\n",
      "237 2.25512877579\n",
      "238 2.16748181458\n",
      "239 2.08330218489\n",
      "240 2.00243702917\n",
      "241 1.92476097455\n",
      "242 1.85015288914\n",
      "243 1.77848646569\n",
      "244 1.70965228153\n",
      "245 1.64350674855\n",
      "246 1.57995510228\n",
      "247 1.51890704185\n",
      "248 1.46024607989\n",
      "249 1.40388273923\n",
      "250 1.34972766757\n",
      "251 1.29769169248\n",
      "252 1.2476910172\n",
      "253 1.19964070466\n",
      "254 1.15346755512\n",
      "255 1.10909560699\n",
      "256 1.06645341763\n",
      "257 1.02547233568\n",
      "258 0.986086819915\n",
      "259 0.948235530308\n",
      "260 0.911855839481\n",
      "261 0.876887060179\n",
      "262 0.843275267932\n",
      "263 0.810968766828\n",
      "264 0.779914825758\n",
      "265 0.750070089432\n",
      "266 0.721379668556\n",
      "267 0.693797540365\n",
      "268 0.667282971538\n",
      "269 0.641792929542\n",
      "270 0.617287043638\n",
      "271 0.593726168312\n",
      "272 0.571073300647\n",
      "273 0.549297901631\n",
      "274 0.528359232341\n",
      "275 0.508227162478\n",
      "276 0.488869358301\n",
      "277 0.470257840648\n",
      "278 0.452361223933\n",
      "279 0.435152101317\n",
      "280 0.418607464546\n",
      "281 0.402695518706\n",
      "282 0.387394035665\n",
      "283 0.372679208598\n",
      "284 0.358529848068\n",
      "285 0.344924454163\n",
      "286 0.331838572735\n",
      "287 0.319253870935\n",
      "288 0.307150026505\n",
      "289 0.295509327827\n",
      "290 0.284315533488\n",
      "291 0.273548288696\n",
      "292 0.263191225116\n",
      "293 0.253230612179\n",
      "294 0.243651276974\n",
      "295 0.234436187812\n",
      "296 0.22557174473\n",
      "297 0.217045269329\n",
      "298 0.208844246281\n",
      "299 0.200955192311\n",
      "300 0.193366435538\n",
      "301 0.186068469099\n",
      "302 0.17904747034\n",
      "303 0.172293008665\n",
      "304 0.165794788609\n",
      "305 0.159543133042\n",
      "306 0.153528813891\n",
      "307 0.147743255486\n",
      "308 0.142177247632\n",
      "309 0.136822053969\n",
      "310 0.131670710065\n",
      "311 0.126714664184\n",
      "312 0.121945612223\n",
      "313 0.117357617501\n",
      "314 0.112943571449\n",
      "315 0.108697001473\n",
      "316 0.104610669701\n",
      "317 0.100678743877\n",
      "318 0.0968956554974\n",
      "319 0.093255657171\n",
      "320 0.089753270153\n",
      "321 0.0863833797161\n",
      "322 0.0831405261643\n",
      "323 0.080020265434\n",
      "324 0.0770176355453\n",
      "325 0.0741284258941\n",
      "326 0.0713481226366\n",
      "327 0.0686728892882\n",
      "328 0.0660986345019\n",
      "329 0.0636213405762\n",
      "330 0.061237133107\n",
      "331 0.0589429112402\n",
      "332 0.0567351911317\n",
      "333 0.0546104294939\n",
      "334 0.0525657937616\n",
      "335 0.0505981684537\n",
      "336 0.0487045766765\n",
      "337 0.0468823688951\n",
      "338 0.0451284109704\n",
      "339 0.0434404838923\n",
      "340 0.0418159322564\n",
      "341 0.0402526406682\n",
      "342 0.0387478197432\n",
      "343 0.0372995442483\n",
      "344 0.0359056952338\n",
      "345 0.0345641904951\n",
      "346 0.0332730820917\n",
      "347 0.0320304929197\n",
      "348 0.0308344332722\n",
      "349 0.0296831774221\n",
      "350 0.0285751121619\n",
      "351 0.027508549133\n",
      "352 0.0264820379649\n",
      "353 0.0254940653824\n",
      "354 0.0245430996392\n",
      "355 0.0236276352085\n",
      "356 0.0227465240822\n",
      "357 0.0218984060407\n",
      "358 0.0210820291464\n",
      "359 0.0202962223445\n",
      "360 0.0195398721111\n",
      "361 0.018811771927\n",
      "362 0.018110881032\n",
      "363 0.01743627106\n",
      "364 0.0167868182314\n",
      "365 0.0161616176376\n",
      "366 0.0155598019431\n",
      "367 0.0149805628059\n",
      "368 0.0144229870433\n",
      "369 0.013886138724\n",
      "370 0.0133693483055\n",
      "371 0.0128718728958\n",
      "372 0.0123929987932\n",
      "373 0.0119320249666\n",
      "374 0.0114883704496\n",
      "375 0.0110611197625\n",
      "376 0.0106498706561\n",
      "377 0.0102538884537\n",
      "378 0.00987268784612\n",
      "379 0.00950573709344\n",
      "380 0.00915248692334\n",
      "381 0.00881237385122\n",
      "382 0.00848493889109\n",
      "383 0.00816971002171\n",
      "384 0.00786621760792\n",
      "385 0.00757403455293\n",
      "386 0.00729273257769\n",
      "387 0.00702193990863\n",
      "388 0.00676120540092\n",
      "389 0.00651019597005\n",
      "390 0.00626851268801\n",
      "391 0.00603583056796\n",
      "392 0.00581180822727\n",
      "393 0.00559613993251\n",
      "394 0.00538854242148\n",
      "395 0.00518862768609\n",
      "396 0.00499613568878\n",
      "397 0.00481080352372\n",
      "398 0.0046323826965\n",
      "399 0.00446058731162\n",
      "400 0.00429519289563\n",
      "401 0.00413592928456\n",
      "402 0.00398258527297\n",
      "403 0.0038349524263\n",
      "404 0.0036928066078\n",
      "405 0.00355593541647\n",
      "406 0.00342415404572\n",
      "407 0.00329727658583\n",
      "408 0.00317510406097\n",
      "409 0.00305746817689\n",
      "410 0.00294419647523\n",
      "411 0.00283513782449\n",
      "412 0.00273014186931\n",
      "413 0.00262904023312\n",
      "414 0.00253168058424\n",
      "415 0.00243793334316\n",
      "416 0.00234766235308\n",
      "417 0.0022607468094\n",
      "418 0.00217705249947\n",
      "419 0.00209647141857\n",
      "420 0.00201888008526\n",
      "421 0.00194416658181\n",
      "422 0.00187221653296\n",
      "423 0.00180293957748\n",
      "424 0.00173622799916\n",
      "425 0.00167198937376\n",
      "426 0.00161014042842\n",
      "427 0.00155058610084\n",
      "428 0.00149323159511\n",
      "429 0.00143800461394\n",
      "430 0.00138482358089\n",
      "431 0.00133361293893\n",
      "432 0.001284297625\n",
      "433 0.00123681457563\n",
      "434 0.00119108956126\n",
      "435 0.00114705725213\n",
      "436 0.00110465420517\n",
      "437 0.00106382328284\n",
      "438 0.00102450607965\n",
      "439 0.00098664349259\n",
      "440 0.00095018577173\n",
      "441 0.000915074904471\n",
      "442 0.000881264054127\n",
      "443 0.000848702258987\n",
      "444 0.000817347870995\n",
      "445 0.000787154109478\n",
      "446 0.000758079984325\n",
      "447 0.000730080257211\n",
      "448 0.000703113899538\n",
      "449 0.000677147206561\n",
      "450 0.000652141968044\n",
      "451 0.000628060541474\n",
      "452 0.000604868571594\n",
      "453 0.000582537686\n",
      "454 0.000561038530377\n",
      "455 0.000540327345037\n",
      "456 0.000520380406284\n",
      "457 0.000501172501985\n",
      "458 0.00048267341466\n",
      "459 0.000464859765718\n",
      "460 0.000447704534235\n",
      "461 0.000431182283768\n",
      "462 0.000415271666658\n",
      "463 0.000399948735582\n",
      "464 0.000385192989162\n",
      "465 0.000370981752766\n",
      "466 0.000357295786547\n",
      "467 0.00034411517557\n",
      "468 0.000331421272774\n",
      "469 0.000319196811676\n",
      "470 0.000307422863664\n",
      "471 0.000296083833807\n",
      "472 0.000285164141647\n",
      "473 0.000274649676667\n",
      "474 0.000264521912682\n",
      "475 0.000254767816389\n",
      "476 0.000245374729509\n",
      "477 0.00023632771103\n",
      "478 0.000227615591686\n",
      "479 0.000219224224317\n",
      "480 0.000211143484115\n",
      "481 0.000203360592962\n",
      "482 0.000195864856609\n",
      "483 0.000188645770107\n",
      "484 0.000181692506188\n",
      "485 0.000174996304148\n",
      "486 0.000168547448735\n",
      "487 0.000162336804991\n",
      "488 0.000156355140909\n",
      "489 0.000150594678251\n",
      "490 0.000145045740068\n",
      "491 0.000139701500429\n",
      "492 0.000134554724507\n",
      "493 0.00012959832245\n",
      "494 0.000124824588769\n",
      "495 0.000120226566219\n",
      "496 0.000115798120235\n",
      "497 0.000111532961107\n",
      "498 0.00010742532087\n",
      "499 0.000103468770422\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "\n",
    "# N is batch size; D_in is input dimension;\n",
    "# H is hidden dimension; D_out is output dimension.\n",
    "N, D_in, H, D_out = 64, 1000, 100, 10\n",
    "\n",
    "# Create random input and output data\n",
    "# NOTE(review): no random seed is set, so loss values differ between runs\n",
    "# (the recorded outputs above will not reproduce exactly).\n",
    "x = np.random.randn(N, D_in)\n",
    "y = np.random.randn(N, D_out)\n",
    "\n",
    "# Randomly initialize weights\n",
    "w1 = np.random.randn(D_in, H)  # (D_in, H): input-to-hidden weights\n",
    "w2 = np.random.randn(H, D_out)  # (H, D_out): hidden-to-output weights\n",
    "\n",
    "learning_rate = 1e-6\n",
    "for t in range(500):\n",
    "    # Forward pass: compute predicted y\n",
    "    h = x.dot(w1)  # (N, H) hidden pre-activation\n",
    "    h_relu = np.maximum(h, 0)  # elementwise ReLU\n",
    "    y_pred = h_relu.dot(w2)  # (N, D_out) predictions\n",
    "\n",
    "    # Compute and print loss: sum of squared errors over the whole batch\n",
    "    loss = np.square(y_pred - y).sum()\n",
    "    print(t, loss)\n",
    "\n",
    "    # Backprop to compute gradients of w1 and w2 with respect to loss\n",
    "    grad_y_pred = 2.0 * (y_pred - y)  # d(loss)/d(y_pred)\n",
    "    grad_w2 = h_relu.T.dot(grad_y_pred)  # d(loss)/d(w2)\n",
    "    grad_h_relu = grad_y_pred.dot(w2.T)  # gradient flowing back through w2\n",
    "    grad_h = grad_h_relu.copy()\n",
    "    grad_h[h < 0] = 0  # ReLU derivative: zero where pre-activation was negative\n",
    "    grad_w1 = x.T.dot(grad_h)  # d(loss)/d(w1)\n",
    "\n",
    "    # Update weights: one plain gradient-descent step\n",
    "    w1 -= learning_rate * grad_w1\n",
    "    w2 -= learning_rate * grad_w2"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "---"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "<h1 style=\"background-image: linear-gradient( 135deg, #ABDCFF 10%, #0396FF 100%);\"> A One-step Example"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Demo Setting"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Demo set: a tiny network so every tensor can be printed and inspected\n",
    "N = 4      # N     is batch size\n",
    "D_in = 3   # D_in  is input dimension\n",
    "H = 2      # H     is hidden dimension\n",
    "D_out = 1  # D_out is output dimension\n",
    "\n",
    "# Create random input and output data\n",
    "x = np.random.randn(N, D_in)\n",
    "y = np.random.randn(N, D_out)\n",
    "\n",
    "# Randomly initialize weights\n",
    "w1 = np.random.randn(D_in, H)\n",
    "w2 = np.random.randn(H, D_out)\n",
    "\n",
    "# Learning rate\n",
    "# NOTE(review): 1e-6 is very small for this 4-sample demo, so one update\n",
    "# barely changes the weights -- presumably kept to match the tutorial above.\n",
    "learning_rate = 1e-6"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "x shape:  (4, 3)\n",
      "[[ 1.46630273 -0.28439528 -2.17351802]\n",
      " [ 0.9221081   0.67609038 -0.81827607]\n",
      " [-0.34743241 -0.33185863  0.10973094]\n",
      " [ 0.23806759 -1.19525403 -0.45219163]]\n",
      "----------------------------------------\n",
      "y shape:  (4, 1)\n",
      "[[ 0.38602718]\n",
      " [-1.62320251]\n",
      " [-0.02611463]\n",
      " [-1.12400456]]\n",
      "----------------------------------------\n",
      "----------------------------------------\n",
      "w1 shape:  (3, 2)\n",
      "[[-0.00765758 -0.40056082]\n",
      " [-1.0192426   0.98049355]\n",
      " [ 0.32065702  0.86188859]]\n",
      "----------------------------------------\n",
      "w2 shape:  (2, 1)\n",
      "[[-0.1960287 ]\n",
      " [ 0.54521736]]\n"
     ]
    }
   ],
   "source": [
    "# Check input and output (shapes and raw values)\n",
    "print(\"x shape: \", x.shape)\n",
    "print(x)\n",
    "print(\"----------------------------------------\")\n",
    "print(\"y shape: \", y.shape)\n",
    "print(y)\n",
    "\n",
    "print(\"----------------------------------------\")\n",
    "print(\"----------------------------------------\")\n",
    "\n",
    "# Check initialized weights (shapes and raw values)\n",
    "print(\"w1 shape: \", w1.shape)\n",
    "print(w1)\n",
    "print(\"----------------------------------------\")\n",
    "print(\"w2 shape: \", w2.shape)\n",
    "print(w2)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Forward Pass"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "3.62141684539\n"
     ]
    }
   ],
   "source": [
    "# Forward pass: compute predicted y\n",
    "h = x.dot(w1)  # (N, H) hidden pre-activation\n",
    "h_relu = np.maximum(h, 0)  # elementwise ReLU\n",
    "y_pred = h_relu.dot(w2)  # (N, D_out) predictions\n",
    "\n",
    "# Compute and print loss (sum of squared errors over the batch)\n",
    "loss = np.square(y_pred - y).sum()\n",
    "print(loss)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Backprop"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Backprop to compute gradients of w1 and w2 with respect to loss\n",
    "grad_y_pred = 2.0 * (y_pred - y)  # d(loss)/d(y_pred)\n",
    "grad_w2 = h_relu.T.dot(grad_y_pred)  # d(loss)/d(w2)\n",
    "grad_h_relu = grad_y_pred.dot(w2.T)  # gradient flowing back through w2\n",
    "grad_h = grad_h_relu.copy()\n",
    "grad_h[h < 0] = 0  # ReLU derivative: zero where pre-activation was negative\n",
    "grad_w1 = x.T.dot(grad_h)  # d(loss)/d(w1)\n",
    "\n",
    "# Update weights: a single gradient-descent step\n",
    "w1 -= learning_rate * grad_w1\n",
    "w2 -= learning_rate * grad_w2"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Combined..?"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 2.54744656042\n",
      "1 2.54737997933\n",
      "2 2.54731340047\n",
      "3 2.54724682385\n",
      "4 2.54718024947\n",
      "5 2.54711367732\n",
      "6 2.5470471074\n",
      "7 2.54698053972\n",
      "8 2.54691397427\n",
      "9 2.54684741106\n",
      "10 2.54678085008\n",
      "11 2.54671429134\n",
      "12 2.54664773483\n",
      "13 2.54658118055\n",
      "14 2.54651462851\n",
      "15 2.5464480787\n",
      "16 2.54638153113\n",
      "17 2.54631498579\n",
      "18 2.54624844268\n",
      "19 2.54618190181\n",
      "20 2.54611536316\n",
      "21 2.54604882676\n",
      "22 2.54598229258\n",
      "23 2.54591576064\n",
      "24 2.54584923093\n",
      "25 2.54578270345\n",
      "26 2.54571617821\n",
      "27 2.5456496552\n",
      "28 2.54558313442\n",
      "29 2.54551661587\n",
      "30 2.54545009956\n",
      "31 2.54538358548\n",
      "32 2.54531707363\n",
      "33 2.54525056401\n",
      "34 2.54518405662\n",
      "35 2.54511755146\n",
      "36 2.54505104854\n",
      "37 2.54498454785\n",
      "38 2.54491804939\n",
      "39 2.54485155316\n",
      "40 2.54478505916\n",
      "41 2.54471856739\n",
      "42 2.54465207786\n",
      "43 2.54458559055\n",
      "44 2.54451910547\n",
      "45 2.54445262263\n",
      "46 2.54438614202\n",
      "47 2.54431966363\n",
      "48 2.54425318748\n",
      "49 2.54418671356\n",
      "50 2.54412024186\n",
      "51 2.5440537724\n",
      "52 2.54398730517\n",
      "53 2.54392084016\n",
      "54 2.54385437739\n",
      "55 2.54378791685\n",
      "56 2.54372145853\n",
      "57 2.54365500245\n",
      "58 2.54358854859\n",
      "59 2.54352209696\n",
      "60 2.54345564757\n",
      "61 2.5433892004\n",
      "62 2.54332275546\n",
      "63 2.54325631275\n",
      "64 2.54318987226\n",
      "65 2.54312343401\n",
      "66 2.54305699798\n",
      "67 2.54299056419\n",
      "68 2.54292413262\n",
      "69 2.54285770328\n",
      "70 2.54279127616\n",
      "71 2.54272485128\n",
      "72 2.54265842862\n",
      "73 2.54259200819\n",
      "74 2.54252558999\n",
      "75 2.54245917402\n",
      "76 2.54239276027\n",
      "77 2.54232634875\n",
      "78 2.54225993946\n",
      "79 2.54219353239\n",
      "80 2.54212712755\n",
      "81 2.54206072494\n",
      "82 2.54199432456\n",
      "83 2.5419279264\n",
      "84 2.54186153047\n",
      "85 2.54179513676\n",
      "86 2.54172874528\n",
      "87 2.54166235603\n",
      "88 2.541595969\n",
      "89 2.5415295842\n",
      "90 2.54146320163\n",
      "91 2.54139682128\n",
      "92 2.54133044315\n",
      "93 2.54126406726\n",
      "94 2.54119769359\n",
      "95 2.54113132214\n",
      "96 2.54106495292\n",
      "97 2.54099858592\n",
      "98 2.54093222115\n",
      "99 2.5408658586\n",
      "100 2.54079949828\n",
      "101 2.54073314019\n",
      "102 2.54066678432\n",
      "103 2.54060043067\n",
      "104 2.54053407925\n",
      "105 2.54046773005\n",
      "106 2.54040138307\n",
      "107 2.54033503832\n",
      "108 2.5402686958\n",
      "109 2.5402023555\n",
      "110 2.54013601742\n",
      "111 2.54006968157\n",
      "112 2.54000334793\n",
      "113 2.53993701653\n",
      "114 2.53987068734\n",
      "115 2.53980436038\n",
      "116 2.53973803565\n",
      "117 2.53967171313\n",
      "118 2.53960539284\n",
      "119 2.53953907477\n",
      "120 2.53947275893\n",
      "121 2.53940644531\n",
      "122 2.53934013391\n",
      "123 2.53927382473\n",
      "124 2.53920751778\n",
      "125 2.53914121304\n",
      "126 2.53907491053\n",
      "127 2.53900861024\n",
      "128 2.53894231218\n",
      "129 2.53887601633\n",
      "130 2.53880972271\n",
      "131 2.53874343131\n",
      "132 2.53867714213\n",
      "133 2.53861085517\n",
      "134 2.53854457044\n",
      "135 2.53847828792\n",
      "136 2.53841200763\n",
      "137 2.53834572955\n",
      "138 2.5382794537\n",
      "139 2.53821318007\n",
      "140 2.53814690866\n",
      "141 2.53808063947\n",
      "142 2.5380143725\n",
      "143 2.53794810775\n",
      "144 2.53788184522\n",
      "145 2.53781558491\n",
      "146 2.53774932683\n",
      "147 2.53768307096\n",
      "148 2.53761681731\n",
      "149 2.53755056588\n",
      "150 2.53748431667\n",
      "151 2.53741806968\n",
      "152 2.53735182491\n",
      "153 2.53728558236\n",
      "154 2.53721934203\n",
      "155 2.53715310392\n",
      "156 2.53708686803\n",
      "157 2.53702063435\n",
      "158 2.5369544029\n",
      "159 2.53688817366\n",
      "160 2.53682194664\n",
      "161 2.53675572184\n",
      "162 2.53668949926\n",
      "163 2.5366232789\n",
      "164 2.53655706076\n",
      "165 2.53649084483\n",
      "166 2.53642463112\n",
      "167 2.53635841963\n",
      "168 2.53629221036\n",
      "169 2.53622600331\n",
      "170 2.53615979847\n",
      "171 2.53609359585\n",
      "172 2.53602739545\n",
      "173 2.53596119726\n",
      "174 2.5358950013\n",
      "175 2.53582880755\n",
      "176 2.53576261601\n",
      "177 2.5356964267\n",
      "178 2.5356302396\n",
      "179 2.53556405472\n",
      "180 2.53549787205\n",
      "181 2.5354316916\n",
      "182 2.53536551337\n",
      "183 2.53529933735\n",
      "184 2.53523316355\n",
      "185 2.53516699197\n",
      "186 2.5351008226\n",
      "187 2.53503465544\n",
      "188 2.53496849051\n",
      "189 2.53490232779\n",
      "190 2.53483616728\n",
      "191 2.53477000899\n",
      "192 2.53470385291\n",
      "193 2.53463769906\n",
      "194 2.53457154741\n",
      "195 2.53450539798\n",
      "196 2.53443925077\n",
      "197 2.53437310577\n",
      "198 2.53430696298\n",
      "199 2.53424082241\n",
      "200 2.53417468406\n",
      "201 2.53410854791\n",
      "202 2.53404241399\n",
      "203 2.53397628227\n",
      "204 2.53391015278\n",
      "205 2.53384402549\n",
      "206 2.53377790042\n",
      "207 2.53371177756\n",
      "208 2.53364565692\n",
      "209 2.53357953849\n",
      "210 2.53351342227\n",
      "211 2.53344730827\n",
      "212 2.53338119648\n",
      "213 2.53331508691\n",
      "214 2.53324897954\n",
      "215 2.53318287439\n",
      "216 2.53311677145\n",
      "217 2.53305067073\n",
      "218 2.53298457222\n",
      "219 2.53291847592\n",
      "220 2.53285238183\n",
      "221 2.53278628996\n",
      "222 2.53272020029\n",
      "223 2.53265411284\n",
      "224 2.53258802761\n",
      "225 2.53252194458\n",
      "226 2.53245586377\n",
      "227 2.53238978516\n",
      "228 2.53232370877\n",
      "229 2.53225763459\n",
      "230 2.53219156263\n",
      "231 2.53212549287\n",
      "232 2.53205942532\n",
      "233 2.53199335999\n",
      "234 2.53192729687\n",
      "235 2.53186123595\n",
      "236 2.53179517725\n",
      "237 2.53172912076\n",
      "238 2.53166306648\n",
      "239 2.53159701441\n",
      "240 2.53153096455\n",
      "241 2.5314649169\n",
      "242 2.53139887146\n",
      "243 2.53133282824\n",
      "244 2.53126678722\n",
      "245 2.53120074841\n",
      "246 2.53113471181\n",
      "247 2.53106867742\n",
      "248 2.53100264524\n",
      "249 2.53093661527\n",
      "250 2.53087058751\n",
      "251 2.53080456196\n",
      "252 2.53073853861\n",
      "253 2.53067251748\n",
      "254 2.53060649856\n",
      "255 2.53054048184\n",
      "256 2.53047446733\n",
      "257 2.53040845504\n",
      "258 2.53034244495\n",
      "259 2.53027643707\n",
      "260 2.53021043139\n",
      "261 2.53014442793\n",
      "262 2.53007842667\n",
      "263 2.53001242762\n",
      "264 2.52994643078\n",
      "265 2.52988043615\n",
      "266 2.52981444372\n",
      "267 2.52974845351\n",
      "268 2.5296824655\n",
      "269 2.5296164797\n",
      "270 2.5295504961\n",
      "271 2.52948451471\n",
      "272 2.52941853553\n",
      "273 2.52935255856\n",
      "274 2.52928658379\n",
      "275 2.52922061123\n",
      "276 2.52915464088\n",
      "277 2.52908867273\n",
      "278 2.52902270679\n",
      "279 2.52895674306\n",
      "280 2.52889078153\n",
      "281 2.52882482221\n",
      "282 2.52875886509\n",
      "283 2.52869291018\n",
      "284 2.52862695748\n",
      "285 2.52856100698\n",
      "286 2.52849505869\n",
      "287 2.5284291126\n",
      "288 2.52836316872\n",
      "289 2.52829722705\n",
      "290 2.52823128758\n",
      "291 2.52816535031\n",
      "292 2.52809941525\n",
      "293 2.5280334824\n",
      "294 2.52796755174\n",
      "295 2.5279016233\n",
      "296 2.52783569706\n",
      "297 2.52776977302\n",
      "298 2.52770385119\n",
      "299 2.52763793156\n",
      "300 2.52757201414\n",
      "301 2.52750609892\n",
      "302 2.5274401859\n",
      "303 2.52737427509\n",
      "304 2.52730836648\n",
      "305 2.52724246008\n",
      "306 2.52717655588\n",
      "307 2.52711065388\n",
      "308 2.52704475409\n",
      "309 2.5269788565\n",
      "310 2.52691296111\n",
      "311 2.52684706793\n",
      "312 2.52678117695\n",
      "313 2.52671528817\n",
      "314 2.5266494016\n",
      "315 2.52658351722\n",
      "316 2.52651763505\n",
      "317 2.52645175509\n",
      "318 2.52638587732\n",
      "319 2.52632000176\n",
      "320 2.5262541284\n",
      "321 2.52618825724\n",
      "322 2.52612238829\n",
      "323 2.52605652153\n",
      "324 2.52599065698\n",
      "325 2.52592479463\n",
      "326 2.52585893448\n",
      "327 2.52579307653\n",
      "328 2.52572722078\n",
      "329 2.52566136724\n",
      "330 2.52559551589\n",
      "331 2.52552966675\n",
      "332 2.52546381981\n",
      "333 2.52539797507\n",
      "334 2.52533213253\n",
      "335 2.52526629219\n",
      "336 2.52520045405\n",
      "337 2.52513461811\n",
      "338 2.52506878437\n",
      "339 2.52500295283\n",
      "340 2.52493712349\n",
      "341 2.52487129635\n",
      "342 2.52480547142\n",
      "343 2.52473964868\n",
      "344 2.52467382814\n",
      "345 2.5246080098\n",
      "346 2.52454219366\n",
      "347 2.52447637972\n",
      "348 2.52441056798\n",
      "349 2.52434475844\n",
      "350 2.52427895109\n",
      "351 2.52421314595\n",
      "352 2.524147343\n",
      "353 2.52408154226\n",
      "354 2.52401574371\n",
      "355 2.52394994736\n",
      "356 2.52388415321\n",
      "357 2.52381836126\n",
      "358 2.52375257151\n",
      "359 2.52368678395\n",
      "360 2.52362099859\n",
      "361 2.52355521544\n",
      "362 2.52348943447\n",
      "363 2.52342365571\n",
      "364 2.52335787914\n",
      "365 2.52329210478\n",
      "366 2.52322633261\n",
      "367 2.52316056263\n",
      "368 2.52309479486\n",
      "369 2.52302902928\n",
      "370 2.52296326589\n",
      "371 2.52289750471\n",
      "372 2.52283174572\n",
      "373 2.52276598893\n",
      "374 2.52270023433\n",
      "375 2.52263448194\n",
      "376 2.52256873173\n",
      "377 2.52250298373\n",
      "378 2.52243723792\n",
      "379 2.52237149431\n",
      "380 2.52230575289\n",
      "381 2.52224001367\n",
      "382 2.52217427664\n",
      "383 2.52210854181\n",
      "384 2.52204280918\n",
      "385 2.52197707874\n",
      "386 2.5219113505\n",
      "387 2.52184562445\n",
      "388 2.5217799006\n",
      "389 2.52171417894\n",
      "390 2.52164845948\n",
      "391 2.52158274221\n",
      "392 2.52151702714\n",
      "393 2.52145131426\n",
      "394 2.52138560358\n",
      "395 2.52131989509\n",
      "396 2.52125418879\n",
      "397 2.52118848469\n",
      "398 2.52112278279\n",
      "399 2.52105708308\n",
      "400 2.52099138556\n",
      "401 2.52092569024\n",
      "402 2.5208599971\n",
      "403 2.52079430617\n",
      "404 2.52072861743\n",
      "405 2.52066293088\n",
      "406 2.52059724652\n",
      "407 2.52053156436\n",
      "408 2.52046588439\n",
      "409 2.52040020661\n",
      "410 2.52033453103\n",
      "411 2.52026885764\n",
      "412 2.52020318644\n",
      "413 2.52013751744\n",
      "414 2.52007185062\n",
      "415 2.520006186\n",
      "416 2.51994052357\n",
      "417 2.51987486334\n",
      "418 2.5198092053\n",
      "419 2.51974354944\n",
      "420 2.51967789579\n",
      "421 2.51961224432\n",
      "422 2.51954659504\n",
      "423 2.51948094796\n",
      "424 2.51941530307\n",
      "425 2.51934966036\n",
      "426 2.51928401986\n",
      "427 2.51921838154\n",
      "428 2.51915274541\n",
      "429 2.51908711147\n",
      "430 2.51902147973\n",
      "431 2.51895585017\n",
      "432 2.51889022281\n",
      "433 2.51882459764\n",
      "434 2.51875897465\n",
      "435 2.51869335386\n",
      "436 2.51862773526\n",
      "437 2.51856211885\n",
      "438 2.51849650463\n",
      "439 2.51843089259\n",
      "440 2.51836528275\n",
      "441 2.5182996751\n",
      "442 2.51823406964\n",
      "443 2.51816846637\n",
      "444 2.51810286528\n",
      "445 2.51803726639\n",
      "446 2.51797166969\n",
      "447 2.51790607517\n",
      "448 2.51784048285\n",
      "449 2.51777489271\n",
      "450 2.51770930476\n",
      "451 2.517643719\n",
      "452 2.51757813543\n",
      "453 2.51751255405\n",
      "454 2.51744697486\n",
      "455 2.51738139785\n",
      "456 2.51731582303\n",
      "457 2.51725025041\n",
      "458 2.51718467996\n",
      "459 2.51711911171\n",
      "460 2.51705354565\n",
      "461 2.51698798177\n",
      "462 2.51692242008\n",
      "463 2.51685686058\n",
      "464 2.51679130327\n",
      "465 2.51672574814\n",
      "466 2.5166601952\n",
      "467 2.51659464445\n",
      "468 2.51652909588\n",
      "469 2.5164635495\n",
      "470 2.51639800531\n",
      "471 2.51633246331\n",
      "472 2.51626692349\n",
      "473 2.51620138586\n",
      "474 2.51613585041\n",
      "475 2.51607031716\n",
      "476 2.51600478608\n",
      "477 2.5159392572\n",
      "478 2.5158737305\n",
      "479 2.51580820598\n",
      "480 2.51574268365\n",
      "481 2.51567716351\n",
      "482 2.51561164556\n",
      "483 2.51554612978\n",
      "484 2.5154806162\n",
      "485 2.5154151048\n",
      "486 2.51534959558\n",
      "487 2.51528408855\n",
      "488 2.51521858371\n",
      "489 2.51515308105\n",
      "490 2.51508758057\n",
      "491 2.51502208228\n",
      "492 2.51495658617\n",
      "493 2.51489109225\n",
      "494 2.51482560052\n",
      "495 2.51476011096\n",
      "496 2.5146946236\n",
      "497 2.51462913841\n",
      "498 2.51456365541\n",
      "499 2.51449817459\n"
     ]
    }
   ],
   "source": [
    "# Two-layer fully-connected ReLU network (no biases) trained with manual\n",
    "# backprop in pure numpy, minimizing the sum-of-squared-errors loss.\n",
    "# NOTE(review): assumes numpy was imported as `np` in an earlier cell.\n",
    "\n",
    "# Demo set: deliberately tiny dimensions so every array is easy to inspect\n",
    "N = 2      # N     is batch size\n",
    "D_in = 5   # D_in  is input dimension\n",
    "H = 2      # H     is hidden dimension\n",
    "D_out = 1  # D_out is output dimension\n",
    "\n",
    "# Create random input and output data\n",
    "x = np.random.randn(N, D_in)\n",
    "y = np.random.randn(N, D_out)\n",
    "\n",
    "# Randomly initialize weights (the model has no bias terms)\n",
    "w1 = np.random.randn(D_in, H)\n",
    "w2 = np.random.randn(H, D_out)\n",
    "\n",
    "# Learning rate for plain gradient descent. With these tiny demo sizes the\n",
    "# loss plateaus around 2.5 (see output above); a larger rate would converge\n",
    "# faster, but this value matches the original tutorial.\n",
    "learning_rate = 1e-6\n",
    "\n",
    "\n",
    "for t in range(500):\n",
    "    # Forward pass: compute predicted y\n",
    "    h = x.dot(w1)\n",
    "    h_relu = np.maximum(h, 0)  # elementwise ReLU, shape (N, H)\n",
    "    y_pred = h_relu.dot(w2)\n",
    "\n",
    "    # Compute and print loss: squared error summed over the whole batch\n",
    "    loss = np.square(y_pred - y).sum()\n",
    "    print(t, loss)\n",
    "\n",
    "    # Backprop to compute gradients of w1 and w2 with respect to loss\n",
    "    grad_y_pred = 2.0 * (y_pred - y)  # d(loss)/d(y_pred)\n",
    "    grad_w2 = h_relu.T.dot(grad_y_pred)\n",
    "    grad_h_relu = grad_y_pred.dot(w2.T)\n",
    "    grad_h = grad_h_relu.copy()\n",
    "    grad_h[h < 0] = 0  # ReLU gradient: zero wherever the pre-activation was negative\n",
    "    grad_w1 = x.T.dot(grad_h)\n",
    "\n",
    "    # Update weights: vanilla gradient descent step\n",
    "    w1 -= learning_rate * grad_w1\n",
    "    w2 -= learning_rate * grad_w2"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
