{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "import tensorflow as tf\n",
    "import numpy as np"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "26514936.0\n",
      "19524868.0\n",
      "15322529.0\n",
      "12019982.0\n",
      "9195918.0\n",
      "6805892.5\n",
      "4928742.0\n",
      "3534993.8\n",
      "2551496.5\n",
      "1870914.8\n",
      "1404705.6\n",
      "1082058.8\n",
      "855130.9\n",
      "691904.9\n",
      "571054.0\n",
      "479052.1\n",
      "407110.38\n",
      "349588.12\n",
      "302707.88\n",
      "263944.12\n",
      "231465.25\n",
      "203959.06\n",
      "180467.3\n",
      "160270.58\n",
      "142796.78\n",
      "127605.375\n",
      "114340.125\n",
      "102718.3\n",
      "92493.67\n",
      "83470.266\n",
      "75482.83\n",
      "68393.3\n",
      "62088.625\n",
      "56471.3\n",
      "51446.164\n",
      "46940.734\n",
      "42890.664\n",
      "39245.36\n",
      "35955.984\n",
      "32984.39\n",
      "30293.855\n",
      "27854.434\n",
      "25639.191\n",
      "23624.307\n",
      "21792.76\n",
      "20121.19\n",
      "18596.484\n",
      "17202.727\n",
      "15927.594\n",
      "14759.803\n",
      "13687.406\n",
      "12701.176\n",
      "11793.742\n",
      "10958.146\n",
      "10188.092\n",
      "9477.413\n",
      "8821.324\n",
      "8214.984\n",
      "7654.196\n",
      "7134.8843\n",
      "6653.921\n",
      "6208.2256\n",
      "5794.9395\n",
      "5411.3223\n",
      "5055.1465\n",
      "4724.0645\n",
      "4416.282\n",
      "4129.99\n",
      "3863.5537\n",
      "3615.4614\n",
      "3384.4546\n",
      "3169.2515\n",
      "2968.558\n",
      "2781.3481\n",
      "2606.715\n",
      "2443.7332\n",
      "2291.5156\n",
      "2149.2783\n",
      "2016.3435\n",
      "1892.1638\n",
      "1776.0232\n",
      "1667.4966\n",
      "1565.9639\n",
      "1470.9521\n",
      "1381.999\n",
      "1298.698\n",
      "1220.6333\n",
      "1147.5104\n",
      "1078.968\n",
      "1014.7272\n",
      "954.48004\n",
      "897.9895\n",
      "845.06165\n",
      "795.3736\n",
      "748.7426\n",
      "704.9544\n",
      "663.84906\n",
      "625.24475\n",
      "588.9745\n",
      "554.89734\n",
      "522.87866\n",
      "492.80255\n",
      "464.52136\n",
      "437.91925\n",
      "412.9172\n",
      "389.39566\n",
      "367.26324\n",
      "346.43542\n",
      "326.84003\n",
      "308.4004\n",
      "291.04333\n",
      "274.69452\n",
      "259.3031\n",
      "244.8052\n",
      "231.14784\n",
      "218.29106\n",
      "206.16898\n",
      "194.75096\n",
      "183.98488\n",
      "173.84102\n",
      "164.27686\n",
      "155.25562\n",
      "146.74944\n",
      "138.72668\n",
      "131.15695\n",
      "124.01932\n",
      "117.28116\n",
      "110.92059\n",
      "104.91942\n",
      "99.25691\n",
      "93.91127\n",
      "88.86293\n",
      "84.092926\n",
      "79.59096\n",
      "75.33908\n",
      "71.32254\n",
      "67.52525\n",
      "63.939724\n",
      "60.550682\n",
      "57.348824\n",
      "54.321693\n",
      "51.45713\n",
      "48.75238\n",
      "46.19245\n",
      "43.773277\n",
      "41.48446\n",
      "39.319023\n",
      "37.270126\n",
      "35.332497\n",
      "33.49987\n",
      "31.764767\n",
      "30.122326\n",
      "28.567715\n",
      "27.096313\n",
      "25.702782\n",
      "24.385006\n",
      "23.135529\n",
      "21.952396\n",
      "20.832043\n",
      "19.770275\n",
      "18.764723\n",
      "17.812761\n",
      "16.910152\n",
      "16.053898\n",
      "15.242964\n",
      "14.474619\n",
      "13.746427\n",
      "13.055534\n",
      "12.400714\n",
      "11.779509\n",
      "11.190569\n",
      "10.632227\n",
      "10.102381\n",
      "9.60031\n",
      "9.123226\n",
      "8.670825\n",
      "8.24142\n",
      "7.834263\n",
      "7.4475794\n",
      "7.081009\n",
      "6.7326536\n",
      "6.401787\n",
      "6.0879507\n",
      "5.7901464\n",
      "5.507056\n",
      "5.2380056\n",
      "4.9828653\n",
      "4.7402973\n",
      "4.510073\n",
      "4.2914853\n",
      "4.0833883\n",
      "3.8857882\n",
      "3.6982794\n",
      "3.5199895\n",
      "3.3503299\n",
      "3.1892624\n",
      "3.036211\n",
      "2.8905032\n",
      "2.7522318\n",
      "2.6207633\n",
      "2.4953897\n",
      "2.3763242\n",
      "2.2634227\n",
      "2.1558757\n",
      "2.0531745\n",
      "1.9559704\n",
      "1.8632722\n",
      "1.7751354\n",
      "1.6913493\n",
      "1.611572\n",
      "1.5356061\n",
      "1.4634115\n",
      "1.3945439\n",
      "1.3291769\n",
      "1.2668581\n",
      "1.2075202\n",
      "1.1511459\n",
      "1.0973567\n",
      "1.0461936\n",
      "0.99744743\n",
      "0.95097375\n",
      "0.9067708\n",
      "0.8647212\n",
      "0.82467186\n",
      "0.78641\n",
      "0.7500959\n",
      "0.7154025\n",
      "0.68242097\n",
      "0.6509592\n",
      "0.6210662\n",
      "0.5925565\n",
      "0.56516856\n",
      "0.5393885\n",
      "0.5146034\n",
      "0.49100652\n",
      "0.4685312\n",
      "0.44715294\n",
      "0.42675278\n",
      "0.40729624\n",
      "0.38872376\n",
      "0.37101802\n",
      "0.3542655\n",
      "0.33814347\n",
      "0.3228119\n",
      "0.30815342\n",
      "0.29427117\n",
      "0.28094205\n",
      "0.26825488\n",
      "0.25611526\n",
      "0.24457105\n",
      "0.23357283\n",
      "0.2230385\n",
      "0.21306169\n",
      "0.20348123\n",
      "0.19433603\n",
      "0.185631\n",
      "0.17735606\n",
      "0.16941682\n",
      "0.16181277\n",
      "0.15461515\n",
      "0.14775258\n",
      "0.14116354\n",
      "0.13486399\n",
      "0.12886137\n",
      "0.12314841\n",
      "0.11767881\n",
      "0.112479165\n",
      "0.10752201\n",
      "0.102791876\n",
      "0.09826136\n",
      "0.09391147\n",
      "0.089788206\n",
      "0.0858317\n",
      "0.082064494\n",
      "0.07849058\n",
      "0.075032115\n",
      "0.071731254\n",
      "0.06860505\n",
      "0.065607026\n",
      "0.062742725\n",
      "0.059995666\n",
      "0.057398\n",
      "0.054882035\n",
      "0.052502327\n",
      "0.050237797\n",
      "0.048050765\n",
      "0.04594428\n",
      "0.043963857\n",
      "0.042070728\n",
      "0.040238015\n",
      "0.03848152\n",
      "0.036837466\n",
      "0.035243303\n",
      "0.03369986\n",
      "0.03226074\n",
      "0.030863795\n",
      "0.029541466\n",
      "0.028278355\n",
      "0.027061146\n",
      "0.025901444\n",
      "0.024790185\n",
      "0.02375029\n",
      "0.02273976\n",
      "0.02176781\n",
      "0.02084827\n",
      "0.01995645\n",
      "0.019115841\n",
      "0.018296383\n",
      "0.017520146\n",
      "0.01677917\n",
      "0.01606499\n",
      "0.0153838955\n",
      "0.0147353755\n",
      "0.014127195\n",
      "0.013530338\n",
      "0.0129896235\n",
      "0.0124247335\n",
      "0.011909455\n",
      "0.011418125\n",
      "0.010942186\n",
      "0.010489898\n",
      "0.010061354\n",
      "0.009640105\n",
      "0.009238647\n",
      "0.0088588055\n",
      "0.008502245\n",
      "0.008160276\n",
      "0.007821736\n",
      "0.0075080255\n",
      "0.0072013536\n",
      "0.0069066156\n",
      "0.006628414\n",
      "0.006365006\n",
      "0.0061054192\n",
      "0.0058635385\n",
      "0.0056340448\n",
      "0.0054035406\n",
      "0.005189752\n",
      "0.004989109\n",
      "0.0047919364\n",
      "0.0045977933\n",
      "0.004422253\n",
      "0.0042459667\n",
      "0.004085063\n",
      "0.003925968\n",
      "0.0037758043\n",
      "0.0036321245\n",
      "0.00349227\n",
      "0.003363973\n",
      "0.0032327892\n",
      "0.0031160004\n",
      "0.0029993835\n",
      "0.0028894558\n",
      "0.0027830135\n",
      "0.002681358\n",
      "0.002578262\n",
      "0.002485126\n",
      "0.0023959405\n",
      "0.0023100087\n",
      "0.0022285352\n",
      "0.0021473526\n",
      "0.0020694055\n",
      "0.001994932\n",
      "0.001926325\n",
      "0.0018584175\n",
      "0.0017943137\n",
      "0.0017322528\n",
      "0.0016743413\n",
      "0.0016161597\n",
      "0.0015640933\n",
      "0.0015103722\n",
      "0.0014577949\n",
      "0.0014104494\n",
      "0.0013655992\n",
      "0.0013210406\n",
      "0.0012770416\n",
      "0.001234144\n",
      "0.0011943174\n",
      "0.0011583728\n",
      "0.0011203431\n",
      "0.0010846971\n",
      "0.0010516478\n",
      "0.0010192657\n",
      "0.0009894355\n",
      "0.00095715246\n",
      "0.0009288247\n",
      "0.0008991781\n",
      "0.00087367307\n",
      "0.00084751716\n",
      "0.0008219717\n",
      "0.0007965053\n",
      "0.00077225966\n",
      "0.0007491661\n",
      "0.00072780973\n",
      "0.0007072546\n",
      "0.00068790856\n",
      "0.00066841964\n",
      "0.0006492304\n",
      "0.00063093647\n",
      "0.00061232643\n",
      "0.0005962688\n",
      "0.00057982805\n",
      "0.0005641638\n",
      "0.0005484029\n",
      "0.00053437776\n",
      "0.0005197964\n",
      "0.00050431665\n",
      "0.0004913708\n",
      "0.000479298\n",
      "0.00046811416\n",
      "0.00045630365\n",
      "0.000443893\n",
      "0.0004326324\n",
      "0.00042279589\n",
      "0.00041262677\n",
      "0.00040176063\n",
      "0.00039152807\n",
      "0.00038264052\n",
      "0.00037335197\n",
      "0.00036412058\n",
      "0.0003561973\n",
      "0.00034772782\n",
      "0.00033884577\n",
      "0.00033145148\n",
      "0.0003234575\n",
      "0.00031640532\n",
      "0.0003098236\n",
      "0.0003025475\n",
      "0.00029606724\n",
      "0.0002902206\n",
      "0.00028263513\n",
      "0.0002781144\n",
      "0.00027116825\n",
      "0.00026636646\n",
      "0.0002600014\n",
      "0.00025416203\n",
      "0.00024892186\n",
      "0.00024299441\n",
      "0.00023861992\n",
      "0.00023486115\n",
      "0.00022912674\n",
      "0.00022513545\n",
      "0.00022033286\n",
      "0.00021575451\n",
      "0.0002110105\n",
      "0.00020773336\n",
      "0.00020295853\n",
      "0.00019961747\n",
      "0.00019565789\n",
      "0.00019158296\n",
      "0.00018853418\n",
      "0.0001843484\n",
      "0.00018051559\n",
      "0.00017737222\n",
      "0.00017425815\n",
      "0.0001709947\n",
      "0.00016776059\n",
      "0.00016482381\n",
      "0.0001616303\n",
      "0.0001595223\n",
      "0.00015607265\n",
      "0.00015406865\n",
      "0.00015081349\n",
      "0.00014863038\n",
      "0.00014587387\n",
      "0.00014335712\n",
      "0.00014079356\n",
      "0.00013868882\n",
      "0.00013665434\n",
      "0.00013429686\n",
      "0.0001320689\n",
      "0.00013012443\n",
      "0.00012783892\n",
      "0.00012619006\n",
      "0.00012335129\n",
      "0.00012180799\n",
      "0.0001199624\n",
      "0.000117929376\n",
      "0.00011606703\n",
      "0.00011402084\n",
      "0.00011214467\n",
      "0.00011046446\n",
      "0.00010895438\n",
      "0.00010751448\n",
      "0.00010527778\n",
      "0.000103980245\n",
      "0.00010252332\n",
      "0.00010046787\n",
      "9.923325e-05\n",
      "9.803347e-05\n",
      "9.648399e-05\n",
      "9.549531e-05\n",
      "9.394537e-05\n",
      "9.2358874e-05\n",
      "9.186407e-05\n",
      "9.020361e-05\n",
      "8.9002555e-05\n",
      "8.747731e-05\n",
      "8.594796e-05\n",
      "8.4545914e-05\n"
     ]
    }
   ],
   "source": [
    "# Train a two-layer fully connected network on random data using the\n",
    "# TensorFlow 1.x static-graph API (tf.placeholder / tf.Session),\n",
    "# applying gradient-descent updates manually via assign ops.\n",
    "# N = batch size, D_in = input dim, H = hidden dim, D_out = output dim.\n",
    "N, D_in, H, D_out = 64, 1000, 100, 10\n",
    "learning_rate = 1e-6\n",
    "\n",
    "def two_layer_net(x, y, w1, w2):\n",
    "    # Forward pass: affine -> ReLU -> affine, then sum-of-squares loss.\n",
    "    h = tf.matmul(x, w1)\n",
    "    h_relu = tf.maximum(h, tf.zeros(1))  # ReLU via broadcasted elementwise max\n",
    "    y_pred = tf.matmul(h_relu, w2)\n",
    "    loss = tf.reduce_sum((y_pred - y) ** 2.0)\n",
    "    return loss\n",
    "\n",
    "with tf.device('/cpu:0'):\n",
    "    # Graph inputs; first dimension left as None so the batch size can vary.\n",
    "    x = tf.placeholder(tf.float32, shape=(None, D_in))\n",
    "    y = tf.placeholder(tf.float32, shape=(None, D_out))\n",
    "\n",
    "    # Weights drawn from a standard normal; no seed is set, so each run differs.\n",
    "    w1 = tf.Variable(tf.random_normal((D_in, H)))\n",
    "    w2 = tf.Variable(tf.random_normal((H, D_out)))\n",
    "    \n",
    "    loss = two_layer_net(x, y, w1, w2)\n",
    "    \n",
    "    # Symbolic gradients of the scalar loss w.r.t. both weight matrices.\n",
    "    grad_w1, grad_w2 = tf.gradients(loss, [w1, w2])\n",
    "    \n",
    "    # Manual SGD step: the assign ops update the variables when fetched.\n",
    "    new_w1 = w1.assign(w1 - learning_rate * grad_w1)\n",
    "    new_w2 = w2.assign(w2 - learning_rate * grad_w2)\n",
    "    \n",
    "with tf.Session() as sess:\n",
    "    sess.run(tf.global_variables_initializer())\n",
    "    \n",
    "    # One fixed random batch, reused for every iteration, so the loss\n",
    "    # decreases monotonically toward fitting this single batch.\n",
    "    x_val = np.random.randn(N, D_in)\n",
    "    y_val = np.random.randn(N, D_out)\n",
    "    for _ in range(500):\n",
    "        # Fetching new_w1/new_w2 alongside the loss triggers the weight updates.\n",
    "        loss_val, _, _ = sess.run([loss, new_w1, new_w2], feed_dict={x: x_val, y: y_val})\n",
    "        print(loss_val)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
