{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "\n",
    "TensorFlow: Static Graphs\n",
    "-------------------------\n",
    "\n",
    "A fully-connected ReLU network with one hidden layer and no biases, trained to\n",
    "predict y from x by minimizing squared Euclidean distance.\n",
    "\n",
    "This implementation uses basic TensorFlow operations to set up a computational\n",
    "graph, then executes the graph many times to actually train the network.\n",
    "\n",
    "<p style=\"color:red\">One of the main differences between TensorFlow and PyTorch is that TensorFlow\n",
    "uses static computational graphs while PyTorch uses dynamic computational\n",
    "graphs.</p>\n",
    "\n",
    "In TensorFlow we first set up the computational graph, then execute the same\n",
    "graph many times.\n",
    "\n",
    "Source Link: http://pytorch.org/tutorials/beginner/examples_autograd/tf_two_layer_net.html"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "<h1 style=\"background-image: linear-gradient( 135deg, #ABDCFF 10%, #0396FF 100%);\"> Original Tutorial code"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "3.42794e+07\n",
      "3.31399e+07\n",
      "4.0607e+07\n",
      "5.68759e+07\n",
      "6.49585e+07\n",
      "4.8438e+07\n",
      "2.10492e+07\n",
      "6.16047e+06\n",
      "2.13148e+06\n",
      "1.20643e+06\n",
      "907197.0\n",
      "743926.0\n",
      "625882.0\n",
      "532665.0\n",
      "456912.0\n",
      "394395.0\n",
      "342362.0\n",
      "298747.0\n",
      "261990.0\n",
      "230826.0\n",
      "204253.0\n",
      "181457.0\n",
      "161794.0\n",
      "144777.0\n",
      "129994.0\n",
      "117064.0\n",
      "105723.0\n",
      "95733.2\n",
      "86896.5\n",
      "79055.0\n",
      "72067.5\n",
      "65823.2\n",
      "60230.9\n",
      "55207.1\n",
      "50683.5\n",
      "46598.9\n",
      "42903.9\n",
      "39556.7\n",
      "36516.1\n",
      "33750.0\n",
      "31229.9\n",
      "28929.7\n",
      "26825.8\n",
      "24899.8\n",
      "23134.8\n",
      "21514.1\n",
      "20024.6\n",
      "18653.6\n",
      "17390.7\n",
      "16225.8\n",
      "15150.2\n",
      "14155.6\n",
      "13235.2\n",
      "12383.0\n",
      "11592.9\n",
      "10860.7\n",
      "10181.0\n",
      "9549.42\n",
      "8962.21\n",
      "8416.18\n",
      "7907.55\n",
      "7433.49\n",
      "6991.56\n",
      "6579.33\n",
      "6194.39\n",
      "5835.22\n",
      "5500.06\n",
      "5186.59\n",
      "4893.04\n",
      "4618.26\n",
      "4360.84\n",
      "4119.55\n",
      "3893.19\n",
      "3680.81\n",
      "3481.51\n",
      "3294.24\n",
      "3118.13\n",
      "2952.64\n",
      "2796.98\n",
      "2650.55\n",
      "2512.68\n",
      "2382.85\n",
      "2260.5\n",
      "2145.15\n",
      "2036.44\n",
      "1933.99\n",
      "1837.31\n",
      "1746.02\n",
      "1659.81\n",
      "1578.36\n",
      "1501.38\n",
      "1428.57\n",
      "1359.71\n",
      "1294.55\n",
      "1232.85\n",
      "1174.47\n",
      "1119.11\n",
      "1066.67\n",
      "1016.96\n",
      "969.844\n",
      "925.142\n",
      "882.7\n",
      "842.429\n",
      "804.207\n",
      "767.894\n",
      "733.406\n",
      "700.624\n",
      "669.453\n",
      "639.806\n",
      "611.611\n",
      "584.8\n",
      "559.285\n",
      "534.99\n",
      "511.85\n",
      "489.808\n",
      "468.812\n",
      "448.822\n",
      "429.836\n",
      "411.728\n",
      "394.448\n",
      "377.976\n",
      "362.249\n",
      "347.231\n",
      "332.896\n",
      "319.207\n",
      "306.137\n",
      "293.638\n",
      "281.696\n",
      "270.283\n",
      "259.374\n",
      "248.941\n",
      "238.955\n",
      "229.407\n",
      "220.266\n",
      "211.521\n",
      "203.149\n",
      "195.136\n",
      "187.459\n",
      "180.108\n",
      "173.074\n",
      "166.342\n",
      "159.89\n",
      "153.707\n",
      "147.78\n",
      "142.099\n",
      "136.649\n",
      "131.422\n",
      "126.408\n",
      "121.599\n",
      "116.984\n",
      "112.552\n",
      "108.3\n",
      "104.22\n",
      "100.3\n",
      "96.5378\n",
      "92.9266\n",
      "89.4582\n",
      "86.1266\n",
      "82.9252\n",
      "79.8486\n",
      "76.893\n",
      "74.0533\n",
      "71.324\n",
      "68.6992\n",
      "66.1745\n",
      "63.7494\n",
      "61.4161\n",
      "59.1731\n",
      "57.0147\n",
      "54.9388\n",
      "52.9434\n",
      "51.0225\n",
      "49.1732\n",
      "47.3945\n",
      "45.6835\n",
      "44.0372\n",
      "42.452\n",
      "40.9252\n",
      "39.4572\n",
      "38.0431\n",
      "36.6821\n",
      "35.3706\n",
      "34.1081\n",
      "32.8924\n",
      "31.7212\n",
      "30.5939\n",
      "29.5071\n",
      "28.461\n",
      "27.453\n",
      "26.4819\n",
      "25.5465\n",
      "24.6453\n",
      "23.7767\n",
      "22.9391\n",
      "22.1328\n",
      "21.3559\n",
      "20.6066\n",
      "19.8841\n",
      "19.1878\n",
      "18.5167\n",
      "17.8696\n",
      "17.246\n",
      "16.6445\n",
      "16.0649\n",
      "15.5056\n",
      "14.9661\n",
      "14.446\n",
      "13.945\n",
      "13.4612\n",
      "12.9948\n",
      "12.5449\n",
      "12.111\n",
      "11.6926\n",
      "11.2889\n",
      "10.8992\n",
      "10.5233\n",
      "10.1611\n",
      "9.81116\n",
      "9.47379\n",
      "9.14797\n",
      "8.83393\n",
      "8.53078\n",
      "8.23814\n",
      "7.95578\n",
      "7.68348\n",
      "7.42025\n",
      "7.16662\n",
      "6.92158\n",
      "6.68549\n",
      "6.45705\n",
      "6.2367\n",
      "6.02424\n",
      "5.81902\n",
      "5.62097\n",
      "5.42954\n",
      "5.2449\n",
      "5.0667\n",
      "4.89455\n",
      "4.72856\n",
      "4.56802\n",
      "4.4132\n",
      "4.26373\n",
      "4.11923\n",
      "3.9796\n",
      "3.84504\n",
      "3.71517\n",
      "3.58952\n",
      "3.4682\n",
      "3.35117\n",
      "3.23807\n",
      "3.12881\n",
      "3.02342\n",
      "2.92153\n",
      "2.82314\n",
      "2.72808\n",
      "2.63627\n",
      "2.54761\n",
      "2.46202\n",
      "2.37919\n",
      "2.29944\n",
      "2.22216\n",
      "2.14748\n",
      "2.07553\n",
      "2.00599\n",
      "1.93869\n",
      "1.87387\n",
      "1.81103\n",
      "1.75041\n",
      "1.69183\n",
      "1.63531\n",
      "1.5806\n",
      "1.52783\n",
      "1.47676\n",
      "1.42744\n",
      "1.37989\n",
      "1.33379\n",
      "1.28928\n",
      "1.2463\n",
      "1.20474\n",
      "1.16459\n",
      "1.12584\n",
      "1.08841\n",
      "1.05218\n",
      "1.01715\n",
      "0.98325\n",
      "0.950599\n",
      "0.918996\n",
      "0.888456\n",
      "0.858972\n",
      "0.830475\n",
      "0.802899\n",
      "0.776367\n",
      "0.750564\n",
      "0.72573\n",
      "0.70163\n",
      "0.678303\n",
      "0.655833\n",
      "0.634122\n",
      "0.61314\n",
      "0.592831\n",
      "0.573227\n",
      "0.554274\n",
      "0.535961\n",
      "0.518223\n",
      "0.501148\n",
      "0.48454\n",
      "0.468553\n",
      "0.453147\n",
      "0.438155\n",
      "0.423669\n",
      "0.40971\n",
      "0.396208\n",
      "0.383126\n",
      "0.370495\n",
      "0.358322\n",
      "0.34652\n",
      "0.335097\n",
      "0.324048\n",
      "0.313415\n",
      "0.303053\n",
      "0.293101\n",
      "0.283483\n",
      "0.274126\n",
      "0.265138\n",
      "0.256394\n",
      "0.247979\n",
      "0.239852\n",
      "0.231942\n",
      "0.22434\n",
      "0.217022\n",
      "0.209885\n",
      "0.203013\n",
      "0.196358\n",
      "0.189899\n",
      "0.183692\n",
      "0.17768\n",
      "0.171823\n",
      "0.166198\n",
      "0.160736\n",
      "0.155492\n",
      "0.150409\n",
      "0.14548\n",
      "0.140718\n",
      "0.136104\n",
      "0.131651\n",
      "0.127363\n",
      "0.123218\n",
      "0.119168\n",
      "0.115284\n",
      "0.11152\n",
      "0.107867\n",
      "0.104366\n",
      "0.100946\n",
      "0.097651\n",
      "0.0944684\n",
      "0.0914035\n",
      "0.0884294\n",
      "0.0855437\n",
      "0.0827626\n",
      "0.0800635\n",
      "0.0774464\n",
      "0.074947\n",
      "0.0724915\n",
      "0.0701493\n",
      "0.0678595\n",
      "0.0656579\n",
      "0.0635197\n",
      "0.0614675\n",
      "0.0594809\n",
      "0.0575265\n",
      "0.0556571\n",
      "0.0538632\n",
      "0.0521038\n",
      "0.0504117\n",
      "0.0487763\n",
      "0.0472\n",
      "0.0456855\n",
      "0.0442098\n",
      "0.0427625\n",
      "0.0413908\n",
      "0.040061\n",
      "0.0387731\n",
      "0.0375206\n",
      "0.0363013\n",
      "0.0351392\n",
      "0.0340113\n",
      "0.0329194\n",
      "0.0318677\n",
      "0.030832\n",
      "0.0298421\n",
      "0.0288843\n",
      "0.0279561\n",
      "0.0270584\n",
      "0.0261906\n",
      "0.0253439\n",
      "0.0245362\n",
      "0.023756\n",
      "0.0229877\n",
      "0.0222692\n",
      "0.0215566\n",
      "0.0208613\n",
      "0.0202047\n",
      "0.0195609\n",
      "0.01894\n",
      "0.0183304\n",
      "0.0177567\n",
      "0.0171869\n",
      "0.0166426\n",
      "0.0161179\n",
      "0.0156015\n",
      "0.0151102\n",
      "0.0146419\n",
      "0.0141709\n",
      "0.0137285\n",
      "0.0132934\n",
      "0.0128779\n",
      "0.0124779\n",
      "0.0120866\n",
      "0.0117078\n",
      "0.0113389\n",
      "0.0109839\n",
      "0.0106471\n",
      "0.0103107\n",
      "0.00999243\n",
      "0.0096801\n",
      "0.00938287\n",
      "0.00908932\n",
      "0.00880603\n",
      "0.00853287\n",
      "0.00827315\n",
      "0.00801596\n",
      "0.00777026\n",
      "0.00752949\n",
      "0.00729691\n",
      "0.00707774\n",
      "0.00686058\n",
      "0.00665188\n",
      "0.00645187\n",
      "0.00625745\n",
      "0.00606701\n",
      "0.00588445\n",
      "0.00570636\n",
      "0.00553473\n",
      "0.00537114\n",
      "0.00520918\n",
      "0.00505279\n",
      "0.00490431\n",
      "0.00475926\n",
      "0.00462107\n",
      "0.0044856\n",
      "0.00435656\n",
      "0.0042272\n",
      "0.0041028\n",
      "0.00398765\n",
      "0.0038722\n",
      "0.00375816\n",
      "0.00365247\n",
      "0.003544\n",
      "0.00344299\n",
      "0.00334506\n",
      "0.00325089\n",
      "0.00315981\n",
      "0.00306924\n",
      "0.00298233\n",
      "0.00289907\n",
      "0.00281969\n",
      "0.00274046\n",
      "0.00266334\n",
      "0.00258998\n",
      "0.00251985\n",
      "0.00245259\n",
      "0.00238458\n",
      "0.00232139\n",
      "0.00225597\n",
      "0.00219629\n",
      "0.00213516\n",
      "0.00207942\n",
      "0.00202235\n",
      "0.00196852\n",
      "0.001918\n",
      "0.0018668\n",
      "0.00181846\n",
      "0.00177047\n",
      "0.0017257\n",
      "0.00167934\n",
      "0.00163787\n",
      "0.0015976\n",
      "0.00155877\n",
      "0.00151865\n",
      "0.00148013\n",
      "0.00144412\n",
      "0.00140619\n",
      "0.00137078\n",
      "0.0013389\n",
      "0.00130593\n",
      "0.00127337\n",
      "0.00124311\n",
      "0.00121232\n",
      "0.0011829\n",
      "0.00115557\n"
     ]
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "import numpy as np\n",
    "\n",
    "# NOTE(review): this cell uses the TensorFlow 1.x static-graph API\n",
    "# (tf.placeholder, tf.random_normal, tf.Session); it will not run\n",
    "# unmodified under TensorFlow 2.x — confirm the intended TF version.\n",
    "\n",
    "# First we set up the computational graph:\n",
    "\n",
    "# N is batch size; D_in is input dimension;\n",
    "# H is hidden dimension; D_out is output dimension.\n",
    "N, D_in, H, D_out = 64, 1000, 100, 10\n",
    "\n",
    "# Create placeholders for the input and target data; these will be filled\n",
    "# with real data when we execute the graph.\n",
    "x = tf.placeholder(tf.float32, shape=(None, D_in))\n",
    "y = tf.placeholder(tf.float32, shape=(None, D_out))\n",
    "\n",
    "# Create Variables for the weights and initialize them with random data.\n",
    "# A TensorFlow Variable persists its value across executions of the graph.\n",
    "w1 = tf.Variable(tf.random_normal((D_in, H)))\n",
    "w2 = tf.Variable(tf.random_normal((H, D_out)))\n",
    "\n",
    "# Forward pass: Compute the predicted y using operations on TensorFlow Tensors.\n",
    "# Note that this code does not actually perform any numeric operations; it\n",
    "# merely sets up the computational graph that we will later execute.\n",
    "h = tf.matmul(x, w1)\n",
    "# ReLU as elementwise max(h, 0): the shape-(1,) zeros tensor broadcasts\n",
    "# against h.\n",
    "h_relu = tf.maximum(h, tf.zeros(1))\n",
    "y_pred = tf.matmul(h_relu, w2)\n",
    "\n",
    "# Compute loss using operations on TensorFlow Tensors\n",
    "loss = tf.reduce_sum((y - y_pred) ** 2.0)\n",
    "\n",
    "# Compute gradient of the loss with respect to w1 and w2.\n",
    "grad_w1, grad_w2 = tf.gradients(loss, [w1, w2])\n",
    "\n",
    "# Update the weights using gradient descent. To actually update the weights\n",
    "# we need to evaluate new_w1 and new_w2 when executing the graph. Note that\n",
    "# in TensorFlow the act of updating the value of the weights is part of\n",
    "# the computational graph; in PyTorch this happens outside the computational\n",
    "# graph.\n",
    "learning_rate = 1e-6\n",
    "new_w1 = w1.assign(w1 - learning_rate * grad_w1)\n",
    "new_w2 = w2.assign(w2 - learning_rate * grad_w2)\n",
    "\n",
    "# Now we have built our computational graph, so we enter a TensorFlow session to\n",
    "# actually execute the graph.\n",
    "with tf.Session() as sess:\n",
    "    # Run the graph once to initialize the Variables w1 and w2.\n",
    "    sess.run(tf.global_variables_initializer())\n",
    "\n",
    "    # Create numpy arrays holding the actual data for the inputs x and targets\n",
    "    # y\n",
    "    x_value = np.random.randn(N, D_in)\n",
    "    y_value = np.random.randn(N, D_out)\n",
    "    for _ in range(500):\n",
    "        # Execute the graph many times. Each time it executes we want to bind\n",
    "        # x_value to x and y_value to y, specified with the feed_dict argument.\n",
    "        # Each time we execute the graph we want to compute the values for loss,\n",
    "        # new_w1, and new_w2; the values of these Tensors are returned as numpy\n",
    "        # arrays.\n",
    "        loss_value, _, _ = sess.run([loss, new_w1, new_w2],\n",
    "                                    feed_dict={x: x_value, y: y_value})\n",
    "        print(loss_value)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
