{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1.2.1\n"
     ]
    }
   ],
   "source": [
    "# Verify the TensorFlow installation and report which version is in use\n",
    "import tensorflow as tf\n",
    "print(tf.__version__)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Hello TensorFlow!"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "b'Hello, Tensorflow!'\n"
     ]
    }
   ],
   "source": [
    "hello = tf.constant(\"Hello, Tensorflow!\")\n",
    "sess = tf.Session()\n",
    "print(sess.run(hello))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "node1 = tf.constant(3.0, tf.float32)\n",
    "node2 = tf.constant(4.0)\n",
    "node3 = tf.add(node1, node2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "node1: Tensor(\"Const_6:0\", shape=(), dtype=float32) node2: Tensor(\"Const_7:0\", shape=(), dtype=float32)\n",
      "node3: Tensor(\"Add_1:0\", shape=(), dtype=float32)\n"
     ]
    }
   ],
   "source": [
    "# Printing the Python objects shows graph-node metadata (name, shape,\n",
    "# dtype), not values; values only exist once a session runs the graph\n",
    "print(\"node1:\", node1, \"node2:\", node2)\n",
    "print(\"node3:\", node3)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "sess.run(node1, node2):  [3.0, 4.0]\n",
      "sess.run(node3):  7.0\n"
     ]
    }
   ],
   "source": [
    "# Evaluate the nodes; a list of fetches returns a list of values\n",
    "# NOTE(review): this session is intentionally left open -- the\n",
    "# placeholder cell below reuses `sess`, so do not close it here\n",
    "sess = tf.Session()\n",
    "print(\"sess.run(node1, node2): \", sess.run([node1, node2]))\n",
    "print(\"sess.run(node3): \", sess.run(node3))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Placeholder"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "7.5\n",
      "[ 3.  7.]\n"
     ]
    }
   ],
   "source": [
    "# Placeholders are graph inputs whose values are supplied at run time\n",
    "# via feed_dict; no shape is given, so scalars and lists both work\n",
    "a = tf.placeholder(tf.float32)\n",
    "b = tf.placeholder(tf.float32)\n",
    "adder_node = a + b # + provides a shortcut for tf.add(a, b)\n",
    "# Relies on `sess` created in the previous code cell\n",
    "print(sess.run(adder_node, feed_dict={a: 3, b: 4.5}))\n",
    "print(sess.run(adder_node, feed_dict={a: [1, 3], b: [2, 4]}))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## One Example: Linear Regression"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "    0             6.4507527351379395    0.7729181051254272   0.12029736489057541\n",
      "   50             0.0345444530248642    1.7815378904342651    0.4891480505466461\n",
      "  100           0.027115657925605774    1.8087401390075684    0.4347575306892395\n",
      "  150            0.02131538651883602     1.830432415008545    0.3854668438434601\n",
      "  200           0.016755858436226845    1.8496583700180054   0.34176188707351685\n",
      "  250           0.013171636499464512    1.8667043447494507    0.3030121922492981\n",
      "  300           0.010354110971093178    1.8818178176879883   0.26865601539611816\n",
      "  350           0.008139262907207012    1.8952174186706543    0.2381952852010727\n",
      "  400           0.006398200523108244    1.9070980548858643   0.21118803322315216\n",
      "  450           0.005029577296227217    1.9176312685012817   0.18724310398101807\n",
      "  500           0.003953710664063692    1.9269704818725586   0.16601312160491943\n",
      "  550          0.0031079694163054228    1.9352507591247559    0.1471901386976242\n",
      "  600          0.0024431480560451746    1.9425922632217407     0.130501389503479\n",
      "  650          0.0019205367425456643     1.949101209640503   0.11570478230714798\n",
      "  700          0.0015097158029675484    1.9548722505569458   0.10258594155311584\n",
      "  750          0.0011867702705785632    1.9599891901016235    0.0909544825553894\n",
      "  800           0.000932907743845135    1.9645256996154785   0.08064168691635132\n",
      "  850           0.000733344117179513    1.9685478210449219   0.07149823009967804\n",
      "  900          0.0005764767411164939    1.9721139669418335    0.0633915513753891\n",
      "  950         0.00045316247269511223    1.9752756357192993   0.05620409920811653\n",
      " 1000          0.0003562284109648317     1.978078842163086  0.049831707030534744\n",
      " 1050          0.0002800301881507039    1.9805645942687988  0.044181838631629944\n",
      " 1100         0.00022012785484548658    1.9827680587768555  0.039172228425741196\n",
      " 1150         0.00017304001084994525    1.9847220182418823  0.034730736166238785\n",
      " 1200          0.0001360254391329363    1.9864541292190552    0.0307928454130888\n",
      " 1250         0.00010692805517464876    1.9879900217056274  0.027301505208015442\n",
      " 1300          8.405566768487915e-05    1.9893516302108765   0.02420603297650814\n",
      " 1350          6.607299292227253e-05    1.9905592203140259   0.02146138809621334\n",
      " 1400         5.1939510740339756e-05    1.9916296005249023  0.019027983769774437\n",
      " 1450          4.083027670276351e-05    1.9925786256790161  0.016870534047484398\n",
      " 1500          3.209519854863174e-05     1.993420124053955  0.014957722276449203\n",
      " 1550         2.5231036488548853e-05    1.9941661357879639  0.013261738233268261\n",
      " 1600          1.983338916033972e-05    1.9948277473449707  0.011758042499423027\n",
      " 1650         1.5590749171678908e-05     1.995414137840271   0.01042481791228056\n",
      " 1700         1.2255084584467113e-05     1.995934009552002  0.009242849424481392\n",
      " 1750            9.6348267106805e-06    1.9963949918746948  0.008194959722459316\n",
      " 1800          7.573100901936414e-06    1.9968037605285645  0.007265892345458269\n",
      " 1850          5.954103016847512e-06    1.9971660375595093  0.0064420937560498714\n",
      " 1900          4.680131041823188e-06    1.9974873065948486  0.0057117692194879055\n",
      " 1950          3.679012479551602e-06     1.997772216796875  0.005064274184405804\n",
      " 2000         2.8923916488565737e-06    1.9980247020721436  0.004490154329687357\n"
     ]
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "\n",
    "# X and Y data\n",
    "x_train = [1, 2, 3]\n",
    "y_train = [2, 4, 6]\n",
    "\n",
    "W = tf.Variable(tf.random_normal([1]), name=\"weight\")\n",
    "b = tf.Variable(tf.random_normal([1]), name=\"bias\")\n",
    "\n",
    "# Our hypothesis XW+b\n",
    "hypothesis = x_train * W + b\n",
    "\n",
    "# cost/loss function\n",
    "cost = tf.reduce_mean(tf.square(hypothesis - y_train))\n",
    "\n",
    "# Minimize\n",
    "optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)\n",
    "train = optimizer.minimize(cost)\n",
    "\n",
    "# Launch the graph in a session\n",
    "sess = tf.Session()\n",
    "# Initialize global variables in the group\n",
    "sess.run(tf.global_variables_initializer())\n",
    "\n",
    "# Fit the line\n",
    "for step in range(2001):\n",
    "    sess.run(train)\n",
    "    if step % 50 == 0:\n",
    "        line  = '{:>5} {:>30}  {:>20}  {:>20}'.format(step,sess.run(cost), sess.run(W)[0], sess.run(b)[0] )\n",
    "        print(line)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Placeholder Again"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0                  8.040029525756836    0.2799628973007202         0.3466492295265198\n",
      "20              0.027633318677544594    0.8900262713432312           0.48641237616539\n",
      "40              0.023989683017134666    0.8997710347175598         0.4618101716041565\n",
      "60              0.020950349047780037    0.9063466787338257          0.438118040561676\n",
      "80               0.01829608902335167    0.9124800562858582         0.4159744381904602\n",
      "100             0.015978118404746056    0.9182118773460388        0.39528101682662964\n",
      "120             0.013953832909464836    0.9235682487487793         0.3759428560733795\n",
      "140              0.01218597125262022    0.9285738468170166         0.3578711450099945\n",
      "160             0.010642101988196373    0.9332516193389893        0.34098291397094727\n",
      "180              0.00929381512105465    0.9376229643821716         0.3252006769180298\n",
      "200             0.008116357028484344    0.9417080879211426        0.31045210361480713\n",
      "220              0.00708808284252882    0.9455257058143616         0.2966694235801697\n",
      "240             0.006190077867358923     0.949093222618103        0.28378939628601074\n",
      "260             0.005405822303146124    0.9524272084236145        0.27175286412239075\n",
      "280             0.004720946773886681    0.9555427432060242         0.2605046033859253\n",
      "300            0.0041228472255170345    0.9584543704986572         0.2499929964542389\n",
      "320             0.003600510535761714    0.9611752033233643        0.24016980826854706\n",
      "340            0.0031443454790860415    0.9637178182601929         0.2309899777173996\n",
      "360             0.002745990874245763    0.9660939574241638         0.2224113643169403\n",
      "380             0.002398092532530427    0.9683144688606262        0.21439458429813385\n",
      "400            0.0020942736882716417    0.9703896045684814        0.20690281689167023\n",
      "420            0.0018289408180862665     0.972328782081604        0.19990164041519165\n",
      "440            0.0015972319524735212    0.9741410613059998        0.19335903227329254\n",
      "460            0.0013948630075901747    0.9758345484733582        0.18724481761455536\n",
      "480              0.00121815234888345    0.9774171710014343        0.18153107166290283\n",
      "500            0.0010638150852173567    0.9788960814476013         0.1761915385723114\n",
      "520            0.0009290425223298371    0.9802782535552979        0.17120173573493958\n",
      "540            0.0008113404619507492    0.9815698862075806        0.16653867065906525\n",
      "560            0.0007085452671162784    0.9827768802642822        0.16218098998069763\n",
      "580            0.0006187849212437868    0.9839047193527222         0.1581086963415146\n",
      "600            0.0005403871764428914    0.9849589467048645        0.15430313348770142\n",
      "620            0.0004719255375675857    0.9859439134597778        0.15074679255485535\n",
      "640            0.0004121334641240537     0.986864447593689        0.14742335677146912\n",
      "660           0.00035991930053569376    0.9877247214317322        0.14431758224964142\n",
      "680            0.0003143201756756753    0.9885286092758179        0.14141516387462616\n",
      "700           0.00027450075140222907    0.9892798662185669          0.138702854514122\n",
      "720           0.00023971908376552165     0.989982008934021        0.13616816699504852\n",
      "740           0.00020935032807756215    0.9906380772590637         0.1337994486093521\n",
      "760           0.00018282743985764682    0.9912511706352234        0.13158589601516724\n",
      "780            0.0001596651563886553    0.9918241500854492         0.1295173019170761\n",
      "800            0.0001394376449752599    0.9923595786094666        0.12758423388004303\n",
      "820             0.000121771648991853     0.992859959602356        0.12577775120735168\n",
      "840           0.00010634405771270394    0.9933275580406189         0.1240895614027977\n",
      "860            9.287114517064765e-05    0.9937645196914673        0.12251190096139908\n",
      "880            8.110500493785366e-05    0.9941729307174683         0.1210375651717186\n",
      "900            7.082850788719952e-05    0.9945544600486755        0.11965981125831604\n",
      "920            6.185595702845603e-05    0.9949111938476562        0.11837226152420044\n",
      "940            5.402002716436982e-05    0.9952444434165955        0.11716902256011963\n",
      "960             4.71762650704477e-05    0.9955559372901917        0.11604457348585129\n",
      "980           4.1198589315172285e-05    0.9958469271659851        0.11499375849962234\n",
      "1000           3.597832619561814e-05    0.9961189031600952        0.11401180177927017\n",
      "1020          3.1420273444382474e-05    0.9963730573654175        0.11309415102005005\n",
      "1040           2.744023186096456e-05    0.9966106414794922        0.11223659664392471\n",
      "1060           2.396321542619262e-05    0.9968326091766357        0.11143520474433899\n",
      "1080           2.092666909447871e-05    0.9970400333404541        0.11068631708621979\n",
      "1100           1.827591404435225e-05    0.9972338676452637        0.10998645424842834\n",
      "1120          1.5960929886205122e-05    0.9974150061607361        0.10933245718479156\n",
      "1140          1.3938113625044934e-05     0.997584342956543        0.10872126370668411\n",
      "1160          1.2172643437224906e-05    0.9977425336837769        0.10815010964870453\n",
      "1180          1.0630144970491529e-05    0.9978903532028198         0.1076163500547409\n",
      "1200           9.283750841859728e-06    0.9980285167694092        0.10711754858493805\n",
      "1220            8.10721394373104e-06    0.9981576800346375         0.1066514253616333\n",
      "1240           7.080020623106975e-06    0.9982783198356628        0.10621580481529236\n",
      "1260           6.183037385198986e-06    0.9983910918235779        0.10580869764089584\n",
      "1280           5.399662768468261e-06     0.998496413230896        0.10542827844619751\n",
      "1300           4.715638624475105e-06    0.9985948801040649        0.10507278889417648\n",
      "1320           4.118316155654611e-06    0.9986869096755981        0.10474059730768204\n",
      "1340          3.5966136238130275e-06    0.9987728595733643        0.10443011671304703\n",
      "1360           3.140850822092034e-06    0.9988532662391663        0.10413998365402222\n",
      "1380           2.743133109106566e-06    0.9989283680915833        0.10386883467435837\n",
      "1400           2.395528099441435e-06    0.9989985227584839        0.10361547768115997\n",
      "1420          2.0919837879773695e-06    0.9990640878677368        0.10337873548269272\n",
      "1440          1.8270862938152277e-06    0.9991254210472107        0.10315747559070587\n",
      "1460          1.5955718026816612e-06    0.9991826415061951        0.10295069962739944\n",
      "1480          1.3932282172390842e-06    0.9992361664772034        0.10275746881961823\n",
      "1500          1.2167778322691447e-06    0.9992862343788147        0.10257692635059357\n",
      "1520          1.0627384199324297e-06    0.9993329048156738        0.10240817070007324\n",
      "1540           9.280086601393123e-07     0.999376654624939        0.10225044935941696\n",
      "1560           8.104358357741148e-07    0.9994174242019653         0.1021030843257904\n",
      "1580           7.078212433953013e-07    0.9994555711746216        0.10196535289287567\n",
      "1600           6.180984541970247e-07     0.999491274356842        0.10183663666248322\n",
      "1620           5.397647555582807e-07    0.9995245337486267        0.10171634703874588\n",
      "1640          4.7144968107204477e-07    0.9995557069778442        0.10160394757986069\n",
      "1660           4.118854519674642e-07    0.9995847940444946        0.10149895399808884\n",
      "1680          3.5957151567345136e-07    0.9996119141578674        0.10140078514814377\n",
      "1700          3.1402478839481773e-07    0.9996373653411865        0.10130906850099564\n",
      "1720          2.7431855187387555e-07    0.9996611475944519        0.10122335702180862\n",
      "1740          2.3949897354214045e-07    0.9996832609176636         0.1011432483792305\n",
      "1760          2.0917691756494605e-07    0.9997040033340454         0.1010683998465538\n",
      "1780              1.826886091294e-07    0.9997234344482422        0.10099845379590988\n",
      "1800           1.595048786384723e-07    0.9997415542602539        0.10093308240175247\n",
      "1820          1.3936630693933694e-07    0.9997584223747253        0.10087200254201889\n",
      "1840           1.217431986333395e-07    0.9997742176055908        0.10081492364406586\n",
      "1860          1.0632005853494775e-07    0.9997889399528503        0.10076159983873367\n",
      "1880           9.284819668664568e-08    0.9998027682304382        0.10071175545454025\n",
      "1900           8.111034333069256e-08    0.9998157024383545        0.10066518932580948\n",
      "1920           7.084673825374921e-08    0.9998277425765991        0.10062169283628464\n",
      "1940           6.185259593394221e-08    0.9998390078544617        0.10058099031448364\n",
      "1960            5.40001110493904e-08    0.9998495578765869        0.10054296255111694\n",
      "1980          4.7192500574055884e-08    0.9998593330383301          0.100507453083992\n",
      "2000           4.118823326848542e-08     0.999868631362915        0.10047423839569092\n"
     ]
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "\n",
    "W = tf.Variable(tf.random_normal([1]), name=\"weight\")\n",
    "b = tf.Variable(tf.random_normal([1]), name=\"bias\")\n",
    "\n",
    "X = tf.placeholder(tf.float32, shape=[None])\n",
    "Y = tf.placeholder(tf.float32, shape=[None])\n",
    "\n",
    "# Our hypothesis XW+b\n",
    "hypothesis = X * W + b\n",
    "\n",
    "# cost/loss function\n",
    "cost = tf.reduce_mean(tf.square(hypothesis - Y))\n",
    "\n",
    "# Minimize\n",
    "optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)\n",
    "train = optimizer.minimize(cost)\n",
    "\n",
    "# Launch the graph in a session\n",
    "sess = tf.Session()\n",
    "# Initialize global variables in the group\n",
    "sess.run(tf.global_variables_initializer())\n",
    "\n",
    "# Fit the line\n",
    "for step in range(2001):\n",
    "    cost_val, W_val, b_val, _ = sess.run([cost, W, b, train], \n",
    "                                         feed_dict={X: [1, 2, 3, 4, 5], Y: [1.1, 2.1, 3.1, 4.1, 5.1]})\n",
    "    if step % 20 == 0:\n",
    "        line  = '{:<5} {:>30}  {:>20}  {:>25}'.format(step, cost_val, W_val[0], b_val[0])\n",
    "        print(line)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
