{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "\n",
    "PyTorch: Control Flow + Weight Sharing\n",
    "--------------------------------------\n",
    "\n",
    "To showcase the power of PyTorch dynamic graphs, we will implement a very strange\n",
    "model: <p style=\"color:red\">a fully-connected ReLU network that on each forward pass randomly chooses\n",
    "a number between 1 and 4 and has that many hidden layers, </p> reusing the same weights multiple times to compute the innermost hidden layers.\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "<h1 style=\"background-image: linear-gradient( 135deg, #ABDCFF 10%, #0396FF 100%);\"> Original tutorial code</h1>"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 644.5894775390625\n",
      "1 639.4033813476562\n",
      "2 638.3150024414062\n",
      "3 670.4744262695312\n",
      "4 635.1463012695312\n",
      "5 587.4576416015625\n",
      "6 634.0379638671875\n",
      "7 630.0640258789062\n",
      "8 439.10614013671875\n",
      "9 627.6533813476562\n",
      "10 626.1864013671875\n",
      "11 624.9271240234375\n",
      "12 301.4223327636719\n",
      "13 622.37158203125\n",
      "14 617.2005004882812\n",
      "15 601.5762939453125\n",
      "16 592.2183837890625\n",
      "17 605.8775634765625\n",
      "18 599.1980590820312\n",
      "19 539.9949951171875\n",
      "20 580.2672119140625\n",
      "21 567.6670532226562\n",
      "22 595.6627197265625\n",
      "23 536.1651000976562\n",
      "24 144.83700561523438\n",
      "25 133.03121948242188\n",
      "26 482.430419921875\n",
      "27 99.84098815917969\n",
      "28 364.9731140136719\n",
      "29 515.7755737304688\n",
      "30 396.20147705078125\n",
      "31 366.62249755859375\n",
      "32 333.6767272949219\n",
      "33 401.99188232421875\n",
      "34 240.6501922607422\n",
      "35 133.25538635253906\n",
      "36 335.3799743652344\n",
      "37 296.30657958984375\n",
      "38 110.04197692871094\n",
      "39 217.27403259277344\n",
      "40 74.6769027709961\n",
      "41 167.05450439453125\n",
      "42 147.43907165527344\n",
      "43 125.50924682617188\n",
      "44 242.14285278320312\n",
      "45 109.07363891601562\n",
      "46 252.27027893066406\n",
      "47 90.68605041503906\n",
      "48 92.59556579589844\n",
      "49 311.5611877441406\n",
      "50 262.8333435058594\n",
      "51 147.62608337402344\n",
      "52 163.28953552246094\n",
      "53 131.94967651367188\n",
      "54 253.73654174804688\n",
      "55 229.00221252441406\n",
      "56 124.68148040771484\n",
      "57 217.80731201171875\n",
      "58 194.7680206298828\n",
      "59 211.8444366455078\n",
      "60 200.88360595703125\n",
      "61 186.54022216796875\n",
      "62 142.44534301757812\n",
      "63 87.2152328491211\n",
      "64 83.6458740234375\n",
      "65 175.51730346679688\n",
      "66 100.840087890625\n",
      "67 47.882598876953125\n",
      "68 48.8625373840332\n",
      "69 55.73220443725586\n",
      "70 39.638301849365234\n",
      "71 137.59376525878906\n",
      "72 29.58913803100586\n",
      "73 165.44496154785156\n",
      "74 25.48832130432129\n",
      "75 32.679325103759766\n",
      "76 31.626670837402344\n",
      "77 29.94361114501953\n",
      "78 104.82805633544922\n",
      "79 18.18242073059082\n",
      "80 77.34294128417969\n",
      "81 21.407733917236328\n",
      "82 31.840303421020508\n",
      "83 30.844602584838867\n",
      "84 29.983734130859375\n",
      "85 47.41806411743164\n",
      "86 23.273366928100586\n",
      "87 26.894851684570312\n",
      "88 15.806925773620605\n",
      "89 21.87834358215332\n",
      "90 9.850272178649902\n",
      "91 51.10830307006836\n",
      "92 32.081398010253906\n",
      "93 36.676570892333984\n",
      "94 11.703764915466309\n",
      "95 12.95169448852539\n",
      "96 35.93577575683594\n",
      "97 34.420528411865234\n",
      "98 10.475223541259766\n",
      "99 37.35980224609375\n",
      "100 10.705766677856445\n",
      "101 26.645875930786133\n",
      "102 27.17351722717285\n",
      "103 14.537962913513184\n",
      "104 24.06525421142578\n",
      "105 15.799962043762207\n",
      "106 14.393028259277344\n",
      "107 24.110342025756836\n",
      "108 14.439888000488281\n",
      "109 11.066558837890625\n",
      "110 6.138638496398926\n",
      "111 6.138677597045898\n",
      "112 11.279731750488281\n",
      "113 12.470135688781738\n",
      "114 7.5423583984375\n",
      "115 3.9931657314300537\n",
      "116 9.707573890686035\n",
      "117 4.668452262878418\n",
      "118 4.2023773193359375\n",
      "119 6.030608177185059\n",
      "120 5.9962663650512695\n",
      "121 5.3848557472229\n",
      "122 3.735081672668457\n",
      "123 17.0473690032959\n",
      "124 4.792425155639648\n",
      "125 4.498193740844727\n",
      "126 13.048755645751953\n",
      "127 3.1621110439300537\n",
      "128 4.804751396179199\n",
      "129 4.957192420959473\n",
      "130 9.673398971557617\n",
      "131 4.218665599822998\n",
      "132 7.129064083099365\n",
      "133 6.938426971435547\n",
      "134 6.093203544616699\n",
      "135 2.2153499126434326\n",
      "136 5.926475524902344\n",
      "137 4.506303310394287\n",
      "138 4.268919467926025\n",
      "139 3.97977352142334\n",
      "140 3.467273473739624\n",
      "141 3.0076656341552734\n",
      "142 2.4325337409973145\n",
      "143 2.34617018699646\n",
      "144 5.905941009521484\n",
      "145 3.5206849575042725\n",
      "146 4.190763473510742\n",
      "147 5.295313358306885\n",
      "148 1.6042219400405884\n",
      "149 4.927402973175049\n",
      "150 3.9011404514312744\n",
      "151 1.6318823099136353\n",
      "152 1.8693619966506958\n",
      "153 2.2433230876922607\n",
      "154 1.8620675802230835\n",
      "155 3.0360872745513916\n",
      "156 2.611496686935425\n",
      "157 2.1834492683410645\n",
      "158 1.2052607536315918\n",
      "159 1.2690227031707764\n",
      "160 1.8801299333572388\n",
      "161 8.217716217041016\n",
      "162 0.8748463988304138\n",
      "163 1.8114030361175537\n",
      "164 2.4404385089874268\n",
      "165 2.273172378540039\n",
      "166 0.8698174357414246\n",
      "167 8.229072570800781\n",
      "168 4.177408218383789\n",
      "169 5.217184066772461\n",
      "170 8.202778816223145\n",
      "171 3.214500665664673\n",
      "172 5.6323933601379395\n",
      "173 3.033926486968994\n",
      "174 4.7644548416137695\n",
      "175 2.5419182777404785\n",
      "176 5.061861991882324\n",
      "177 3.493112564086914\n",
      "178 2.2709169387817383\n",
      "179 1.5191733837127686\n",
      "180 4.715404033660889\n",
      "181 1.9317446947097778\n",
      "182 1.2766023874282837\n",
      "183 2.463890552520752\n",
      "184 2.331646203994751\n",
      "185 0.7448781132698059\n",
      "186 10.333822250366211\n",
      "187 2.179875373840332\n",
      "188 5.244312286376953\n",
      "189 1.6582167148590088\n",
      "190 1.1955057382583618\n",
      "191 2.485564708709717\n",
      "192 4.3670172691345215\n",
      "193 2.4589266777038574\n",
      "194 2.5633294582366943\n",
      "195 3.3428549766540527\n",
      "196 3.804525852203369\n",
      "197 2.3058643341064453\n",
      "198 1.17978036403656\n",
      "199 5.325077533721924\n",
      "200 4.744513511657715\n",
      "201 1.4481887817382812\n",
      "202 1.2486845254898071\n",
      "203 6.358272552490234\n",
      "204 0.7577555775642395\n",
      "205 1.6603704690933228\n",
      "206 2.3668336868286133\n",
      "207 1.8935948610305786\n",
      "208 1.1522635221481323\n",
      "209 2.306366443634033\n",
      "210 1.4471715688705444\n",
      "211 0.5031273365020752\n",
      "212 2.095813512802124\n",
      "213 0.34166085720062256\n",
      "214 2.011462688446045\n",
      "215 2.044264078140259\n",
      "216 1.9303709268569946\n",
      "217 0.6344671249389648\n",
      "218 2.4840197563171387\n",
      "219 5.493138313293457\n",
      "220 1.3427574634552002\n",
      "221 3.246253252029419\n",
      "222 3.5537772178649902\n",
      "223 0.65740966796875\n",
      "224 1.4860278367996216\n",
      "225 1.789150357246399\n",
      "226 4.184706211090088\n",
      "227 2.1589019298553467\n",
      "228 1.3442343473434448\n",
      "229 2.0348970890045166\n",
      "230 1.9650530815124512\n",
      "231 3.7141735553741455\n",
      "232 2.620252847671509\n",
      "233 0.7310512065887451\n",
      "234 1.424752950668335\n",
      "235 3.2103567123413086\n",
      "236 3.499809503555298\n",
      "237 2.6127512454986572\n",
      "238 1.43972647190094\n",
      "239 0.5225221514701843\n",
      "240 1.3186808824539185\n",
      "241 1.6595628261566162\n",
      "242 1.5167120695114136\n",
      "243 3.152939796447754\n",
      "244 0.6148574352264404\n",
      "245 0.5666162967681885\n",
      "246 3.8564870357513428\n",
      "247 0.9333427548408508\n",
      "248 1.9786362648010254\n",
      "249 1.0786069631576538\n",
      "250 1.2727738618850708\n",
      "251 0.9299025535583496\n",
      "252 0.34814688563346863\n",
      "253 1.077824592590332\n",
      "254 1.2163254022598267\n",
      "255 0.7611970901489258\n",
      "256 0.843935489654541\n",
      "257 2.362119436264038\n",
      "258 0.7225409150123596\n",
      "259 0.29550379514694214\n",
      "260 0.5816428661346436\n",
      "261 0.28170451521873474\n",
      "262 2.18863582611084\n",
      "263 0.639942467212677\n",
      "264 0.8080781698226929\n",
      "265 0.5335939526557922\n",
      "266 0.5084010362625122\n",
      "267 1.7016098499298096\n",
      "268 0.5506588220596313\n",
      "269 1.560807466506958\n",
      "270 0.6764866709709167\n",
      "271 1.2507671117782593\n",
      "272 0.6026809811592102\n",
      "273 0.25898149609565735\n",
      "274 0.6666762232780457\n",
      "275 0.6802883744239807\n",
      "276 2.0125339031219482\n",
      "277 0.5551610589027405\n",
      "278 0.30264952778816223\n",
      "279 1.218165636062622\n",
      "280 0.2847461402416229\n",
      "281 3.7630691528320312\n",
      "282 0.7076034545898438\n",
      "283 0.6484886407852173\n",
      "284 6.097440242767334\n",
      "285 0.9183681607246399\n",
      "286 0.8267761468887329\n",
      "287 1.0331549644470215\n",
      "288 1.4138396978378296\n",
      "289 1.256657361984253\n",
      "290 8.332043647766113\n",
      "291 1.3026880025863647\n",
      "292 7.573310375213623\n",
      "293 3.568385362625122\n",
      "294 1.8962745666503906\n",
      "295 1.1696597337722778\n",
      "296 0.737970232963562\n",
      "297 1.5441551208496094\n",
      "298 1.6903986930847168\n",
      "299 0.4117388427257538\n",
      "300 0.4661448001861572\n",
      "301 0.471502423286438\n",
      "302 0.2755710184574127\n",
      "303 0.9743053913116455\n",
      "304 0.27519625425338745\n",
      "305 1.615442156791687\n",
      "306 1.178392767906189\n",
      "307 0.6244319081306458\n",
      "308 0.5473693013191223\n",
      "309 0.8445659279823303\n",
      "310 0.5233350396156311\n",
      "311 2.0721895694732666\n",
      "312 1.6363483667373657\n",
      "313 1.0256520509719849\n",
      "314 0.4204646646976471\n",
      "315 1.607802391052246\n",
      "316 0.7034971714019775\n",
      "317 0.7133800983428955\n",
      "318 0.8717623949050903\n",
      "319 0.8823215365409851\n",
      "320 1.4117668867111206\n",
      "321 1.59757399559021\n",
      "322 2.2390761375427246\n",
      "323 1.7341396808624268\n",
      "324 0.8628361821174622\n",
      "325 0.6561332941055298\n",
      "326 1.3995803594589233\n",
      "327 3.718745470046997\n",
      "328 0.9255829453468323\n",
      "329 2.2931597232818604\n",
      "330 2.5508639812469482\n",
      "331 1.4010423421859741\n",
      "332 1.1855590343475342\n",
      "333 1.605191946029663\n",
      "334 0.5754172205924988\n",
      "335 1.1590903997421265\n",
      "336 0.5507657527923584\n",
      "337 1.0730152130126953\n",
      "338 0.9127992391586304\n",
      "339 1.700661063194275\n",
      "340 0.9954304099082947\n",
      "341 0.8598686456680298\n",
      "342 0.7460465431213379\n",
      "343 0.5445670485496521\n",
      "344 0.7095814943313599\n",
      "345 0.5925920009613037\n",
      "346 0.3941842317581177\n",
      "347 0.8214141726493835\n",
      "348 0.8274108171463013\n",
      "349 1.2855846881866455\n",
      "350 0.6429855227470398\n",
      "351 0.9746942520141602\n",
      "352 1.0388435125350952\n",
      "353 0.5147882699966431\n",
      "354 1.1870168447494507\n",
      "355 0.8957431316375732\n",
      "356 0.7627952098846436\n",
      "357 0.6150514483451843\n",
      "358 1.6353123188018799\n",
      "359 4.967571258544922\n",
      "360 0.8702995181083679\n",
      "361 1.8445883989334106\n",
      "362 4.702449321746826\n",
      "363 0.413740873336792\n",
      "364 0.24813002347946167\n",
      "365 1.2423712015151978\n",
      "366 3.294370651245117\n",
      "367 0.8150647878646851\n",
      "368 0.34786897897720337\n",
      "369 6.449117183685303\n",
      "370 1.5678068399429321\n",
      "371 0.615065336227417\n",
      "372 1.4621039628982544\n",
      "373 0.8588904738426208\n",
      "374 5.277946949005127\n",
      "375 6.233215808868408\n",
      "376 1.5523759126663208\n",
      "377 2.2652504444122314\n",
      "378 9.289872169494629\n",
      "379 31.799680709838867\n",
      "380 0.5492620468139648\n",
      "381 2.2280750274658203\n",
      "382 55.934200286865234\n",
      "383 19.93022346496582\n",
      "384 0.4713277816772461\n",
      "385 53.690364837646484\n",
      "386 1.1393684148788452\n",
      "387 19.45939064025879\n",
      "388 2.0596158504486084\n",
      "389 9.711466789245605\n",
      "390 4.821805477142334\n",
      "391 6.763852119445801\n",
      "392 17.301593780517578\n",
      "393 3.2019712924957275\n",
      "394 1.2005407810211182\n",
      "395 12.170655250549316\n",
      "396 1.7445344924926758\n",
      "397 5.9385085105896\n",
      "398 7.3313822746276855\n",
      "399 15.560710906982422\n",
      "400 13.314248085021973\n",
      "401 3.7703216075897217\n",
      "402 4.797130584716797\n",
      "403 1.7364201545715332\n",
      "404 2.886693000793457\n",
      "405 1.5402151346206665\n",
      "406 3.3616933822631836\n",
      "407 1.20307195186615\n",
      "408 3.267069101333618\n",
      "409 2.0315754413604736\n",
      "410 15.17187213897705\n",
      "411 4.430215835571289\n",
      "412 1.556403398513794\n",
      "413 4.963619232177734\n",
      "414 2.223811626434326\n",
      "415 4.230879783630371\n",
      "416 4.862227916717529\n",
      "417 1.5348466634750366\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "418 1.1911224126815796\n",
      "419 0.8813117146492004\n",
      "420 1.302135944366455\n",
      "421 0.7241997122764587\n",
      "422 8.525102615356445\n",
      "423 15.13372802734375\n",
      "424 2.0926566123962402\n",
      "425 8.081398963928223\n",
      "426 16.25365447998047\n",
      "427 13.61452865600586\n",
      "428 6.380634784698486\n",
      "429 3.8261148929595947\n",
      "430 3.3459224700927734\n",
      "431 6.9500041007995605\n",
      "432 11.443480491638184\n",
      "433 7.508928298950195\n",
      "434 3.414703845977783\n",
      "435 1.4115219116210938\n",
      "436 1.6969949007034302\n",
      "437 1.3115109205245972\n",
      "438 2.016249179840088\n",
      "439 30.04707145690918\n",
      "440 15.225565910339355\n",
      "441 2.0710134506225586\n",
      "442 3.3934998512268066\n",
      "443 17.490060806274414\n",
      "444 17.14543914794922\n",
      "445 13.245141983032227\n",
      "446 7.75592565536499\n",
      "447 2.808720588684082\n",
      "448 52.353885650634766\n",
      "449 2.649913787841797\n",
      "450 1.0520647764205933\n",
      "451 15.007600784301758\n",
      "452 2.204399585723877\n",
      "453 5.307397365570068\n",
      "454 9.032097816467285\n",
      "455 8.293466567993164\n",
      "456 0.9743533730506897\n",
      "457 1.6548889875411987\n",
      "458 1.7362059354782104\n",
      "459 2.461306095123291\n",
      "460 3.49342679977417\n",
      "461 6.878848552703857\n",
      "462 1.8787566423416138\n",
      "463 2.4974610805511475\n",
      "464 3.206477403640747\n",
      "465 3.147373914718628\n",
      "466 0.7278113961219788\n",
      "467 1.2890630960464478\n",
      "468 1.2695194482803345\n",
      "469 4.548561096191406\n",
      "470 1.4962730407714844\n",
      "471 1.3257161378860474\n",
      "472 2.832949161529541\n",
      "473 1.19533371925354\n",
      "474 5.3267130851745605\n",
      "475 0.528583288192749\n",
      "476 0.7647038102149963\n",
      "477 1.6283109188079834\n",
      "478 2.439342737197876\n",
      "479 2.7473716735839844\n",
      "480 1.3611985445022583\n",
      "481 1.8302134275436401\n",
      "482 0.40356701612472534\n",
      "483 2.0062150955200195\n",
      "484 1.274535894393921\n",
      "485 0.8792392015457153\n",
      "486 1.1402006149291992\n",
      "487 0.5743080973625183\n",
      "488 0.8350138068199158\n",
      "489 0.5466452240943909\n",
      "490 0.5750575661659241\n",
      "491 0.3513561487197876\n",
      "492 8.70141887664795\n",
      "493 0.5746507048606873\n",
      "494 1.6245496273040771\n",
      "495 2.3665122985839844\n",
      "496 0.35075053572654724\n",
      "497 13.029436111450195\n",
      "498 0.5249580144882202\n",
      "499 4.01080846786499\n"
     ]
    }
   ],
   "source": [
    "import random\n",
    "import torch\n",
    "from torch.autograd import Variable\n",
    "\n",
    "\n",
    "class DynamicNet(torch.nn.Module):\n",
    "    def __init__(self, D_in, H, D_out):\n",
    "        \"\"\"\n",
    "        In the constructor we construct three nn.Linear instances that we will use\n",
    "        in the forward pass.\n",
    "        \"\"\"\n",
    "        super(DynamicNet, self).__init__()\n",
    "        self.input_linear = torch.nn.Linear(D_in, H)\n",
    "        self.middle_linear = torch.nn.Linear(H, H)\n",
    "        self.output_linear = torch.nn.Linear(H, D_out)\n",
    "\n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        For the forward pass of the model, we randomly choose either 0, 1, 2, or 3\n",
    "        and reuse the middle_linear Module that many times to compute hidden layer\n",
    "        representations.\n",
    "\n",
    "        Since each forward pass builds a dynamic computation graph, we can use normal\n",
    "        Python control-flow operators like loops or conditional statements when\n",
    "        defining the forward pass of the model.\n",
    "\n",
    "        Here we also see that it is perfectly safe to reuse the same Module many\n",
    "        times when defining a computational graph. This is a big improvement from Lua\n",
    "        Torch, where each Module could be used only once.\n",
    "        \"\"\"\n",
    "        h_relu = self.input_linear(x).clamp(min=0)\n",
    "        for _ in range(random.randint(0, 3)):\n",
    "            h_relu = self.middle_linear(h_relu).clamp(min=0)\n",
    "        y_pred = self.output_linear(h_relu)\n",
    "        return y_pred\n",
    "\n",
    "\n",
    "# N is batch size; D_in is input dimension;\n",
    "# H is hidden dimension; D_out is output dimension.\n",
    "N, D_in, H, D_out = 64, 1000, 100, 10\n",
    "\n",
    "# Create random Tensors to hold inputs and outputs, and wrap them in Variables\n",
    "x = Variable(torch.randn(N, D_in))\n",
    "y = Variable(torch.randn(N, D_out), requires_grad=False)\n",
    "\n",
    "# Construct our model by instantiating the class defined above\n",
    "model = DynamicNet(D_in, H, D_out)\n",
    "\n",
    "# Construct our loss function and an Optimizer. Training this strange model with\n",
    "# vanilla stochastic gradient descent is tough, so we use momentum\n",
    "criterion = torch.nn.MSELoss(size_average=False)\n",
    "optimizer = torch.optim.SGD(model.parameters(), lr=1e-4, momentum=0.9)\n",
    "for t in range(500):\n",
    "    # Forward pass: Compute predicted y by passing x to the model\n",
    "    y_pred = model(x)\n",
    "\n",
    "    # Compute and print loss\n",
    "    loss = criterion(y_pred, y)\n",
    "    print(t, loss.data[0])\n",
    "\n",
    "    # Zero gradients, perform a backward pass, and update the weights.\n",
    "    optimizer.zero_grad()\n",
    "    loss.backward()\n",
    "    optimizer.step()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
