{
 "cells": [
  {
   "cell_type": "markdown",
   "source": [
    "# 手写数字识别"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "60000\n",
      "938\n",
      "训练次数: 0 [0/60000 (0%)]\tLoss: 2.336472\n",
      "训练次数: 0 [640/60000 (1%)]\tLoss: 1.679021\n",
      "训练次数: 0 [1280/60000 (2%)]\tLoss: 1.351189\n",
      "训练次数: 0 [1920/60000 (3%)]\tLoss: 1.143830\n",
      "训练次数: 0 [2560/60000 (4%)]\tLoss: 0.758618\n",
      "训练次数: 0 [3200/60000 (5%)]\tLoss: 0.663166\n",
      "训练次数: 0 [3840/60000 (6%)]\tLoss: 0.588599\n",
      "训练次数: 0 [4480/60000 (7%)]\tLoss: 0.434324\n",
      "训练次数: 0 [5120/60000 (9%)]\tLoss: 0.534081\n",
      "训练次数: 0 [5760/60000 (10%)]\tLoss: 0.431672\n",
      "训练次数: 0 [6400/60000 (11%)]\tLoss: 0.369155\n",
      "训练次数: 0 [7040/60000 (12%)]\tLoss: 0.428464\n",
      "训练次数: 0 [7680/60000 (13%)]\tLoss: 0.543975\n",
      "训练次数: 0 [8320/60000 (14%)]\tLoss: 0.388125\n",
      "训练次数: 0 [8960/60000 (15%)]\tLoss: 0.243284\n",
      "训练次数: 0 [9600/60000 (16%)]\tLoss: 0.452001\n",
      "训练次数: 0 [10240/60000 (17%)]\tLoss: 0.480195\n",
      "训练次数: 0 [10880/60000 (18%)]\tLoss: 0.667452\n",
      "训练次数: 0 [11520/60000 (19%)]\tLoss: 0.330840\n",
      "训练次数: 0 [12160/60000 (20%)]\tLoss: 0.607703\n",
      "训练次数: 0 [12800/60000 (21%)]\tLoss: 0.375975\n",
      "训练次数: 0 [13440/60000 (22%)]\tLoss: 0.321847\n",
      "训练次数: 0 [14080/60000 (23%)]\tLoss: 0.333224\n",
      "训练次数: 0 [14720/60000 (25%)]\tLoss: 0.300310\n",
      "训练次数: 0 [15360/60000 (26%)]\tLoss: 0.262927\n",
      "训练次数: 0 [16000/60000 (27%)]\tLoss: 0.302508\n",
      "训练次数: 0 [16640/60000 (28%)]\tLoss: 0.304269\n",
      "训练次数: 0 [17280/60000 (29%)]\tLoss: 0.211742\n",
      "训练次数: 0 [17920/60000 (30%)]\tLoss: 0.394214\n",
      "训练次数: 0 [18560/60000 (31%)]\tLoss: 0.309211\n",
      "训练次数: 0 [19200/60000 (32%)]\tLoss: 0.233181\n",
      "训练次数: 0 [19840/60000 (33%)]\tLoss: 0.359757\n",
      "训练次数: 0 [20480/60000 (34%)]\tLoss: 0.237675\n",
      "训练次数: 0 [21120/60000 (35%)]\tLoss: 0.409881\n",
      "训练次数: 0 [21760/60000 (36%)]\tLoss: 0.544524\n",
      "训练次数: 0 [22400/60000 (37%)]\tLoss: 0.250324\n",
      "训练次数: 0 [23040/60000 (38%)]\tLoss: 0.235920\n",
      "训练次数: 0 [23680/60000 (39%)]\tLoss: 0.306971\n",
      "训练次数: 0 [24320/60000 (41%)]\tLoss: 0.209432\n",
      "训练次数: 0 [24960/60000 (42%)]\tLoss: 0.221593\n",
      "训练次数: 0 [25600/60000 (43%)]\tLoss: 0.278639\n",
      "训练次数: 0 [26240/60000 (44%)]\tLoss: 0.469409\n",
      "训练次数: 0 [26880/60000 (45%)]\tLoss: 0.398497\n",
      "训练次数: 0 [27520/60000 (46%)]\tLoss: 0.396101\n",
      "训练次数: 0 [28160/60000 (47%)]\tLoss: 0.198684\n",
      "训练次数: 0 [28800/60000 (48%)]\tLoss: 0.297434\n",
      "训练次数: 0 [29440/60000 (49%)]\tLoss: 0.382240\n",
      "训练次数: 0 [30080/60000 (50%)]\tLoss: 0.307325\n",
      "训练次数: 0 [30720/60000 (51%)]\tLoss: 0.266366\n",
      "训练次数: 0 [31360/60000 (52%)]\tLoss: 0.146516\n",
      "训练次数: 0 [32000/60000 (53%)]\tLoss: 0.368307\n",
      "训练次数: 0 [32640/60000 (54%)]\tLoss: 0.253559\n",
      "训练次数: 0 [33280/60000 (55%)]\tLoss: 0.206360\n",
      "训练次数: 0 [33920/60000 (57%)]\tLoss: 0.230942\n",
      "训练次数: 0 [34560/60000 (58%)]\tLoss: 0.259280\n",
      "训练次数: 0 [35200/60000 (59%)]\tLoss: 0.353504\n",
      "训练次数: 0 [35840/60000 (60%)]\tLoss: 0.259421\n",
      "训练次数: 0 [36480/60000 (61%)]\tLoss: 0.207339\n",
      "训练次数: 0 [37120/60000 (62%)]\tLoss: 0.351206\n",
      "训练次数: 0 [37760/60000 (63%)]\tLoss: 0.262103\n",
      "训练次数: 0 [38400/60000 (64%)]\tLoss: 0.192041\n",
      "训练次数: 0 [39040/60000 (65%)]\tLoss: 0.279156\n",
      "训练次数: 0 [39680/60000 (66%)]\tLoss: 0.208164\n",
      "训练次数: 0 [40320/60000 (67%)]\tLoss: 0.201401\n",
      "训练次数: 0 [40960/60000 (68%)]\tLoss: 0.290704\n",
      "训练次数: 0 [41600/60000 (69%)]\tLoss: 0.249490\n",
      "训练次数: 0 [42240/60000 (70%)]\tLoss: 0.166795\n",
      "训练次数: 0 [42880/60000 (71%)]\tLoss: 0.125859\n",
      "训练次数: 0 [43520/60000 (72%)]\tLoss: 0.238525\n",
      "训练次数: 0 [44160/60000 (74%)]\tLoss: 0.243261\n",
      "训练次数: 0 [44800/60000 (75%)]\tLoss: 0.293648\n",
      "训练次数: 0 [45440/60000 (76%)]\tLoss: 0.113595\n",
      "训练次数: 0 [46080/60000 (77%)]\tLoss: 0.233549\n",
      "训练次数: 0 [46720/60000 (78%)]\tLoss: 0.401574\n",
      "训练次数: 0 [47360/60000 (79%)]\tLoss: 0.323083\n",
      "训练次数: 0 [48000/60000 (80%)]\tLoss: 0.261474\n",
      "训练次数: 0 [48640/60000 (81%)]\tLoss: 0.383459\n",
      "训练次数: 0 [49280/60000 (82%)]\tLoss: 0.339531\n",
      "训练次数: 0 [49920/60000 (83%)]\tLoss: 0.269854\n",
      "训练次数: 0 [50560/60000 (84%)]\tLoss: 0.272506\n",
      "训练次数: 0 [51200/60000 (85%)]\tLoss: 0.158945\n",
      "训练次数: 0 [51840/60000 (86%)]\tLoss: 0.173342\n",
      "训练次数: 0 [52480/60000 (87%)]\tLoss: 0.255968\n",
      "训练次数: 0 [53120/60000 (88%)]\tLoss: 0.193152\n",
      "训练次数: 0 [53760/60000 (90%)]\tLoss: 0.229850\n",
      "训练次数: 0 [54400/60000 (91%)]\tLoss: 0.291730\n",
      "训练次数: 0 [55040/60000 (92%)]\tLoss: 0.233477\n",
      "训练次数: 0 [55680/60000 (93%)]\tLoss: 0.224455\n",
      "训练次数: 0 [56320/60000 (94%)]\tLoss: 0.143910\n",
      "训练次数: 0 [56960/60000 (95%)]\tLoss: 0.155208\n",
      "训练次数: 0 [57600/60000 (96%)]\tLoss: 0.288262\n",
      "训练次数: 0 [58240/60000 (97%)]\tLoss: 0.273048\n",
      "训练次数: 0 [58880/60000 (98%)]\tLoss: 0.184147\n",
      "训练次数: 0 [59520/60000 (99%)]\tLoss: 0.403941\n",
      "\n",
      "测试结果: Avg. 损失: 0.2063, 准确率: 9407/10000 (94.07%)\n",
      "\n",
      "60000\n",
      "938\n",
      "训练次数: 1 [0/60000 (0%)]\tLoss: 0.127989\n",
      "训练次数: 1 [640/60000 (1%)]\tLoss: 0.141916\n",
      "训练次数: 1 [1280/60000 (2%)]\tLoss: 0.155300\n",
      "训练次数: 1 [1920/60000 (3%)]\tLoss: 0.062093\n",
      "训练次数: 1 [2560/60000 (4%)]\tLoss: 0.201578\n",
      "训练次数: 1 [3200/60000 (5%)]\tLoss: 0.268994\n",
      "训练次数: 1 [3840/60000 (6%)]\tLoss: 0.466052\n",
      "训练次数: 1 [4480/60000 (7%)]\tLoss: 0.211899\n",
      "训练次数: 1 [5120/60000 (9%)]\tLoss: 0.161566\n",
      "训练次数: 1 [5760/60000 (10%)]\tLoss: 0.235942\n",
      "训练次数: 1 [6400/60000 (11%)]\tLoss: 0.204791\n",
      "训练次数: 1 [7040/60000 (12%)]\tLoss: 0.218258\n",
      "训练次数: 1 [7680/60000 (13%)]\tLoss: 0.242258\n",
      "训练次数: 1 [8320/60000 (14%)]\tLoss: 0.206986\n",
      "训练次数: 1 [8960/60000 (15%)]\tLoss: 0.187001\n",
      "训练次数: 1 [9600/60000 (16%)]\tLoss: 0.120337\n",
      "训练次数: 1 [10240/60000 (17%)]\tLoss: 0.152773\n",
      "训练次数: 1 [10880/60000 (18%)]\tLoss: 0.130910\n",
      "训练次数: 1 [11520/60000 (19%)]\tLoss: 0.152460\n",
      "训练次数: 1 [12160/60000 (20%)]\tLoss: 0.389028\n",
      "训练次数: 1 [12800/60000 (21%)]\tLoss: 0.165290\n",
      "训练次数: 1 [13440/60000 (22%)]\tLoss: 0.146876\n",
      "训练次数: 1 [14080/60000 (23%)]\tLoss: 0.230678\n",
      "训练次数: 1 [14720/60000 (25%)]\tLoss: 0.160101\n",
      "训练次数: 1 [15360/60000 (26%)]\tLoss: 0.141390\n",
      "训练次数: 1 [16000/60000 (27%)]\tLoss: 0.237490\n",
      "训练次数: 1 [16640/60000 (28%)]\tLoss: 0.187563\n",
      "训练次数: 1 [17280/60000 (29%)]\tLoss: 0.359716\n",
      "训练次数: 1 [17920/60000 (30%)]\tLoss: 0.295035\n",
      "训练次数: 1 [18560/60000 (31%)]\tLoss: 0.255642\n",
      "训练次数: 1 [19200/60000 (32%)]\tLoss: 0.131947\n",
      "训练次数: 1 [19840/60000 (33%)]\tLoss: 0.082976\n",
      "训练次数: 1 [20480/60000 (34%)]\tLoss: 0.056362\n",
      "训练次数: 1 [21120/60000 (35%)]\tLoss: 0.166228\n",
      "训练次数: 1 [21760/60000 (36%)]\tLoss: 0.180607\n",
      "训练次数: 1 [22400/60000 (37%)]\tLoss: 0.113430\n",
      "训练次数: 1 [23040/60000 (38%)]\tLoss: 0.112063\n",
      "训练次数: 1 [23680/60000 (39%)]\tLoss: 0.133859\n",
      "训练次数: 1 [24320/60000 (41%)]\tLoss: 0.168546\n",
      "训练次数: 1 [24960/60000 (42%)]\tLoss: 0.168979\n",
      "训练次数: 1 [25600/60000 (43%)]\tLoss: 0.114862\n",
      "训练次数: 1 [26240/60000 (44%)]\tLoss: 0.115828\n",
      "训练次数: 1 [26880/60000 (45%)]\tLoss: 0.105336\n",
      "训练次数: 1 [27520/60000 (46%)]\tLoss: 0.422753\n",
      "训练次数: 1 [28160/60000 (47%)]\tLoss: 0.069437\n",
      "训练次数: 1 [28800/60000 (48%)]\tLoss: 0.124428\n",
      "训练次数: 1 [29440/60000 (49%)]\tLoss: 0.348649\n",
      "训练次数: 1 [30080/60000 (50%)]\tLoss: 0.198613\n",
      "训练次数: 1 [30720/60000 (51%)]\tLoss: 0.180220\n",
      "训练次数: 1 [31360/60000 (52%)]\tLoss: 0.222140\n",
      "训练次数: 1 [32000/60000 (53%)]\tLoss: 0.403829\n",
      "训练次数: 1 [32640/60000 (54%)]\tLoss: 0.167621\n",
      "训练次数: 1 [33280/60000 (55%)]\tLoss: 0.106707\n",
      "训练次数: 1 [33920/60000 (57%)]\tLoss: 0.235889\n",
      "训练次数: 1 [34560/60000 (58%)]\tLoss: 0.172221\n",
      "训练次数: 1 [35200/60000 (59%)]\tLoss: 0.188273\n",
      "训练次数: 1 [35840/60000 (60%)]\tLoss: 0.212085\n",
      "训练次数: 1 [36480/60000 (61%)]\tLoss: 0.137650\n",
      "训练次数: 1 [37120/60000 (62%)]\tLoss: 0.198541\n",
      "训练次数: 1 [37760/60000 (63%)]\tLoss: 0.226306\n",
      "训练次数: 1 [38400/60000 (64%)]\tLoss: 0.296135\n",
      "训练次数: 1 [39040/60000 (65%)]\tLoss: 0.082736\n",
      "训练次数: 1 [39680/60000 (66%)]\tLoss: 0.352831\n",
      "训练次数: 1 [40320/60000 (67%)]\tLoss: 0.200176\n",
      "训练次数: 1 [40960/60000 (68%)]\tLoss: 0.262173\n",
      "训练次数: 1 [41600/60000 (69%)]\tLoss: 0.298254\n",
      "训练次数: 1 [42240/60000 (70%)]\tLoss: 0.160364\n",
      "训练次数: 1 [42880/60000 (71%)]\tLoss: 0.241180\n",
      "训练次数: 1 [43520/60000 (72%)]\tLoss: 0.197150\n",
      "训练次数: 1 [44160/60000 (74%)]\tLoss: 0.176512\n",
      "训练次数: 1 [44800/60000 (75%)]\tLoss: 0.138169\n",
      "训练次数: 1 [45440/60000 (76%)]\tLoss: 0.055776\n",
      "训练次数: 1 [46080/60000 (77%)]\tLoss: 0.423145\n",
      "训练次数: 1 [46720/60000 (78%)]\tLoss: 0.116458\n",
      "训练次数: 1 [47360/60000 (79%)]\tLoss: 0.261541\n",
      "训练次数: 1 [48000/60000 (80%)]\tLoss: 0.115356\n",
      "训练次数: 1 [48640/60000 (81%)]\tLoss: 0.165596\n",
      "训练次数: 1 [49280/60000 (82%)]\tLoss: 0.187842\n",
      "训练次数: 1 [49920/60000 (83%)]\tLoss: 0.232339\n",
      "训练次数: 1 [50560/60000 (84%)]\tLoss: 0.187695\n",
      "训练次数: 1 [51200/60000 (85%)]\tLoss: 0.258225\n",
      "训练次数: 1 [51840/60000 (86%)]\tLoss: 0.145604\n",
      "训练次数: 1 [52480/60000 (87%)]\tLoss: 0.082255\n",
      "训练次数: 1 [53120/60000 (88%)]\tLoss: 0.131968\n",
      "训练次数: 1 [53760/60000 (90%)]\tLoss: 0.162539\n",
      "训练次数: 1 [54400/60000 (91%)]\tLoss: 0.295792\n",
      "训练次数: 1 [55040/60000 (92%)]\tLoss: 0.231815\n",
      "训练次数: 1 [55680/60000 (93%)]\tLoss: 0.192844\n",
      "训练次数: 1 [56320/60000 (94%)]\tLoss: 0.104736\n",
      "训练次数: 1 [56960/60000 (95%)]\tLoss: 0.379157\n",
      "训练次数: 1 [57600/60000 (96%)]\tLoss: 0.162790\n",
      "训练次数: 1 [58240/60000 (97%)]\tLoss: 0.182984\n",
      "训练次数: 1 [58880/60000 (98%)]\tLoss: 0.066731\n",
      "训练次数: 1 [59520/60000 (99%)]\tLoss: 0.046356\n",
      "\n",
      "测试结果: Avg. 损失: 0.1668, 准确率: 9518/10000 (95.18%)\n",
      "\n",
      "60000\n",
      "938\n",
      "训练次数: 2 [0/60000 (0%)]\tLoss: 0.141011\n",
      "训练次数: 2 [640/60000 (1%)]\tLoss: 0.092439\n",
      "训练次数: 2 [1280/60000 (2%)]\tLoss: 0.231853\n",
      "训练次数: 2 [1920/60000 (3%)]\tLoss: 0.194484\n",
      "训练次数: 2 [2560/60000 (4%)]\tLoss: 0.514473\n",
      "训练次数: 2 [3200/60000 (5%)]\tLoss: 0.184304\n",
      "训练次数: 2 [3840/60000 (6%)]\tLoss: 0.123068\n",
      "训练次数: 2 [4480/60000 (7%)]\tLoss: 0.173209\n",
      "训练次数: 2 [5120/60000 (9%)]\tLoss: 0.288269\n",
      "训练次数: 2 [5760/60000 (10%)]\tLoss: 0.208454\n",
      "训练次数: 2 [6400/60000 (11%)]\tLoss: 0.098317\n",
      "训练次数: 2 [7040/60000 (12%)]\tLoss: 0.318530\n",
      "训练次数: 2 [7680/60000 (13%)]\tLoss: 0.289296\n",
      "训练次数: 2 [8320/60000 (14%)]\tLoss: 0.169057\n",
      "训练次数: 2 [8960/60000 (15%)]\tLoss: 0.125397\n",
      "训练次数: 2 [9600/60000 (16%)]\tLoss: 0.097649\n",
      "训练次数: 2 [10240/60000 (17%)]\tLoss: 0.162390\n",
      "训练次数: 2 [10880/60000 (18%)]\tLoss: 0.020544\n",
      "训练次数: 2 [11520/60000 (19%)]\tLoss: 0.088257\n",
      "训练次数: 2 [12160/60000 (20%)]\tLoss: 0.109536\n",
      "训练次数: 2 [12800/60000 (21%)]\tLoss: 0.051511\n",
      "训练次数: 2 [13440/60000 (22%)]\tLoss: 0.094774\n",
      "训练次数: 2 [14080/60000 (23%)]\tLoss: 0.446137\n",
      "训练次数: 2 [14720/60000 (25%)]\tLoss: 0.255393\n",
      "训练次数: 2 [15360/60000 (26%)]\tLoss: 0.089063\n",
      "训练次数: 2 [16000/60000 (27%)]\tLoss: 0.071800\n",
      "训练次数: 2 [16640/60000 (28%)]\tLoss: 0.099372\n",
      "训练次数: 2 [17280/60000 (29%)]\tLoss: 0.071626\n",
      "训练次数: 2 [17920/60000 (30%)]\tLoss: 0.159938\n",
      "训练次数: 2 [18560/60000 (31%)]\tLoss: 0.124345\n",
      "训练次数: 2 [19200/60000 (32%)]\tLoss: 0.055706\n",
      "训练次数: 2 [19840/60000 (33%)]\tLoss: 0.144096\n",
      "训练次数: 2 [20480/60000 (34%)]\tLoss: 0.136470\n",
      "训练次数: 2 [21120/60000 (35%)]\tLoss: 0.164498\n",
      "训练次数: 2 [21760/60000 (36%)]\tLoss: 0.215267\n",
      "训练次数: 2 [22400/60000 (37%)]\tLoss: 0.136987\n",
      "训练次数: 2 [23040/60000 (38%)]\tLoss: 0.322661\n",
      "训练次数: 2 [23680/60000 (39%)]\tLoss: 0.080479\n",
      "训练次数: 2 [24320/60000 (41%)]\tLoss: 0.070262\n",
      "训练次数: 2 [24960/60000 (42%)]\tLoss: 0.058446\n",
      "训练次数: 2 [25600/60000 (43%)]\tLoss: 0.088926\n",
      "训练次数: 2 [26240/60000 (44%)]\tLoss: 0.174423\n",
      "训练次数: 2 [26880/60000 (45%)]\tLoss: 0.144977\n",
      "训练次数: 2 [27520/60000 (46%)]\tLoss: 0.180522\n",
      "训练次数: 2 [28160/60000 (47%)]\tLoss: 0.061289\n",
      "训练次数: 2 [28800/60000 (48%)]\tLoss: 0.268588\n",
      "训练次数: 2 [29440/60000 (49%)]\tLoss: 0.100543\n",
      "训练次数: 2 [30080/60000 (50%)]\tLoss: 0.257866\n",
      "训练次数: 2 [30720/60000 (51%)]\tLoss: 0.155833\n",
      "训练次数: 2 [31360/60000 (52%)]\tLoss: 0.189916\n",
      "训练次数: 2 [32000/60000 (53%)]\tLoss: 0.035962\n",
      "训练次数: 2 [32640/60000 (54%)]\tLoss: 0.054231\n",
      "训练次数: 2 [33280/60000 (55%)]\tLoss: 0.121188\n",
      "训练次数: 2 [33920/60000 (57%)]\tLoss: 0.065147\n",
      "训练次数: 2 [34560/60000 (58%)]\tLoss: 0.035238\n",
      "训练次数: 2 [35200/60000 (59%)]\tLoss: 0.094697\n",
      "训练次数: 2 [35840/60000 (60%)]\tLoss: 0.115566\n",
      "训练次数: 2 [36480/60000 (61%)]\tLoss: 0.113065\n",
      "训练次数: 2 [37120/60000 (62%)]\tLoss: 0.234583\n",
      "训练次数: 2 [37760/60000 (63%)]\tLoss: 0.045102\n",
      "训练次数: 2 [38400/60000 (64%)]\tLoss: 0.108866\n",
      "训练次数: 2 [39040/60000 (65%)]\tLoss: 0.095906\n",
      "训练次数: 2 [39680/60000 (66%)]\tLoss: 0.099544\n",
      "训练次数: 2 [40320/60000 (67%)]\tLoss: 0.151386\n",
      "训练次数: 2 [40960/60000 (68%)]\tLoss: 0.158733\n",
      "训练次数: 2 [41600/60000 (69%)]\tLoss: 0.151825\n",
      "训练次数: 2 [42240/60000 (70%)]\tLoss: 0.069017\n",
      "训练次数: 2 [42880/60000 (71%)]\tLoss: 0.119364\n",
      "训练次数: 2 [43520/60000 (72%)]\tLoss: 0.168943\n",
      "训练次数: 2 [44160/60000 (74%)]\tLoss: 0.189950\n",
      "训练次数: 2 [44800/60000 (75%)]\tLoss: 0.067689\n",
      "训练次数: 2 [45440/60000 (76%)]\tLoss: 0.301310\n",
      "训练次数: 2 [46080/60000 (77%)]\tLoss: 0.050373\n",
      "训练次数: 2 [46720/60000 (78%)]\tLoss: 0.362317\n",
      "训练次数: 2 [47360/60000 (79%)]\tLoss: 0.136803\n",
      "训练次数: 2 [48000/60000 (80%)]\tLoss: 0.118878\n",
      "训练次数: 2 [48640/60000 (81%)]\tLoss: 0.253311\n",
      "训练次数: 2 [49280/60000 (82%)]\tLoss: 0.049398\n",
      "训练次数: 2 [49920/60000 (83%)]\tLoss: 0.095562\n",
      "训练次数: 2 [50560/60000 (84%)]\tLoss: 0.184580\n",
      "训练次数: 2 [51200/60000 (85%)]\tLoss: 0.217150\n",
      "训练次数: 2 [51840/60000 (86%)]\tLoss: 0.081915\n",
      "训练次数: 2 [52480/60000 (87%)]\tLoss: 0.067923\n",
      "训练次数: 2 [53120/60000 (88%)]\tLoss: 0.111482\n",
      "训练次数: 2 [53760/60000 (90%)]\tLoss: 0.209571\n",
      "训练次数: 2 [54400/60000 (91%)]\tLoss: 0.039766\n",
      "训练次数: 2 [55040/60000 (92%)]\tLoss: 0.111330\n",
      "训练次数: 2 [55680/60000 (93%)]\tLoss: 0.102554\n",
      "训练次数: 2 [56320/60000 (94%)]\tLoss: 0.094700\n",
      "训练次数: 2 [56960/60000 (95%)]\tLoss: 0.215986\n",
      "训练次数: 2 [57600/60000 (96%)]\tLoss: 0.178325\n",
      "训练次数: 2 [58240/60000 (97%)]\tLoss: 0.193800\n",
      "训练次数: 2 [58880/60000 (98%)]\tLoss: 0.198436\n",
      "训练次数: 2 [59520/60000 (99%)]\tLoss: 0.076338\n",
      "\n",
      "测试结果: Avg. 损失: 0.1487, 准确率: 9564/10000 (95.64%)\n",
      "\n",
      "60000\n",
      "938\n",
      "训练次数: 3 [0/60000 (0%)]\tLoss: 0.246309\n",
      "训练次数: 3 [640/60000 (1%)]\tLoss: 0.067414\n",
      "训练次数: 3 [1280/60000 (2%)]\tLoss: 0.260008\n",
      "训练次数: 3 [1920/60000 (3%)]\tLoss: 0.173181\n",
      "训练次数: 3 [2560/60000 (4%)]\tLoss: 0.154557\n",
      "训练次数: 3 [3200/60000 (5%)]\tLoss: 0.056988\n",
      "训练次数: 3 [3840/60000 (6%)]\tLoss: 0.063144\n",
      "训练次数: 3 [4480/60000 (7%)]\tLoss: 0.104569\n",
      "训练次数: 3 [5120/60000 (9%)]\tLoss: 0.045610\n",
      "训练次数: 3 [5760/60000 (10%)]\tLoss: 0.050805\n",
      "训练次数: 3 [6400/60000 (11%)]\tLoss: 0.057447\n",
      "训练次数: 3 [7040/60000 (12%)]\tLoss: 0.089151\n",
      "训练次数: 3 [7680/60000 (13%)]\tLoss: 0.055603\n",
      "训练次数: 3 [8320/60000 (14%)]\tLoss: 0.314228\n",
      "训练次数: 3 [8960/60000 (15%)]\tLoss: 0.089801\n",
      "训练次数: 3 [9600/60000 (16%)]\tLoss: 0.133573\n",
      "训练次数: 3 [10240/60000 (17%)]\tLoss: 0.040859\n",
      "训练次数: 3 [10880/60000 (18%)]\tLoss: 0.120319\n",
      "训练次数: 3 [11520/60000 (19%)]\tLoss: 0.043319\n",
      "训练次数: 3 [12160/60000 (20%)]\tLoss: 0.162880\n",
      "训练次数: 3 [12800/60000 (21%)]\tLoss: 0.017786\n",
      "训练次数: 3 [13440/60000 (22%)]\tLoss: 0.145072\n",
      "训练次数: 3 [14080/60000 (23%)]\tLoss: 0.161653\n",
      "训练次数: 3 [14720/60000 (25%)]\tLoss: 0.124791\n",
      "训练次数: 3 [15360/60000 (26%)]\tLoss: 0.082578\n",
      "训练次数: 3 [16000/60000 (27%)]\tLoss: 0.154305\n",
      "训练次数: 3 [16640/60000 (28%)]\tLoss: 0.069769\n",
      "训练次数: 3 [17280/60000 (29%)]\tLoss: 0.055725\n",
      "训练次数: 3 [17920/60000 (30%)]\tLoss: 0.119328\n",
      "训练次数: 3 [18560/60000 (31%)]\tLoss: 0.352531\n",
      "训练次数: 3 [19200/60000 (32%)]\tLoss: 0.041623\n",
      "训练次数: 3 [19840/60000 (33%)]\tLoss: 0.139357\n",
      "训练次数: 3 [20480/60000 (34%)]\tLoss: 0.176683\n",
      "训练次数: 3 [21120/60000 (35%)]\tLoss: 0.127581\n",
      "训练次数: 3 [21760/60000 (36%)]\tLoss: 0.257314\n",
      "训练次数: 3 [22400/60000 (37%)]\tLoss: 0.117975\n",
      "训练次数: 3 [23040/60000 (38%)]\tLoss: 0.216395\n",
      "训练次数: 3 [23680/60000 (39%)]\tLoss: 0.107729\n",
      "训练次数: 3 [24320/60000 (41%)]\tLoss: 0.118308\n",
      "训练次数: 3 [24960/60000 (42%)]\tLoss: 0.159724\n",
      "训练次数: 3 [25600/60000 (43%)]\tLoss: 0.078744\n",
      "训练次数: 3 [26240/60000 (44%)]\tLoss: 0.112084\n",
      "训练次数: 3 [26880/60000 (45%)]\tLoss: 0.087375\n",
      "训练次数: 3 [27520/60000 (46%)]\tLoss: 0.052892\n",
      "训练次数: 3 [28160/60000 (47%)]\tLoss: 0.056337\n",
      "训练次数: 3 [28800/60000 (48%)]\tLoss: 0.113617\n",
      "训练次数: 3 [29440/60000 (49%)]\tLoss: 0.133460\n",
      "训练次数: 3 [30080/60000 (50%)]\tLoss: 0.125269\n",
      "训练次数: 3 [30720/60000 (51%)]\tLoss: 0.170829\n",
      "训练次数: 3 [31360/60000 (52%)]\tLoss: 0.109652\n",
      "训练次数: 3 [32000/60000 (53%)]\tLoss: 0.098644\n",
      "训练次数: 3 [32640/60000 (54%)]\tLoss: 0.049471\n",
      "训练次数: 3 [33280/60000 (55%)]\tLoss: 0.346981\n",
      "训练次数: 3 [33920/60000 (57%)]\tLoss: 0.256737\n",
      "训练次数: 3 [34560/60000 (58%)]\tLoss: 0.085538\n",
      "训练次数: 3 [35200/60000 (59%)]\tLoss: 0.243947\n",
      "训练次数: 3 [35840/60000 (60%)]\tLoss: 0.253856\n",
      "训练次数: 3 [36480/60000 (61%)]\tLoss: 0.120522\n",
      "训练次数: 3 [37120/60000 (62%)]\tLoss: 0.225447\n",
      "训练次数: 3 [37760/60000 (63%)]\tLoss: 0.052211\n",
      "训练次数: 3 [38400/60000 (64%)]\tLoss: 0.055213\n",
      "训练次数: 3 [39040/60000 (65%)]\tLoss: 0.042558\n",
      "训练次数: 3 [39680/60000 (66%)]\tLoss: 0.197548\n",
      "训练次数: 3 [40320/60000 (67%)]\tLoss: 0.212007\n",
      "训练次数: 3 [40960/60000 (68%)]\tLoss: 0.216007\n",
      "训练次数: 3 [41600/60000 (69%)]\tLoss: 0.060239\n",
      "训练次数: 3 [42240/60000 (70%)]\tLoss: 0.041006\n",
      "训练次数: 3 [42880/60000 (71%)]\tLoss: 0.345476\n",
      "训练次数: 3 [43520/60000 (72%)]\tLoss: 0.089587\n",
      "训练次数: 3 [44160/60000 (74%)]\tLoss: 0.052160\n",
      "训练次数: 3 [44800/60000 (75%)]\tLoss: 0.058722\n",
      "训练次数: 3 [45440/60000 (76%)]\tLoss: 0.072085\n",
      "训练次数: 3 [46080/60000 (77%)]\tLoss: 0.314768\n",
      "训练次数: 3 [46720/60000 (78%)]\tLoss: 0.137245\n",
      "训练次数: 3 [47360/60000 (79%)]\tLoss: 0.046387\n",
      "训练次数: 3 [48000/60000 (80%)]\tLoss: 0.110497\n",
      "训练次数: 3 [48640/60000 (81%)]\tLoss: 0.162904\n",
      "训练次数: 3 [49280/60000 (82%)]\tLoss: 0.151135\n",
      "训练次数: 3 [49920/60000 (83%)]\tLoss: 0.114546\n",
      "训练次数: 3 [50560/60000 (84%)]\tLoss: 0.211225\n",
      "训练次数: 3 [51200/60000 (85%)]\tLoss: 0.036666\n",
      "训练次数: 3 [51840/60000 (86%)]\tLoss: 0.097788\n",
      "训练次数: 3 [52480/60000 (87%)]\tLoss: 0.070236\n",
      "训练次数: 3 [53120/60000 (88%)]\tLoss: 0.069643\n",
      "训练次数: 3 [53760/60000 (90%)]\tLoss: 0.041471\n",
      "训练次数: 3 [54400/60000 (91%)]\tLoss: 0.052968\n",
      "训练次数: 3 [55040/60000 (92%)]\tLoss: 0.108493\n",
      "训练次数: 3 [55680/60000 (93%)]\tLoss: 0.120368\n",
      "训练次数: 3 [56320/60000 (94%)]\tLoss: 0.087901\n",
      "训练次数: 3 [56960/60000 (95%)]\tLoss: 0.085942\n",
      "训练次数: 3 [57600/60000 (96%)]\tLoss: 0.185661\n",
      "训练次数: 3 [58240/60000 (97%)]\tLoss: 0.052695\n",
      "训练次数: 3 [58880/60000 (98%)]\tLoss: 0.115324\n",
      "训练次数: 3 [59520/60000 (99%)]\tLoss: 0.215823\n",
      "\n",
      "测试结果: Avg. 损失: 0.1354, 准确率: 9593/10000 (95.93%)\n",
      "\n",
      "60000\n",
      "938\n",
      "训练次数: 4 [0/60000 (0%)]\tLoss: 0.103216\n",
      "训练次数: 4 [640/60000 (1%)]\tLoss: 0.105037\n",
      "训练次数: 4 [1280/60000 (2%)]\tLoss: 0.140064\n",
      "训练次数: 4 [1920/60000 (3%)]\tLoss: 0.133167\n",
      "训练次数: 4 [2560/60000 (4%)]\tLoss: 0.065402\n",
      "训练次数: 4 [3200/60000 (5%)]\tLoss: 0.027044\n",
      "训练次数: 4 [3840/60000 (6%)]\tLoss: 0.037565\n",
      "训练次数: 4 [4480/60000 (7%)]\tLoss: 0.152197\n",
      "训练次数: 4 [5120/60000 (9%)]\tLoss: 0.221047\n",
      "训练次数: 4 [5760/60000 (10%)]\tLoss: 0.150916\n",
      "训练次数: 4 [6400/60000 (11%)]\tLoss: 0.048280\n",
      "训练次数: 4 [7040/60000 (12%)]\tLoss: 0.066021\n",
      "训练次数: 4 [7680/60000 (13%)]\tLoss: 0.066052\n",
      "训练次数: 4 [8320/60000 (14%)]\tLoss: 0.091067\n",
      "训练次数: 4 [8960/60000 (15%)]\tLoss: 0.048947\n",
      "训练次数: 4 [9600/60000 (16%)]\tLoss: 0.112601\n",
      "训练次数: 4 [10240/60000 (17%)]\tLoss: 0.070838\n",
      "训练次数: 4 [10880/60000 (18%)]\tLoss: 0.083757\n",
      "训练次数: 4 [11520/60000 (19%)]\tLoss: 0.164100\n",
      "训练次数: 4 [12160/60000 (20%)]\tLoss: 0.050240\n",
      "训练次数: 4 [12800/60000 (21%)]\tLoss: 0.034564\n",
      "训练次数: 4 [13440/60000 (22%)]\tLoss: 0.138829\n",
      "训练次数: 4 [14080/60000 (23%)]\tLoss: 0.152232\n",
      "训练次数: 4 [14720/60000 (25%)]\tLoss: 0.129587\n",
      "训练次数: 4 [15360/60000 (26%)]\tLoss: 0.219824\n",
      "训练次数: 4 [16000/60000 (27%)]\tLoss: 0.139971\n",
      "训练次数: 4 [16640/60000 (28%)]\tLoss: 0.147525\n",
      "训练次数: 4 [17280/60000 (29%)]\tLoss: 0.064670\n",
      "训练次数: 4 [17920/60000 (30%)]\tLoss: 0.138067\n",
      "训练次数: 4 [18560/60000 (31%)]\tLoss: 0.142304\n",
      "训练次数: 4 [19200/60000 (32%)]\tLoss: 0.113559\n",
      "训练次数: 4 [19840/60000 (33%)]\tLoss: 0.215553\n",
      "训练次数: 4 [20480/60000 (34%)]\tLoss: 0.115681\n",
      "训练次数: 4 [21120/60000 (35%)]\tLoss: 0.091857\n",
      "训练次数: 4 [21760/60000 (36%)]\tLoss: 0.175000\n",
      "训练次数: 4 [22400/60000 (37%)]\tLoss: 0.068291\n",
      "训练次数: 4 [23040/60000 (38%)]\tLoss: 0.085197\n",
      "训练次数: 4 [23680/60000 (39%)]\tLoss: 0.084615\n",
      "训练次数: 4 [24320/60000 (41%)]\tLoss: 0.121580\n",
      "训练次数: 4 [24960/60000 (42%)]\tLoss: 0.081867\n",
      "训练次数: 4 [25600/60000 (43%)]\tLoss: 0.087956\n",
      "训练次数: 4 [26240/60000 (44%)]\tLoss: 0.311846\n",
      "训练次数: 4 [26880/60000 (45%)]\tLoss: 0.246615\n",
      "训练次数: 4 [27520/60000 (46%)]\tLoss: 0.021150\n",
      "训练次数: 4 [28160/60000 (47%)]\tLoss: 0.036595\n",
      "训练次数: 4 [28800/60000 (48%)]\tLoss: 0.137852\n",
      "训练次数: 4 [29440/60000 (49%)]\tLoss: 0.075634\n",
      "训练次数: 4 [30080/60000 (50%)]\tLoss: 0.041338\n",
      "训练次数: 4 [30720/60000 (51%)]\tLoss: 0.040192\n",
      "训练次数: 4 [31360/60000 (52%)]\tLoss: 0.113219\n",
      "训练次数: 4 [32000/60000 (53%)]\tLoss: 0.124715\n",
      "训练次数: 4 [32640/60000 (54%)]\tLoss: 0.064807\n",
      "训练次数: 4 [33280/60000 (55%)]\tLoss: 0.047398\n",
      "训练次数: 4 [33920/60000 (57%)]\tLoss: 0.075889\n",
      "训练次数: 4 [34560/60000 (58%)]\tLoss: 0.065479\n",
      "训练次数: 4 [35200/60000 (59%)]\tLoss: 0.104869\n",
      "训练次数: 4 [35840/60000 (60%)]\tLoss: 0.223840\n",
      "训练次数: 4 [36480/60000 (61%)]\tLoss: 0.054977\n",
      "训练次数: 4 [37120/60000 (62%)]\tLoss: 0.119906\n",
      "训练次数: 4 [37760/60000 (63%)]\tLoss: 0.027537\n",
      "训练次数: 4 [38400/60000 (64%)]\tLoss: 0.220793\n",
      "训练次数: 4 [39040/60000 (65%)]\tLoss: 0.088776\n",
      "训练次数: 4 [39680/60000 (66%)]\tLoss: 0.185243\n",
      "训练次数: 4 [40320/60000 (67%)]\tLoss: 0.030268\n",
      "训练次数: 4 [40960/60000 (68%)]\tLoss: 0.034619\n",
      "训练次数: 4 [41600/60000 (69%)]\tLoss: 0.029417\n",
      "训练次数: 4 [42240/60000 (70%)]\tLoss: 0.177558\n",
      "训练次数: 4 [42880/60000 (71%)]\tLoss: 0.168960\n",
      "训练次数: 4 [43520/60000 (72%)]\tLoss: 0.050454\n",
      "训练次数: 4 [44160/60000 (74%)]\tLoss: 0.147399\n",
      "训练次数: 4 [44800/60000 (75%)]\tLoss: 0.133030\n",
      "训练次数: 4 [45440/60000 (76%)]\tLoss: 0.085791\n",
      "训练次数: 4 [46080/60000 (77%)]\tLoss: 0.054196\n",
      "训练次数: 4 [46720/60000 (78%)]\tLoss: 0.044618\n",
      "训练次数: 4 [47360/60000 (79%)]\tLoss: 0.095536\n",
      "训练次数: 4 [48000/60000 (80%)]\tLoss: 0.152067\n",
      "训练次数: 4 [48640/60000 (81%)]\tLoss: 0.066052\n",
      "训练次数: 4 [49280/60000 (82%)]\tLoss: 0.074265\n",
      "训练次数: 4 [49920/60000 (83%)]\tLoss: 0.066788\n",
      "训练次数: 4 [50560/60000 (84%)]\tLoss: 0.062612\n",
      "训练次数: 4 [51200/60000 (85%)]\tLoss: 0.125131\n",
      "训练次数: 4 [51840/60000 (86%)]\tLoss: 0.179265\n",
      "训练次数: 4 [52480/60000 (87%)]\tLoss: 0.071428\n",
      "训练次数: 4 [53120/60000 (88%)]\tLoss: 0.227314\n",
      "训练次数: 4 [53760/60000 (90%)]\tLoss: 0.115665\n",
      "训练次数: 4 [54400/60000 (91%)]\tLoss: 0.114081\n",
      "训练次数: 4 [55040/60000 (92%)]\tLoss: 0.099036\n",
      "训练次数: 4 [55680/60000 (93%)]\tLoss: 0.064281\n",
      "训练次数: 4 [56320/60000 (94%)]\tLoss: 0.102211\n",
      "训练次数: 4 [56960/60000 (95%)]\tLoss: 0.053933\n",
      "训练次数: 4 [57600/60000 (96%)]\tLoss: 0.021693\n",
      "训练次数: 4 [58240/60000 (97%)]\tLoss: 0.179857\n",
      "训练次数: 4 [58880/60000 (98%)]\tLoss: 0.088264\n",
      "训练次数: 4 [59520/60000 (99%)]\tLoss: 0.118196\n",
      "\n",
      "测试结果: Avg. 损失: 0.1290, 准确率: 9629/10000 (96.29%)\n",
      "\n"
     ]
    }
   ],
   "source": [
    "import torchvision\n",
    "from torch import nn\n",
    "import torch\n",
    "from torch import optim\n",
    "from torch.utils.data import Dataset, DataLoader\n",
    "import torch.nn.functional as F\n",
    "import os\n",
    "\n",
    "# Mini-batch size used for the training loader\n",
    "train_batch_size = 64\n",
    "# Larger batches are fine at evaluation time (no gradients kept)\n",
    "test_batch_size = 1000\n",
    "# MNIST images are 28x28 single-channel pixels\n",
    "img_size = 28\n",
    "\n",
    "\n",
    "def get_dataloader(mode):\n",
    "    \"\"\"\n",
    "    Build a DataLoader over the MNIST dataset.\n",
    "    :param mode: True -> training split, False -> test split\n",
    "    :return: DataLoader over the requested split\n",
    "    \"\"\"\n",
    "    # The dataset is assumed to be downloaded already, hence download=False.\n",
    "    # Normalize with (0.1307,), (0.3081,): the mean and std of MNIST pixels;\n",
    "    # MNIST is single-channel (grayscale), so each tuple holds one value.\n",
    "    dataset = torchvision.datasets.MNIST('./mini', train=mode, download=False,\n",
    "                                         transform=torchvision.transforms.Compose([\n",
    "                                             torchvision.transforms.ToTensor(),\n",
    "                                             torchvision.transforms.Normalize(\n",
    "                                                 (0.1307,), (0.3081,))\n",
    "                                         ]))\n",
    "\n",
    "    # Bug fix: honour the configured batch sizes instead of a hard-coded 64;\n",
    "    # test_batch_size was previously defined but never used.\n",
    "    batch_size = train_batch_size if mode else test_batch_size\n",
    "    return DataLoader(dataset, batch_size=batch_size, shuffle=True)\n",
    "\n",
    "\n",
    "class MnistNet(nn.Module):\n",
    "    \"\"\"Two-layer fully connected classifier for 28x28 MNIST digits.\"\"\"\n",
    "\n",
    "    def __init__(self):\n",
    "        super(MnistNet, self).__init__()\n",
    "        # 784 input pixels -> 28 hidden units -> 10 digit classes.\n",
    "        # Attribute names fc1/fc2 are kept unchanged: they are the\n",
    "        # state_dict keys used by the saved checkpoint files.\n",
    "        self.fc1 = nn.Linear(28 * 28 * 1, 28)\n",
    "        self.fc2 = nn.Linear(28, 10)\n",
    "\n",
    "    def forward(self, x):\n",
    "        # Flatten [batch, 1, 28, 28] images into [batch, 784] rows\n",
    "        flat = x.view(-1, 28 * 28 * 1)\n",
    "        hidden = F.relu(self.fc1(flat))  # [batch_size, 28]\n",
    "        logits = self.fc2(hidden)  # [batch_size, 10]\n",
    "        # Log-probabilities; pairs with F.nll_loss during training\n",
    "        return F.log_softmax(logits, dim=-1)\n",
    "\n",
    "\n",
    "# Instantiate the network and an Adam optimizer over its parameters.\n",
    "mnist_net = MnistNet()\n",
    "optimizer = optim.Adam(mnist_net.parameters(), lr=0.001)\n",
    "\n",
    "# Resume from checkpoint files if present (model weights + optimizer state).\n",
    "if os.path.exists(\"./model/mnist_net.pkl\"):\n",
    "    mnist_net.load_state_dict(torch.load(\"./model/mnist_net.pkl\"))\n",
    "    optimizer.load_state_dict(torch.load(\"./model/mnist_optimizer.pkl\"))\n",
    "\n",
    "# Running history of logged training losses and the sample count at which\n",
    "# each loss was recorded (filled in by train()).\n",
    "train_loss_list = []\n",
    "train_count_list = []\n",
    "\n",
    "\n",
    "def train(epoch):\n",
    "    \"\"\"\n",
    "    Run one training epoch over the MNIST training set.\n",
    "    :param epoch: zero-based index of the current epoch (logging/bookkeeping only)\n",
    "    :return: None\n",
    "    \"\"\"\n",
    "    mode = True\n",
    "    mnist_net.train(mode=mode)\n",
    "    # Fresh loader each epoch (re-shuffles the data)\n",
    "    train_dataloader = get_dataloader(mode)\n",
    "    print(len(train_dataloader.dataset))\n",
    "    print(len(train_dataloader))\n",
    "    for idx, (data, target) in enumerate(train_dataloader):\n",
    "        # Clear gradients accumulated from the previous step\n",
    "        optimizer.zero_grad()\n",
    "        output = mnist_net(data)\n",
    "        # Negative log-likelihood loss (the model outputs log-probabilities)\n",
    "        loss = F.nll_loss(output, target)\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "        if idx % 10 == 0:\n",
    "            print('训练次数: {} [{}/{} ({:.0f}%)]\\tLoss: {:.6f}'.format(\n",
    "                epoch,\n",
    "                idx * len(data),\n",
    "                len(train_dataloader.dataset),\n",
    "                100. * idx / len(train_dataloader),\n",
    "                loss.item()))\n",
    "            train_loss_list.append(loss.item())\n",
    "            # Bug fix: epoch starts at 0 and the offset must be in samples, so\n",
    "            # use epoch * len(dataset); the old (epoch - 1) * len(loader) gave\n",
    "            # negative counts in epoch 0 and mixed batch/sample units.\n",
    "            train_count_list.append(idx * train_batch_size + epoch * len(train_dataloader.dataset))\n",
    "            # Checkpoint model and optimizer state every 10 batches\n",
    "            torch.save(mnist_net.state_dict(), \"./model/mnist_net.pkl\")\n",
    "            torch.save(optimizer.state_dict(), './model/mnist_optimizer.pkl')\n",
    "\n",
    "\n",
    "def test():\n",
    "    \"\"\"\n",
    "    Evaluate the current model on the MNIST test set and print the\n",
    "    average loss and accuracy.\n",
    "    :return: None\n",
    "    \"\"\"\n",
    "    test_loss = 0\n",
    "    correct = 0\n",
    "    # Switch layers such as dropout/batch-norm to evaluation behaviour\n",
    "    mnist_net.eval()\n",
    "    test_dataloader = get_dataloader(False)\n",
    "    # No gradients needed during evaluation\n",
    "    with torch.no_grad():\n",
    "        for data, target in test_dataloader:\n",
    "            output = mnist_net(data)\n",
    "            # Sum (not mean) per-batch losses so we can average over the dataset\n",
    "            test_loss += F.nll_loss(output, target, reduction='sum').item()\n",
    "            # Index of the max log-probability, shape [batch_size, 1]\n",
    "            # (replaces deprecated output.data.max(...) access)\n",
    "            pred = output.argmax(dim=1, keepdim=True)\n",
    "            # Accumulate as a plain Python int instead of a 0-d tensor\n",
    "            correct += pred.eq(target.view_as(pred)).sum().item()\n",
    "\n",
    "    test_loss /= len(test_dataloader.dataset)\n",
    "    print('\\n测试结果: Avg. 损失: {:.4f}, 准确率: {}/{} ({:.2f}%)\\n'.format(\n",
    "        test_loss,\n",
    "        correct,\n",
    "        len(test_dataloader.dataset),\n",
    "        100. * correct / len(test_dataloader.dataset)))\n",
    "\n",
    "\n",
    "# Train for 5 epochs, evaluating on the test set after each epoch\n",
    "for i in range(5):  # train the model for 5 rounds\n",
    "    train(i)\n",
    "    test()\n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "outputs": [],
   "source": [],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}