{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 3.3 线性回归的简洁实现"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "import numpy as np"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "num_inputs = 2\n",
    "num_examples = 1000\n",
    "true_w = [2, -3.4]\n",
    "true_b = 4.2"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "features = torch.tensor(np.random.normal(0, 1, (num_examples, num_inputs)), dtype=torch.float)\n",
    "labels = true_w[0] * features[:, 0] + true_w[1] * features[:, 1] + true_b\n",
    "labels += torch.tensor(np.random.normal(0, 0.01, size=labels.size()), dtype=torch.float)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Peek at a few samples only; dumping the full 1000-sample tensors bloats\n",
    "# the notebook and hides the narrative.\n",
    "features[:5], labels[:5]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch.utils.data as Data\n",
    "\n",
    "batch_size = 10\n",
    "dataset = Data.TensorDataset(features, labels)\n",
    "data_iter = Data.DataLoader(dataset, batch_size, shuffle=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[ 0.1036,  0.8549],\n",
      "        [-0.0320, -0.6014],\n",
      "        [ 0.1167, -0.1024],\n",
      "        [ 2.0347,  0.9073],\n",
      "        [-0.1502,  0.5169],\n",
      "        [ 0.0972,  0.1426],\n",
      "        [ 0.0598,  0.5166],\n",
      "        [ 2.2002, -0.9460],\n",
      "        [ 1.3042,  0.9896],\n",
      "        [-1.7514, -0.3502]]) tensor([ 1.5058,  6.1764,  4.8044,  5.1855,  2.1568,  3.9205,  2.5513, 11.8253,\n",
      "         3.4435,  1.8858])\n"
     ]
    }
   ],
   "source": [
    "for X, y in data_iter:\n",
    "    print(X, y)\n",
    "    break"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "from torch import nn"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "LinearNet(\n",
      "  (linear): Linear(in_features=2, out_features=1, bias=True)\n",
      ")\n"
     ]
    }
   ],
   "source": [
    "class LinearNet(nn.Module):\n",
    "    def __init__(self, n_features):\n",
    "        super(LinearNet, self).__init__()\n",
    "        self.linear = nn.Linear(n_features, 1)\n",
    "        \n",
    "    # forward 定义前向传播\n",
    "    def forward(self, x):\n",
    "        y = self.linear(x);\n",
    "        return y\n",
    "    \n",
    "net = LinearNet(num_inputs)\n",
    "print(net)  # 使用print可以打印出网络的结构"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "net = nn.Sequential(\n",
    "    nn.Linear(num_inputs, 1)\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Parameter containing:\n",
      "tensor([[0.3257, 0.3403]], requires_grad=True)\n",
      "Parameter containing:\n",
      "tensor([-0.5525], requires_grad=True)\n"
     ]
    }
   ],
   "source": [
    "for param in net.parameters():\n",
    "    print(param)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Parameter containing:\n",
       "tensor([0.], requires_grad=True)"
      ]
     },
     "execution_count": 13,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from torch.nn import init\n",
    "\n",
    "init.normal_(net[0].weight, mean=0, std=0.01)\n",
    "init.constant_(net[0].bias, val=0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "loss = nn.MSELoss()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "SGD (\n",
      "Parameter Group 0\n",
      "    dampening: 0\n",
      "    lr: 0.03\n",
      "    momentum: 0\n",
      "    nesterov: False\n",
      "    weight_decay: 0\n",
      ")\n"
     ]
    }
   ],
   "source": [
    "import torch.optim as optim\n",
    "\n",
    "optimizer = optim.SGD(net.parameters(), lr=0.03)\n",
    "print(optimizer)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch 1, loss: 0.000118\n",
      "epoch 2, loss: 0.000047\n",
      "epoch 3, loss: 0.000100\n",
      "epoch 4, loss: 0.000060\n"
     ]
    }
   ],
   "source": [
    "num_epochs = 4\n",
    "for epoch in range(1, num_epochs + 1):\n",
    "    for X, y in data_iter:\n",
    "        output = net(X)\n",
    "        l = loss(output, y.view(-1, 1))\n",
    "        optimizer.zero_grad()    # 梯度清零, net.zero_grad()\n",
    "        l.backward()\n",
    "        optimizer.step()\n",
    "    print('epoch %d, loss: %f' % (epoch, l.item()))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[2, -3.4] Parameter containing:\n",
      "tensor([[ 1.9998, -3.4007]], requires_grad=True)\n",
      "4.2 Parameter containing:\n",
      "tensor([4.2001], requires_grad=True)\n"
     ]
    }
   ],
   "source": [
    "dense = net[0]\n",
    "print(true_w, dense.weight)\n",
    "print(true_b, dense.bias)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
