{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "\n",
    "# Prepare the dataset.\n",
    "# x and y are 3x1 matrices: 3 samples in total, each with a single feature.\n",
    "x_data = torch.tensor([[1.0], [2.0], [3.0]])\n",
    "y_data = torch.tensor([[2.0], [4.0], [6.0]])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Define the model.\n",
    "\n",
    "class LinearModel(torch.nn.Module):\n",
    "    \"\"\"Simple linear regression model: y = w * x + b.\"\"\"\n",
    "\n",
    "    def __init__(self):\n",
    "        super().__init__()\n",
    "        # Linear(1, 1): both the input x and the output y have one feature.\n",
    "        # The layer's learnable parameters w and b are accessible as\n",
    "        # self.linear.weight and self.linear.bias respectively.\n",
    "        self.linear = torch.nn.Linear(1, 1)\n",
    "\n",
    "    def forward(self, x):\n",
    "        y_pred = self.linear(x)\n",
    "        return y_pred\n",
    "\n",
    "model = LinearModel()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Define the loss function and the optimizer.\n",
    "# Note: MSELoss(size_average=False) is deprecated; reduction='sum'\n",
    "# is the modern equivalent (sum of squared errors over the batch).\n",
    "criterion = torch.nn.MSELoss(reduction='sum')\n",
    "optimizer = torch.optim.SGD(model.parameters(), lr=0.01)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 90.488525390625\n",
      "1 40.72481155395508\n",
      "2 18.5650691986084\n",
      "3 8.693921089172363\n",
      "4 4.293397903442383\n",
      "5 2.3283302783966064\n",
      "6 1.4475433826446533\n",
      "7 1.0495343208312988\n",
      "8 0.8665304780006409\n",
      "9 0.7793232202529907\n",
      "10 0.7348449230194092\n",
      "11 0.7094696164131165\n",
      "12 0.6926783919334412\n",
      "13 0.6797871589660645\n",
      "14 0.6687111854553223\n",
      "15 0.6585185527801514\n",
      "16 0.648795485496521\n",
      "17 0.6393561959266663\n",
      "18 0.6301159858703613\n",
      "19 0.6210375428199768\n",
      "20 0.6121019124984741\n",
      "21 0.6033006310462952\n",
      "22 0.594628095626831\n",
      "23 0.5860813856124878\n",
      "24 0.5776585340499878\n",
      "25 0.5693563222885132\n",
      "26 0.5611732602119446\n",
      "27 0.5531089305877686\n",
      "28 0.5451596975326538\n",
      "29 0.5373246073722839\n",
      "30 0.5296023488044739\n",
      "31 0.5219913125038147\n",
      "32 0.5144897103309631\n",
      "33 0.5070952773094177\n",
      "34 0.4998076260089874\n",
      "35 0.4926248788833618\n",
      "36 0.48554468154907227\n",
      "37 0.47856658697128296\n",
      "38 0.47168904542922974\n",
      "39 0.46491050720214844\n",
      "40 0.45822885632514954\n",
      "41 0.45164310932159424\n",
      "42 0.4451523423194885\n",
      "43 0.4387545883655548\n",
      "44 0.43244919180870056\n",
      "45 0.42623451352119446\n",
      "46 0.42010873556137085\n",
      "47 0.41407138109207153\n",
      "48 0.40812060236930847\n",
      "49 0.40225470066070557\n",
      "50 0.39647403359413147\n",
      "51 0.390775591135025\n",
      "52 0.3851596713066101\n",
      "53 0.37962451577186584\n",
      "54 0.3741684556007385\n",
      "55 0.36879128217697144\n",
      "56 0.3634909987449646\n",
      "57 0.3582673668861389\n",
      "58 0.3531181812286377\n",
      "59 0.34804362058639526\n",
      "60 0.34304168820381165\n",
      "61 0.3381115794181824\n",
      "62 0.33325228095054626\n",
      "63 0.32846301794052124\n",
      "64 0.323742538690567\n",
      "65 0.3190895617008209\n",
      "66 0.3145037293434143\n",
      "67 0.30998367071151733\n",
      "68 0.30552899837493896\n",
      "69 0.3011380434036255\n",
      "70 0.2968103587627411\n",
      "71 0.2925446033477783\n",
      "72 0.2883402109146118\n",
      "73 0.2841964364051819\n",
      "74 0.2801121771335602\n",
      "75 0.27608639001846313\n",
      "76 0.272118479013443\n",
      "77 0.2682079076766968\n",
      "78 0.26435327529907227\n",
      "79 0.2605540454387665\n",
      "80 0.2568093538284302\n",
      "81 0.25311875343322754\n",
      "82 0.24948111176490784\n",
      "83 0.2458956390619278\n",
      "84 0.24236176908016205\n",
      "85 0.2388785481452942\n",
      "86 0.2354455590248108\n",
      "87 0.23206192255020142\n",
      "88 0.22872678935527802\n",
      "89 0.22543981671333313\n",
      "90 0.22219964861869812\n",
      "91 0.21900629997253418\n",
      "92 0.2158588469028473\n",
      "93 0.21275657415390015\n",
      "94 0.209698885679245\n",
      "95 0.20668533444404602\n",
      "96 0.20371489226818085\n",
      "97 0.20078715682029724\n",
      "98 0.19790150225162506\n",
      "99 0.19505749642848969\n",
      "w =  1.7059825658798218\n",
      "b =  0.66837078332901\n",
      "y_pred =  tensor([[7.4923]])\n"
     ]
    }
   ],
   "source": [
    "# Train the model.\n",
    "for epoch in range(100):\n",
    "    y_pred = model(x_data)            # forward pass: prediction\n",
    "    loss = criterion(y_pred, y_data)  # forward pass: loss\n",
    "\n",
    "    print(epoch, loss.item())\n",
    "\n",
    "    optimizer.zero_grad()  # clear gradients accumulated from the previous step\n",
    "    loss.backward()        # backward pass: autograd computes the gradients\n",
    "    optimizer.step()       # update the parameters w and b\n",
    "\n",
    "print('w = ', model.linear.weight.item())\n",
    "print('b = ', model.linear.bias.item())\n",
    "\n",
    "# Evaluate on an unseen input; no_grad() skips building the autograd graph\n",
    "# (preferred over accessing .data, which bypasses autograd's safety checks).\n",
    "x_test = torch.tensor([[4.0]])\n",
    "with torch.no_grad():\n",
    "    y_test = model(x_test)\n",
    "print('y_pred = ', y_test)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "pytorch",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
