{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "459193f5",
   "metadata": {},
   "source": [
    "# 查看数据"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "d0e48791",
   "metadata": {},
   "outputs": [],
   "source": [
    "%matplotlib inline\n",
    "import numpy as np\n",
    "import pandas as pd"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "8af8c992",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 读取csv文件\n",
    "\n",
    "train_data = pd.read_csv('train.csv')\n",
    "test_data = pd.read_csv('test.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "865c01ff",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(1314, 81)\n",
      "(146, 81)\n"
     ]
    }
   ],
   "source": [
    "# 查看文件行数与列数\n",
     "print(train_data.shape) #1314个样本，一个样本有81列（80个特征+1个标签）\n",
     "print(test_data.shape) #146个样本，一个样本有81列（80个特征+1个标签）"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "cae2c2de",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "   Id  MSSubClass MSZoning  LotFrontage SaleType SaleCondition  SalePrice\n",
      "0   1          60       RL         65.0       WD        Normal     208500\n",
      "1   2          20       RL         80.0       WD        Normal     181500\n",
      "2   3          60       RL         68.0       WD        Normal     223500\n",
      "3   4          70       RL         60.0       WD       Abnorml     140000\n"
     ]
    }
   ],
   "source": [
    "# 看前四个和最后两个特征，以及相应标签（房价）\n",
    "print(train_data.iloc[0:4, [0, 1, 2, 3, -3, -2, -1]])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "c250fe93",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "   MSSubClass MSZoning  LotFrontage  LotArea  YrSold SaleType SaleCondition\n",
      "0          60       RL         65.0     8450    2008       WD        Normal\n",
      "1          20       RL         80.0     9600    2007       WD        Normal\n",
      "2          60       RL         68.0    11250    2008       WD        Normal\n",
      "3          70       RL         60.0     9550    2006       WD       Abnorml\n",
      "0    208500\n",
      "1    181500\n",
      "2    223500\n",
      "3    140000\n",
      "Name: SalePrice, dtype: int64\n"
     ]
    }
   ],
   "source": [
     "# 去除Id列与标签列，将训练集与测试集的特征连接起来，便于统一处理\n",
    "all_features = pd.concat([train_data.iloc[:, 1:-1], test_data.iloc[:, 1:-1]], axis=0)  # 全特征值\n",
    "print(all_features.iloc[0:4, [0, 1, 2, 3, -3, -2, -1]])\n",
    "\n",
    "\n",
    "all_labels = pd.concat((train_data.loc[:,'SalePrice'],test_data.loc[:,'SalePrice']))\n",
    "print(all_labels[0:4])"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "49836a77",
   "metadata": {},
   "source": [
    "# 数据预处理"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "1fc569a5",
   "metadata": {},
   "outputs": [],
   "source": [
    "%matplotlib inline\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "import torch"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "b6278ca9",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "MSSubClass         int64\n",
       "MSZoning          object\n",
       "LotFrontage      float64\n",
       "LotArea            int64\n",
       "Street            object\n",
       "                  ...   \n",
       "MiscVal            int64\n",
       "MoSold             int64\n",
       "YrSold             int64\n",
       "SaleType          object\n",
       "SaleCondition     object\n",
       "Length: 79, dtype: object"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 查看每列内容的属性\n",
    "all_features.dtypes"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "7793de27",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "数值特征列名： Index(['MSSubClass', 'LotFrontage', 'LotArea', 'OverallQual', 'OverallCond',\n",
      "       'YearBuilt', 'YearRemodAdd', 'MasVnrArea', 'BsmtFinSF1', 'BsmtFinSF2',\n",
      "       'BsmtUnfSF', 'TotalBsmtSF', '1stFlrSF', '2ndFlrSF', 'LowQualFinSF',\n",
      "       'GrLivArea', 'BsmtFullBath', 'BsmtHalfBath', 'FullBath', 'HalfBath',\n",
      "       'BedroomAbvGr', 'KitchenAbvGr', 'TotRmsAbvGrd', 'Fireplaces',\n",
      "       'GarageYrBlt', 'GarageCars', 'GarageArea', 'WoodDeckSF', 'OpenPorchSF',\n",
      "       'EnclosedPorch', '3SsnPorch', 'ScreenPorch', 'PoolArea', 'MiscVal',\n",
      "       'MoSold', 'YrSold'],\n",
      "      dtype='object')\n",
      "非数值特征列名： Index(['MSZoning', 'Street', 'Alley', 'LotShape', 'LandContour', 'Utilities',\n",
      "       'LotConfig', 'LandSlope', 'Neighborhood', 'Condition1', 'Condition2',\n",
      "       'BldgType', 'HouseStyle', 'RoofStyle', 'RoofMatl', 'Exterior1st',\n",
      "       'Exterior2nd', 'MasVnrType', 'ExterQual', 'ExterCond', 'Foundation',\n",
      "       'BsmtQual', 'BsmtCond', 'BsmtExposure', 'BsmtFinType1', 'BsmtFinType2',\n",
      "       'Heating', 'HeatingQC', 'CentralAir', 'Electrical', 'KitchenQual',\n",
      "       'Functional', 'FireplaceQu', 'GarageType', 'GarageFinish', 'GarageQual',\n",
      "       'GarageCond', 'PavedDrive', 'PoolQC', 'Fence', 'MiscFeature',\n",
      "       'SaleType', 'SaleCondition'],\n",
      "      dtype='object')\n"
     ]
    }
   ],
   "source": [
    "# 取出所有的数值型特征名称（列标题）\n",
    "numeric_features = all_features.dtypes[all_features.dtypes != 'object'].index\n",
    "print('数值特征列名：', numeric_features)\n",
    "\n",
    "# 取出所有的非数值型特征名称（列标题）\n",
    "object_features = all_features.dtypes[all_features.dtypes == 'object'].index\n",
    "print('非数值特征列名：', object_features)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "d4f6fe5e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 将数值型特征进行 z-score 标准化\n",
    "all_features[numeric_features] = all_features[numeric_features].apply(\n",
    "    lambda x: (x - x.mean()) / (x.std()))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "ea6ada50",
   "metadata": {},
   "outputs": [],
   "source": [
     "# 标准化之后，各数值特征的均值为0，因此可以将缺失值设置为0\n",
    "all_features[numeric_features] = all_features[numeric_features].fillna(0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "5beffbe1",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(1460, 331)"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 非数值数据预处理\n",
    "\n",
    "# 将缺省值NA视为有效特征值\n",
    "all_features = pd.get_dummies(all_features, dummy_na=True)\n",
    "\n",
     "all_features.shape # 1460个样本，每个样本331个特征"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "b76113c6",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[ 0.0733, -0.2079, -0.2071,  ...,  1.0000,  0.0000,  0.0000],\n",
      "        [-0.8723,  0.4097, -0.0919,  ...,  1.0000,  0.0000,  0.0000],\n",
      "        [ 0.0733, -0.0844,  0.0735,  ...,  1.0000,  0.0000,  0.0000],\n",
      "        ...,\n",
      "        [-0.8723, -0.0844, -0.1706,  ...,  0.0000,  1.0000,  0.0000],\n",
      "        [ 0.0733,  0.0000, -0.0947,  ...,  1.0000,  0.0000,  0.0000],\n",
      "        [ 0.0733,  1.5627,  0.4265,  ...,  1.0000,  0.0000,  0.0000]])\n",
      "torch.Size([1314, 331])\n"
     ]
    }
   ],
   "source": [
    "num_train = train_data.shape[0]\n",
    "\n",
    "# 样本特征转换为tensor\n",
    "train_features = torch.tensor(all_features[:num_train].values, dtype=torch.float) #(1314, 331)\n",
    "test_features = torch.tensor(all_features[num_train:].values, dtype=torch.float) #(146, 331)\n",
    "\n",
    "print(train_features)\n",
    "print(train_features.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "3c10ce65",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[208500.],\n",
      "        [181500.],\n",
      "        [223500.],\n",
      "        ...,\n",
      "        [203000.],\n",
      "        [302000.],\n",
      "        [333168.]])\n"
     ]
    }
   ],
   "source": [
    "# 样本标签转换为tensor\n",
    "train_labels = torch.tensor(all_labels[:num_train].values, dtype=torch.float)\n",
    "test_labels = torch.tensor(all_labels[num_train:].values, dtype=torch.float)\n",
    "\n",
    "\n",
    "# 标签数据变为1列\n",
    "train_labels = train_labels.view(-1,1)\n",
    "test_labels = test_labels.view(-1,1)\n",
    "\n",
    "print(train_labels)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "26304b80",
   "metadata": {},
   "source": [
    "# 创建数据集与加载数据集"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "854fe2ce",
   "metadata": {},
   "outputs": [],
   "source": [
    "from torch.utils.data import Dataset, TensorDataset, DataLoader\n",
    "\n",
    "train_dataset = TensorDataset(train_features, train_labels)\n",
    "test_dataset = TensorDataset(test_features, test_labels)\n",
    "\n",
    "train_loader = DataLoader(dataset=train_dataset, batch_size=64, shuffle=True)\n",
    "test_loader = DataLoader(dataset=test_dataset, batch_size=64, shuffle=False)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "3d2fd2a3",
   "metadata": {},
   "source": [
    "# 构建神经网络"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "2cbb8d0e",
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "from torch import nn, optim\n",
    "import torch.nn.functional as F"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "bbe4432c",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 神经网络\n",
    "\n",
    "class Net(nn.Module):# 继承 torch 的 Module\n",
    "    def __init__(self, n_feature, n_output):\n",
    "        super(Net, self).__init__()     # 继承 __init__ 功能\n",
    "        # 定义每层用什么样的形式\n",
    "        self.layer1 = nn.Linear(n_feature, 600)   #\n",
    "        self.layer2 = nn.Linear(600, 1200)   #\n",
    "        self.layer3 = nn.Linear(1200, n_output)\n",
    "\n",
    "    def forward(self, x):   # 这同时也是 Module 中的 forward 功能\n",
    "        x = self.layer1(x)\n",
    "        x = torch.relu(x)      #\n",
    "        x = self.layer2(x)\n",
    "        x = torch.relu(x)      #\n",
    "        x = self.layer3(x)\n",
    "        return x\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "87d94320",
   "metadata": {},
   "source": [
    "# 训练神经网络"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "b5fdb51e",
   "metadata": {},
   "outputs": [],
   "source": [
    "from torchkeras import summary,Model\n",
    "\n",
    "# 初始化神经网络\n",
    "model = Net(331,1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "80ee51ac",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 定义损失函数与优化器\n",
    "\n",
    "#反向传播算法 SGD Adam等\n",
    "optimizer = torch.optim.Adam(model.parameters(), lr=0.01)\n",
    "#均方损失函数\n",
     "criterion = torch.nn.MSELoss()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "868b8fcc",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 0, trainloss: 521735859.238965, evalloss: 171339204.38356164\n",
      "epoch: 1, trainloss: 62410530.776255704, evalloss: 37098076.93150685\n",
      "epoch: 2, trainloss: 28027879.25722983, evalloss: 15058885.698630137\n",
      "epoch: 3, trainloss: 18648908.785388127, evalloss: 11985020.05479452\n",
      "epoch: 4, trainloss: 16826867.409436833, evalloss: 12366688.438356165\n",
      "epoch: 5, trainloss: 15603950.05175038, evalloss: 13981596.05479452\n",
      "epoch: 6, trainloss: 14880485.089802131, evalloss: 10222889.643835617\n",
      "epoch: 7, trainloss: 13775072.730593607, evalloss: 11436232.98630137\n",
      "epoch: 8, trainloss: 12835798.831050228, evalloss: 10463112.547945205\n",
      "epoch: 9, trainloss: 12903734.89193303, evalloss: 13086636.273972603\n",
      "epoch: 10, trainloss: 11739756.590563165, evalloss: 10595328.219178082\n",
      "epoch: 11, trainloss: 12376901.382039573, evalloss: 11367667.506849315\n",
      "epoch: 12, trainloss: 11263064.815829528, evalloss: 11654087.671232877\n",
      "epoch: 13, trainloss: 10764990.867579909, evalloss: 11196439.671232877\n",
      "epoch: 14, trainloss: 9937206.538812784, evalloss: 12337610.95890411\n",
      "epoch: 15, trainloss: 9451479.8782344, evalloss: 11529950.465753425\n",
      "epoch: 16, trainloss: 9111648.377473364, evalloss: 11755412.164383562\n",
      "epoch: 17, trainloss: 9198152.669710808, evalloss: 15905760.438356165\n",
      "epoch: 18, trainloss: 8423113.777777778, evalloss: 11053479.671232877\n",
      "epoch: 19, trainloss: 8065393.7534246575, evalloss: 12332035.94520548\n",
      "epoch: 20, trainloss: 7671705.181126332, evalloss: 12133970.849315068\n",
      "epoch: 21, trainloss: 7419864.243531203, evalloss: 12498705.753424658\n",
      "epoch: 22, trainloss: 6991840.706240487, evalloss: 14125670.356164383\n",
      "epoch: 23, trainloss: 6742593.168949772, evalloss: 12569989.698630137\n",
      "epoch: 24, trainloss: 6408341.503805175, evalloss: 12731612.93150685\n",
      "epoch: 25, trainloss: 6275595.5799086755, evalloss: 12369074.849315068\n",
      "epoch: 26, trainloss: 5701088.852359208, evalloss: 12852027.616438355\n",
      "epoch: 27, trainloss: 5629286.977168949, evalloss: 11264083.06849315\n",
      "epoch: 28, trainloss: 5297930.891933029, evalloss: 12533051.616438355\n",
      "epoch: 29, trainloss: 5004529.03500761, evalloss: 12987650.630136987\n",
      "epoch: 30, trainloss: 4831630.563165906, evalloss: 11847292.273972603\n",
      "epoch: 31, trainloss: 4664162.94672755, evalloss: 10625035.616438355\n",
      "epoch: 32, trainloss: 4617524.98021309, evalloss: 10910684.273972603\n",
      "epoch: 33, trainloss: 4452594.179604262, evalloss: 12436736.0\n",
      "epoch: 34, trainloss: 4193158.301369863, evalloss: 12704778.520547945\n",
      "epoch: 35, trainloss: 4325665.911719939, evalloss: 13015452.05479452\n",
      "epoch: 36, trainloss: 3925331.799086758, evalloss: 11545425.09589041\n",
      "epoch: 37, trainloss: 3858293.99695586, evalloss: 11230798.465753425\n",
      "epoch: 38, trainloss: 3714926.0517503805, evalloss: 12519187.506849315\n",
      "epoch: 39, trainloss: 3809324.791476408, evalloss: 13607331.287671233\n",
      "epoch: 40, trainloss: 3700541.1628614916, evalloss: 10862609.315068493\n",
      "epoch: 41, trainloss: 3472384.3348554033, evalloss: 10201700.602739725\n",
      "epoch: 42, trainloss: 3380417.710806697, evalloss: 9972448.219178082\n",
      "epoch: 43, trainloss: 3385592.4687975645, evalloss: 10987987.06849315\n",
      "epoch: 44, trainloss: 3239384.091324201, evalloss: 10292899.287671233\n",
      "epoch: 45, trainloss: 3046477.735159817, evalloss: 10237491.726027397\n",
      "epoch: 46, trainloss: 3098913.905631659, evalloss: 12719018.95890411\n",
      "epoch: 47, trainloss: 3033163.02283105, evalloss: 11009422.684931507\n",
      "epoch: 48, trainloss: 2933592.225266362, evalloss: 10558440.328767123\n",
      "epoch: 49, trainloss: 2769550.7458143076, evalloss: 10909268.602739725\n",
      "epoch: 50, trainloss: 2734155.278538813, evalloss: 10916020.821917808\n",
      "epoch: 51, trainloss: 2639979.9695586, evalloss: 10316973.369863013\n",
      "epoch: 52, trainloss: 2567296.5722983256, evalloss: 10204480.87671233\n",
      "epoch: 53, trainloss: 2641189.400304414, evalloss: 11701094.575342465\n",
      "epoch: 54, trainloss: 2589101.936073059, evalloss: 10877136.219178082\n",
      "epoch: 55, trainloss: 2458751.7625570777, evalloss: 9826655.780821918\n",
      "epoch: 56, trainloss: 2329425.97260274, evalloss: 10523628.712328767\n",
      "epoch: 57, trainloss: 2210184.3774733637, evalloss: 11526068.164383562\n",
      "epoch: 58, trainloss: 2396918.9406392695, evalloss: 10819462.794520548\n",
      "epoch: 59, trainloss: 2452773.704718417, evalloss: 9669638.136986302\n",
      "epoch: 60, trainloss: 2717239.7503805174, evalloss: 9755409.315068493\n",
      "epoch: 61, trainloss: 2427806.98934551, evalloss: 10300616.547945205\n",
      "epoch: 62, trainloss: 2148262.885844749, evalloss: 10761926.136986302\n",
      "epoch: 63, trainloss: 1985117.193302892, evalloss: 10226014.02739726\n",
      "epoch: 64, trainloss: 1893394.7305936073, evalloss: 11586937.205479452\n",
      "epoch: 65, trainloss: 1929403.3576864535, evalloss: 11631912.328767123\n",
      "epoch: 66, trainloss: 1931902.2039573821, evalloss: 9941959.452054795\n",
      "epoch: 67, trainloss: 1930964.6818873668, evalloss: 10452775.452054795\n",
      "epoch: 68, trainloss: 1819595.8356164384, evalloss: 11529518.465753425\n",
      "epoch: 69, trainloss: 1841552.4383561644, evalloss: 10978706.410958905\n",
      "epoch: 70, trainloss: 1795586.3744292238, evalloss: 10143615.12328767\n",
      "epoch: 71, trainloss: 1655301.5738203956, evalloss: 10480364.93150685\n",
      "epoch: 72, trainloss: 1649438.818873668, evalloss: 10428362.95890411\n",
      "epoch: 73, trainloss: 1571515.208523592, evalloss: 10489290.739726027\n",
      "epoch: 74, trainloss: 1585428.6697108068, evalloss: 10281619.726027397\n",
      "epoch: 75, trainloss: 1724025.1476407915, evalloss: 11342138.739726027\n",
      "epoch: 76, trainloss: 1445454.8371385084, evalloss: 10323805.150684932\n",
      "epoch: 77, trainloss: 1439157.00456621, evalloss: 11034876.493150685\n",
      "epoch: 78, trainloss: 1424300.2404870624, evalloss: 11029650.410958905\n",
      "epoch: 79, trainloss: 1404476.4779299847, evalloss: 9995997.589041095\n",
      "epoch: 80, trainloss: 1508031.6499238964, evalloss: 10421450.08219178\n",
      "epoch: 81, trainloss: 1313299.7442922373, evalloss: 10352136.98630137\n",
      "epoch: 82, trainloss: 1440689.7595129376, evalloss: 9953663.561643835\n",
      "epoch: 83, trainloss: 1418176.9771689498, evalloss: 10260481.753424658\n",
      "epoch: 84, trainloss: 1318292.5783866057, evalloss: 10514117.91780822\n",
      "epoch: 85, trainloss: 1285831.5190258753, evalloss: 10691772.93150685\n",
      "epoch: 86, trainloss: 1268079.8751902587, evalloss: 10103845.479452055\n",
      "epoch: 87, trainloss: 1298679.4368340943, evalloss: 11529867.616438355\n",
      "epoch: 88, trainloss: 1414408.797564688, evalloss: 10838381.369863013\n",
      "epoch: 89, trainloss: 1468422.3165905632, evalloss: 11695699.287671233\n",
      "epoch: 90, trainloss: 1393911.083713851, evalloss: 10701660.93150685\n",
      "epoch: 91, trainloss: 1247222.6818873668, evalloss: 11713440.438356165\n",
      "epoch: 92, trainloss: 1163470.0, evalloss: 10464869.260273973\n",
      "epoch: 93, trainloss: 1117871.0152207, evalloss: 11064324.383561645\n",
      "epoch: 94, trainloss: 1094142.9269406393, evalloss: 11178849.09589041\n",
      "epoch: 95, trainloss: 1115023.1202435312, evalloss: 11587623.671232877\n",
      "epoch: 96, trainloss: 1160883.406392694, evalloss: 10647469.369863013\n",
      "epoch: 97, trainloss: 1303426.884322679, evalloss: 11223546.739726027\n",
      "epoch: 98, trainloss: 1337803.4824961948, evalloss: 10601055.561643835\n",
      "epoch: 99, trainloss: 1267637.1354642313, evalloss: 12079070.90410959\n",
      "epoch: 100, trainloss: 1070059.4398782344, evalloss: 10997780.164383562\n",
      "epoch: 101, trainloss: 1000053.4216133942, evalloss: 10830728.10958904\n",
      "epoch: 102, trainloss: 1017781.9330289193, evalloss: 11144213.479452055\n",
      "epoch: 103, trainloss: 1411783.5342465753, evalloss: 9923852.712328767\n",
      "epoch: 104, trainloss: 1448599.5616438356, evalloss: 10543235.506849315\n",
      "epoch: 105, trainloss: 1508928.0669710806, evalloss: 13925917.808219178\n",
      "epoch: 106, trainloss: 1192553.5494672754, evalloss: 10753286.794520548\n",
      "epoch: 107, trainloss: 1047063.7610350077, evalloss: 11251956.383561645\n",
      "epoch: 108, trainloss: 1101224.2176560122, evalloss: 12004351.561643835\n",
      "epoch: 109, trainloss: 982776.8904109589, evalloss: 11172406.794520548\n",
      "epoch: 110, trainloss: 892950.7716894977, evalloss: 11479391.12328767\n",
      "epoch: 111, trainloss: 812829.0319634703, evalloss: 12240009.863013698\n",
      "epoch: 112, trainloss: 824712.6514459666, evalloss: 11066198.136986302\n",
      "epoch: 113, trainloss: 773766.105022831, evalloss: 10826079.12328767\n",
      "epoch: 114, trainloss: 851107.5175038052, evalloss: 10808389.04109589\n",
      "epoch: 115, trainloss: 785081.4444444445, evalloss: 11903106.630136987\n",
      "epoch: 116, trainloss: 752244.4946727549, evalloss: 11629736.767123288\n",
      "epoch: 117, trainloss: 738400.4855403348, evalloss: 12399206.794520548\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 118, trainloss: 687494.6347031964, evalloss: 11874117.91780822\n",
      "epoch: 119, trainloss: 697900.2161339421, evalloss: 11242716.93150685\n",
      "epoch: 120, trainloss: 697596.3576864536, evalloss: 11348873.205479452\n",
      "epoch: 121, trainloss: 708721.0030441401, evalloss: 11481766.575342465\n",
      "epoch: 122, trainloss: 654816.8272450533, evalloss: 11536120.767123288\n",
      "epoch: 123, trainloss: 659750.9436834095, evalloss: 13353090.849315068\n",
      "epoch: 124, trainloss: 941781.7366818874, evalloss: 13168506.08219178\n",
      "epoch: 125, trainloss: 1028176.1141552512, evalloss: 11720979.287671233\n",
      "epoch: 126, trainloss: 664824.0060882801, evalloss: 11970994.410958905\n",
      "epoch: 127, trainloss: 630045.2115677322, evalloss: 11423529.424657535\n",
      "epoch: 128, trainloss: 718533.8356164383, evalloss: 11321024.0\n",
      "epoch: 129, trainloss: 729158.0547945206, evalloss: 10969892.164383562\n",
      "epoch: 130, trainloss: 837889.1232876712, evalloss: 11659971.287671233\n",
      "epoch: 131, trainloss: 849741.5479452055, evalloss: 11202661.698630137\n",
      "epoch: 132, trainloss: 973661.0715372907, evalloss: 11800313.863013698\n",
      "epoch: 133, trainloss: 994609.4079147641, evalloss: 10721343.342465753\n",
      "epoch: 134, trainloss: 833024.3881278539, evalloss: 11957888.87671233\n",
      "epoch: 135, trainloss: 632627.297564688, evalloss: 11185999.342465753\n",
      "epoch: 136, trainloss: 591919.0304414004, evalloss: 11780995.726027397\n",
      "epoch: 137, trainloss: 631037.894977169, evalloss: 11621313.753424658\n",
      "epoch: 138, trainloss: 633306.0182648402, evalloss: 11499029.698630137\n",
      "epoch: 139, trainloss: 682005.6697108067, evalloss: 11214847.342465753\n",
      "epoch: 140, trainloss: 841108.8173515982, evalloss: 11409301.260273973\n",
      "epoch: 141, trainloss: 820578.1902587519, evalloss: 11002720.87671233\n",
      "epoch: 142, trainloss: 803859.8356164383, evalloss: 12225084.273972603\n",
      "epoch: 143, trainloss: 934824.5951293759, evalloss: 12265283.287671233\n",
      "epoch: 144, trainloss: 897480.8630136986, evalloss: 11406160.87671233\n",
      "epoch: 145, trainloss: 907892.5388127854, evalloss: 12366013.589041095\n",
      "epoch: 146, trainloss: 710802.8797564688, evalloss: 11993980.05479452\n",
      "epoch: 147, trainloss: 555983.8264840182, evalloss: 13015618.191780822\n",
      "epoch: 148, trainloss: 689969.6879756469, evalloss: 13060172.93150685\n",
      "epoch: 149, trainloss: 622439.5266362253, evalloss: 11746351.342465753\n",
      "epoch: 150, trainloss: 493415.299847793, evalloss: 12076663.01369863\n",
      "epoch: 151, trainloss: 527959.1103500761, evalloss: 11251387.616438355\n",
      "epoch: 152, trainloss: 545596.9847792998, evalloss: 11470315.178082192\n",
      "epoch: 153, trainloss: 545522.3089802131, evalloss: 11671157.479452055\n",
      "epoch: 154, trainloss: 649043.6514459666, evalloss: 13098386.410958905\n",
      "epoch: 155, trainloss: 993261.6240487063, evalloss: 11922326.356164383\n",
      "epoch: 156, trainloss: 1125515.5388127854, evalloss: 11545823.780821918\n",
      "epoch: 157, trainloss: 584546.4535768646, evalloss: 13159746.849315068\n",
      "epoch: 158, trainloss: 545495.7549467275, evalloss: 12020040.328767123\n",
      "epoch: 159, trainloss: 496527.2085235921, evalloss: 12264367.780821918\n",
      "epoch: 160, trainloss: 482294.2792998478, evalloss: 11163735.452054795\n",
      "epoch: 161, trainloss: 442057.24733637745, evalloss: 11540814.246575342\n",
      "epoch: 162, trainloss: 401221.7716894977, evalloss: 11726202.739726027\n",
      "epoch: 163, trainloss: 381044.94140030444, evalloss: 12307639.452054795\n",
      "epoch: 164, trainloss: 392212.8477929985, evalloss: 11511075.06849315\n",
      "epoch: 165, trainloss: 371751.7496194825, evalloss: 12356067.726027397\n",
      "epoch: 166, trainloss: 365941.6636225266, evalloss: 11739189.479452055\n",
      "epoch: 167, trainloss: 342480.80175038055, evalloss: 11627780.602739725\n",
      "epoch: 168, trainloss: 365996.1187214612, evalloss: 12154507.616438355\n",
      "epoch: 169, trainloss: 348891.3515981735, evalloss: 12229897.205479452\n",
      "epoch: 170, trainloss: 331934.03576864535, evalloss: 11876675.94520548\n",
      "epoch: 171, trainloss: 336198.1803652968, evalloss: 12044186.95890411\n",
      "epoch: 172, trainloss: 345934.46423135465, evalloss: 11810734.684931507\n",
      "epoch: 173, trainloss: 305003.68759512936, evalloss: 12042004.164383562\n",
      "epoch: 174, trainloss: 353490.0783866058, evalloss: 12159351.01369863\n",
      "epoch: 175, trainloss: 310315.2157534247, evalloss: 12567261.369863013\n",
      "epoch: 176, trainloss: 304968.9657534247, evalloss: 11987981.369863013\n",
      "epoch: 177, trainloss: 304740.3592085236, evalloss: 11695628.05479452\n",
      "epoch: 178, trainloss: 302738.00152207003, evalloss: 11881247.12328767\n",
      "epoch: 179, trainloss: 305681.70243531204, evalloss: 12791331.506849315\n",
      "epoch: 180, trainloss: 332041.03576864535, evalloss: 11502196.602739725\n",
      "epoch: 181, trainloss: 349565.36643835617, evalloss: 12475728.87671233\n",
      "epoch: 182, trainloss: 348620.0106544901, evalloss: 13470133.260273973\n",
      "epoch: 183, trainloss: 358053.93759512936, evalloss: 11712343.01369863\n",
      "epoch: 184, trainloss: 359558.8592085236, evalloss: 12189452.273972603\n",
      "epoch: 185, trainloss: 366048.3789954338, evalloss: 12072440.98630137\n",
      "epoch: 186, trainloss: 369434.8828006088, evalloss: 12709726.684931507\n",
      "epoch: 187, trainloss: 389052.33485540334, evalloss: 12951057.315068493\n",
      "epoch: 188, trainloss: 400139.6171993912, evalloss: 11732027.397260275\n",
      "epoch: 189, trainloss: 350382.35388127854, evalloss: 11930140.273972603\n",
      "epoch: 190, trainloss: 303971.9703196347, evalloss: 12295709.589041095\n",
      "epoch: 191, trainloss: 322004.5703957382, evalloss: 11787832.767123288\n",
      "epoch: 192, trainloss: 366125.71423135465, evalloss: 12583384.98630137\n",
      "epoch: 193, trainloss: 372150.7579908676, evalloss: 11440589.808219178\n",
      "epoch: 194, trainloss: 494262.1933028919, evalloss: 14664962.630136987\n",
      "epoch: 195, trainloss: 482069.9619482496, evalloss: 12097154.849315068\n",
      "epoch: 196, trainloss: 416824.2694063927, evalloss: 11998730.301369863\n",
      "epoch: 197, trainloss: 480047.0905631659, evalloss: 12008090.739726027\n",
      "epoch: 198, trainloss: 398202.32724505325, evalloss: 12978120.10958904\n",
      "epoch: 199, trainloss: 455847.8477929985, evalloss: 11304445.369863013\n"
     ]
    }
   ],
   "source": [
    "# 训练\n",
    "from torch.autograd import Variable\n",
    "\n",
    "\n",
    "\n",
    "#记录用于绘图\n",
    "losses = []#记录每次迭代后训练的loss\n",
    "eval_losses = []#测试的\n",
    "\n",
    "for i in range(200):\n",
    "    train_loss = 0\n",
    "    \n",
    "    model.train() #网络设置为训练模式 暂时可加可不加\n",
    "    for step, (x,y) in enumerate(train_loader):\n",
    "        tdata = Variable(x)\n",
    "        tlabel = Variable(y)\n",
    "        \n",
    "        #前向传播       \n",
    "        y_ = model(tdata)\n",
    "        #记录单批次一次batch的loss\n",
    "        loss = criterion(y_, tlabel)\n",
    "        #反向传播\n",
    "        optimizer.zero_grad()\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "        #累计单批次误差\n",
    "        train_loss = train_loss + loss.item()\n",
    "    losses.append(train_loss / len(train_data))\n",
    "    \n",
    "    \n",
    "    # 测试集进行测试\n",
    "    eval_loss = 0\n",
    "    model.eval()  # 可加可不加\n",
    "    for step,(x,y) in enumerate(test_loader):\n",
    "        edata = Variable(x)\n",
    "        elabel = Variable(y)\n",
    "        \n",
    "        # 前向传播\n",
    "        y_ = model(edata)\n",
    "        # 记录单批次一次batch的loss，测试集就不需要反向传播更新网络了\n",
    "        loss = criterion(y_, elabel)\n",
    "        # 累计单批次误差\n",
    "        eval_loss = eval_loss + loss.item()\n",
    "    eval_losses.append(eval_loss / len(test_data))\n",
    "\n",
    "    print('epoch: {}, trainloss: {}, evalloss: {}'.format(i, train_loss / len(train_data), eval_loss / len(test_data)))\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "4f24f8c7",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAWoAAAEICAYAAAB25L6yAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAaeklEQVR4nO3de3Bc53nf8e+ziysBLCgKILFLUiKtC8mFHF0CK7LVOqlkh7Tjxo4TJVIdTyZNR+2Mk1qNR47kuE2c6TjqqM4oM42dsopjp3bFyraiJHJiyZVsy7EdS6DoSCIp6kJdSAAkwCtAEiCA3ad/7C4uJEAsSSzO7feZwezuOWd3Hx4Sv3P4vu85r7k7IiISXqmgCxARkXNTUIuIhJyCWkQk5BTUIiIhp6AWEQk5BbWISMgpqCXyzOwNM3tP0HWI1IqCWkQk5BTUIiIhp6CW2DCzRjN7wMz6yz8PmFljeV2HmT1mZsfM7IiZfd/MUuV1v2dmfWY2YmZ7zOzWYP8kIrPVBV2AyCL6feAm4DrAgb8BPg38Z+ATwH6gs7ztTYCb2Qbgt4F3uHu/ma0D0ktbtsi51eyM2sy+aGaDZvZiFdteZmbfMbMdZva8mb2/VnVJrH0E+CN3H3T3IeAzwEfL6yaALHC5u0+4+/e9dKObAtAI5M2s3t3fcPfXAqleZB61bPr4ErClym0/DTzs7tcDtwOfr1VREms54M0Zr98sLwO4H3gVeMLM9prZPQDu/ipwF/CHwKCZbTOzHCIhUrOgdvengSMzl5nZFWb2LTPbXm4j3FjZHMiUn7cD/bWqS2KtH7h8xuvLystw9xF3/4S7vw3418DvVtqi3f3/uPu/KL/Xgf+2tGWLnNtSt1FvBf6Du79iZj9D6cz5FkpnM0+Y2e8ALYDGxMqFeAj4tJk9Sylw/wvwFQAz+wDwEvAaMEypyaNQbqNeDfwAGANGUSe7hMySBbWZtQLvAr5mZpXFjeXHO4AvufvnzOydwP82s2vcvbhU9Uks/FdK/zN7vvz6a+VlAFcB/4NSZ+JR4PPu/l0z+yngPmATpXbsHwJ3LmXRIguxWk4cUO5Bf8zdrzGzDLDH3bNzbLcT2OLu+8qv9wI3uftgzYoTEYmIJfsvnrsPA6+b2W0AVnJtefVbwK3l5ZuAJmBoqWoTEQmzmp1Rm9lDwM8BHcBB4A+Ap4AvUBomVQ9sc/c/MrM88L+AVkpti5909ydqUpiISMTUtOlDREQunnq3RURCriajPjo6OnzdunW1+GgRkVjavn37IXfvnGtdTYJ63bp19Pb21uKjRURiyczenG+dmj5EREJOQS0iEnIKahGRkFNQi4iEnIJaRCTkQjPDy6M7+rj/8T30Hxslt7yZuzdv4EPXrw66LBGRwIUiqB/d0ce9j7zA6EQBgL5jo9z7yAsACmsRSbxQNH3c//ieqZCuGJ0ocP/jewKqSEQkPEIR1P3HRs9ruYhIkoQiqHPLm89ruYhIkoQiqO/evIHm+vSsZc31ae7evCGgikREwiMUnYmVDsPP/N1Ojp6aYGVbI596/yZ1JIqIEJIzaiiF9cP//p0A3PO+jQppEZGy0AQ1wPqOFhrrUuzqHw66FBGR0Kiq6cPM3gBGgAIw6e49NSkmnWJjNsNOBbWIyJTzaaP+V+5+qGaVlOWzGf7+hQHcHTOr9deJiIReqJo+ALpzGY6PTtCnMdQiIkD1Qe3AE2a23czunGsDM7vTzHrNrHdoaOiCC8rnMgBqpxYRKas2qG929xuA9wEfM7N3n7mBu2919x537+nsnHPar6ps6sqQMtROLSJSVlVQu3t/+XEQ+GvgxloV1NyQZn1HC7sGFNQiIlBFUJtZi5m1VZ4DPw+8WMuiunPtavoQESmr5ox6FfCPZvbPwDPAN939W7UsqjuXoe/YKEdPjtfya0REImHB4Xnuvhe4dglqmVLpUNw9MMy7ruxYyq8WEQmd0A3Pg9JYalCHoogI
hDSoL21tpCvTpA5FERFCGtRQaqfe2X886DJERAIX2qDO5zK8NnSSsTOm6BIRSZrQBnV3LkOh6Ow5MBJ0KSIigQptUOez7QBqpxaRxAttUK9d0UxbY53aqUUk8UIb1GbGplxGVyiKSOKFNqih1E69e2CEQtGDLkVEJDChDup8NsPoRIE3Dp8MuhQRkcCEOqi7c6UORV2hKCJJFuqgvnJlK/VpUzu1iCRaqIO6oS7F1avaNPJDRBIt1EENpXbqXf3DuKtDUUSSKfRB3Z3LcPjkOIMjp4MuRUQkEKEP6ny5Q1Ht1CKSVKEP6k3ZNgC1U4tIYoU+qNua6rn80mW654eIJFbogxoq96ZWUItIMkUiqPPZDG8ePsXI2ETQpYiILLlIBHXlCsXdA7o3tYgkT0SCujTZ7S51KIpIAkUiqDvbGulobVA7tYgkUiSC2szI59oV1CKSSJEIaih1KL4yOML4ZDHoUkREllRkgro7l2Gi4LwyqA5FEUmWyAR1fqpDUc0fIpIskQnqdZe2sKwhrXZqEUmcyAR1OmVs7GrTpeQikjhVB7WZpc1sh5k9VsuCzqU7187u/mGKmuxWRBLkfM6oPw7srlUh1cjnMoycnmT/0dEgyxARWVJVBbWZrQF+AXiwtuWcW+UKRd3yVESSpNoz6geATwKBDmK+elUb6ZSpnVpEEmXBoDazDwCD7r59ge3uNLNeM+sdGhpatAJnaqpPc2Vnq0Z+iEiiVHNGfTPwi2b2BrANuMXMvnLmRu6+1d173L2ns7Nzkcucls9lNJZaRBJlwaB293vdfY27rwNuB55y91+veWXz6M5lODA8xuETmuxWRJIhMuOoK/LZ8hWKaqcWkYQ4r6B29++6+wdqVUw18lMjPxTUIpIMkTujXr6sgdXLm9VOLSKJEbmghtJZtcZSi0hSRDOosxn2HjrJqfHJoEsREam5SAZ1dy6DO7x0QPemFpH4i2RQ697UIpIkkQzq1cubaW+u18gPEUmESAa1mdGdy2gstYgkQiSDGkodii8NDDNZ0GS3IhJvkQ3q7tUZTk8W2XvoZNCliIjUVGSDOp9tB9ShKCLxF9mgvqKzhYa6lC58EZHYi2xQ16VTmuxWRBIhskENpQtfdvYP467JbkUkviId1PlshmOnJhg4PhZ0KSIiNRPtoM6VOhR14YuIxFmkg3pjVxtmGvkhIvEW6aBuaaxjfUeLRn6ISKxFOqih1E6tkR8iEmeRD+ruXDv7j45y/NRE0KWIiNRE5IN66panOqsWkZiKflBnK5Pdqp1aROIp8kHd2dbIyrZGnVGLSGxFPqihdIWihuiJSFzFIqjzuQyvDp5gbKIQdCkiIosuFkHdnWtnsui8cvBE0KWIiCy6WAR1pUNx14A6FEUkfmIR1JetWEZrY53u+SEisRSLoE6ljE3ZNnUoikgsxSKoodROvXtgmGJR96YWkXiJTVDncxlOjhd488ipoEsREVlUCwa1mTWZ2TNm9s9mttPMPrMUhZ0vXaEoInFVzRn1aeAWd78WuA7YYmY31bSqC3D1qjbq06YORRGJnbqFNvDShISVAcr15Z/QNQQ31KW4cqU6FEUkfqpqozaztJn9BBgEvu3uP55jmzvNrNfMeoeGhha5zOpUJrsVEYmTqoLa3Qvufh2wBrjRzK6ZY5ut7t7j7j2dnZ2LXGZ18tkMh06cZnBEk92KSHyc16gPdz8GfBfYUotiLlZ3rtKhqLNqEYmPakZ9dJrZ8vLzZuA9wEs1ruuCbKpMIqCgFpEYWbAzEcgCXzazNKVgf9jdH6ttWRcm01TPZSuWKahFJFaqGfXxPHD9EtSyKDTZrYjETWyuTKzozmV4/dBJTpyeDLoUEZFFEbugrkx2+5LOqkUkJmIX1N25dkAjP0QkPmIX1KsyjaxoaVCHoojERuyC2sxKVyhqthcRiYnYBTWURn68fOAEE4Vi0KWIiFy0eAZ1LsN4ocirg5rsVkSiL5ZB3a0rFEUkRmIZ1Os7Wmmq
T2nkh4jEQiyDOp0yNnZl2KUORRGJgVgGNZSaP3b1D1Oa90BEJLpiHNTtDI9Nsv/oaNCliIhclNgGdV73phaRmIhtUG/saiNl6E56IhJ5sQ3qpvo0V3S2sqtfHYoiEm2xDWrQZLciEg+xDup8LsPA8TGOnBwPuhQRkQsW66Cu3PJUVyiKSJTFOqjz2fKl5LrwRUQiLNZBfUlLA7n2JrVTi0ikxTqoodROraYPEYmyBAR1O68NnWB0vBB0KSIiFyT+QZ3NUHTYc3Ak6FJERC5I7IO6e+pScnUoikg0xT6o11zSTKapTu3UIhJZsQ9qMyOvKxRFJMJiH9QA+Ww7Lx0YplDUvalFJHoSEdTduQxjE0VeP6TJbkUkehIR1Lo3tYhEWSKC+sqVrTSkU+pQFJFIWjCozWytmX3HzHab2U4z+/hSFLaY6tMpru5q1SQCIhJJ1ZxRTwKfcPdNwE3Ax8wsX9uyFl93tp2dmuxWRCJowaB29wF3f678fATYDayudWGLLZ/LcOTkOAeHTwddiojIeTmvNmozWwdcD/x4jnV3mlmvmfUODQ0tUnmLR1coikhUVR3UZtYKfAO4y93Paux1963u3uPuPZ2dnYtZ46LYmM1gpkkERCR6qgpqM6unFNJfdfdHaltSbbQ21rHu0hYN0RORyKlm1IcBfwHsdvc/qX1JtZPPZTTyQ0Qip5oz6puBjwK3mNlPyj/vr3FdNZHPZnjryCmGxyaCLkVEpGp1C23g7v8I2BLUUnOVDsVd/cPc9LZLA65GRKQ6ibgysSI/I6hFRKIiUUG9sq2JzrZGdSiKSKQkKqih1E6tDkURiZLEBXV3LsMrB0c4PanJbkUkGhIX1Plchsmi88pB3ZtaRKIhcUHdnWsH1KEoItGRuKC+fMUyWhrSaqcWkchIXFCnUsambEY3ZxKRyEhcUEOpnXr3wAhFTXYrIhGQyKDuzmU4cXqSt46cCroUEZEFJTKo89lyh6LaqUUkAhIZ1FetaqUuZWqnFpFISGRQN9WnuXJlq4boiUgkJDKoodShqHt+iEgUJDeosxkGR04zNKLJbkUk3BIb1FNXKKpDUURCLrFBnc/q3tQiEg2JDer2ZfWsuaRZIz9EJPQSG9RQuvBFTR8iEnaJDup8tp3XD53k5OnJoEsREZlXooO6O5fBHV46MBJ0KSIi80p0UE9Pdqt2ahEJr0QHdba9iUuW1audWkRCLdFBbWa6QlFEQi/RQQ2lC19eOjDCRKEYdCkiInNKfFDnsxnGJ4vsHToZdCkiInNKfFB3lzsUdeGLiIRV4oN6fUcLjXUpXUouIqGV+KCuS6fYmFWHooiEV+KDGkrt1LsGhnHXZLciEj4LBrWZfdHMBs3sxaUoKAjduQzHRyfoOzYadCkiImep5oz6S8CWGtcRqOkrFNX8ISLhs2BQu/vTwJElqCUwm7oypAy1U4tIKC1aG7WZ3WlmvWbWOzQ0tFgfuySaG9Ks72jRpeQiEkqLFtTuvtXde9y9p7Ozc7E+dsl059rV9CEioaRRH2X5XIa+Y6McOzUedCkiIrMoqMu61aEoIiFVzfC8h4AfARvMbL+Z/Vbty1p6U5Pdqp1aREKmbqEN3P2OpSgkaJe2NtKVadLIDxEJHTV9zJDPZdT0ISKho6CeoTuX4dWhE4xNFIIuRURkioJ6hu5chkLRefmgJrsVkfBQUM+Qz7YDukJRRMJFQT3D2hXNtDXWqZ1aREJFQT2DmbEpl9FsLyISKgrqMzTVpdjx1jHW3/NNbr7vKR7d0Rd0SSKScAuOo06SR3f08aO9h6lMH9B3bJR7H3kBgA9dvzq4wkQk0XRGPcP9j+9hojB7lpfRiQL3P74noIpERBTUs/TPM8NL37FRnnvrqKbqEpFAKKhnyC1vnnO5AR/+/A/Z8sD3+csfvK477InIklJQz3D35g0016dnLWuuT/PHH76Gz/7S22msT/GZv9vFjZ99kru27eCf9h7WWbaI1Jw6E2eodBje//ge
+o+NklvezN2bN0wt/zc/cxk7+4+z7Zl9PLqjj0d/0s/bOlu4/R1r+eUb1nBpa2OQ5YtITFktzgh7enq8t7d30T83TE6NT/LN5wfY9uw+tr95lPq08fP5Lm6/cS03X9FBKmVBlygiEWJm2929Z851CuqL9/LBEbY9s49Hduzn2KkJ1q5o5td61nJbz1pWZZqCLk9EIkBBvUTGJgo8vvMA257Zx4/2HiadMm7ZuJI7blzLz169krTOskVkHucKarVRL6Km+jQfvG41H7xuNa8fOsn/fXYfX9++n2/vOki2vYnbetbyqz1rWHPJsqBLFZEI0Rl1jU0Uijy5+yAPPbOPp18ZAuDdV3Vyx41ruXXTKurTGngjImr6CI39R0/x8LP7eLh3PweGx+hobeRXfnoNt79jLes6WoIuT0QCpKAOmclCke+9PMRDz+zjO3sGKRSdd11xKbffeBmbu1fxDy8cmHeIoIjEk4I6xA4Oj/G13n1se3Yf+4+Osqw+xemCUyhO/72ULrp5u8JaJMYU1BFQLDo/eO0Qd/5VL6MTxbPWtzfX87nbrqWrvYmu9iZWLGvQWG2RGNGojwhIpYx/eVUnY3OENMDx0Qn+3V9NH/zq08aqTBNdmVJwZ9ubWJVpItveTFd7I13tzaxsa1RnpUgMKKhDJre8mb457uLXlWnif370pxk4PsbB4bEZj6O82Hec/7f74FkhbwYdrY0zQnz6sat9OuSXNcz9z+DRHX2haCsPSx0iQVFQh8zdmzdw7yMvMDpRmFrWXJ/mnvdt5Nq1y7l27dzvc3eOj05woBLix8dmhfpbh0/xzOtHOD46cdZ7M011ZNubWdXeRLYc3gPHR3l0Rz/jhVL49x0b5Z5HnmeyUOTDN6xZsmaXR3f0zdofQU7mEJYDRljqkKWjNuoQquUv4uh4oRzmo1MhfqDyM1x6HDpxmoX+WaRTRl3KaEinqEsb9ekU9TOe16Uqy4y68mNp+Yzn6en316VSNNSV3leXTtFQft8Xvvsqx0cnz/r+S5bV89lfejvplJ31U5dKkU5BOpUibWeum2t7I1V5tOltzKYPRmceMCCYTt6w1FGpJegDRhhqWKw61Jko52WiUOTq3/8H5vuXcdd7rmKy4EwUikwUnMlicer5RKE4Y12RyeKM7QpFxsuPk0VnfLLIZLG0/fiM900Ww3Hr2JRBXSpFKgWnJ4pz7o+0QVd7M6kUpK0U9GaQOvN5eb2ZkZpnfWVZZf2sbcvrn9w9OCukK1oa0tzWs5aUGekUUweetJUOQmkz0mmbOnClzKhLW3l7m3VAm3nQKi2bPuhV/hw/fO0Qf/69vZyenG5ua6xL8bvvvYr35rumviM1VcP0/kmlpr+z8ueq1Hk+wnLQWqw6FNRy3m6+76k528pXL2/mB/fcUtPvdvepgL/1c99j4PjYWdusbGvky//2RgrF0lDGgpceJwtOsfz+YrH0WNlmslgsrTvnNtPLKtsUis7Wp/fOW+8v37CGonv5B4ruuDvFIrOWVZ575Xmxsi0UzrF+5mfsHTo5bx2ZpjqKzqz9UQjJQa9apYBn1gEkVV6WnnHwSJlxYHhszj9fXcpY19Ey573i59wbcyyca7szP6/yav/R0TnrON/fFY36kPM2X1v53Zs31Py7zWyqeeT3tmycs45PvX8Tm7KZmtdS8c3nB+Y9cH3uV69dsjou5ABaPCO4Cz7jYHTmuvIBqlCkdGArMuf6jzz443lrfODXrpvarvJZhfLB68zvKPp0DcVi6YBUeV6p1WccfIoz3vuN5/bP+f2TRWfDqrbSizlO0uc6b5/ZzHXu7c7e5s3Dp+asY76p/S6EglrmtNAkCkmrI8gD18XWkUoZKYwzJi+6KKvnGZ20ennzkv3d/NPew/PW8GcfuWFJagB49o2jc9Yx39R+F6KqoDazLcCfAmngQXe/b9EqkND60PWrQzGaIAx1hOWAEZY6wnDgCkMNS1XHgm3UZpYGXgbeC+wHngXucPdd
871HbdQi8ReGERdhqGGx6riozkQzeyfwh+6+ufz6XgB3/+P53qOgFhE5P+cK6mquL14N7Jvxen952ZlfcqeZ9ZpZ79DQ0IVVKiIiZ6kmqOfq/DzrNNzdt7p7j7v3dHZ2XnxlIiICVBfU+4GZFy6vAfprU46IiJypmqB+FrjKzNabWQNwO/C3tS1LREQqFhye5+6TZvbbwOOUhud90d131rwyEREBanQJuZkNAW8u+gcvrQ7gUNBFhIT2xWzaH7Npf0y7mH1xubvP2cFXk6COAzPrnW+oTNJoX8ym/TGb9se0Wu0LTf8hIhJyCmoRkZBTUM9va9AFhIj2xWzaH7Npf0yryb5QG7WISMjpjFpEJOQU1CIiIaegnsHM1prZd8xst5ntNLOPB11T0MwsbWY7zOyxoGsJmpktN7Ovm9lL5X8j7wy6piCZ2X8q/568aGYPmVlT0DUtJTP7opkNmtmLM5atMLNvm9kr5cdLFuO7FNSzTQKfcPdNwE3Ax8wsH3BNQfs4sDvoIkLiT4FvuftG4FoSvF/MbDXwH4Eed7+G0lXLtwdb1ZL7ErDljGX3AE+6+1XAk+XXF01BPYO7D7j7c+XnI5R+EYOf4iQgZrYG+AXgwaBrCZqZZYB3A38B4O7j7n4s0KKCVwc0m1kdsIyE3azN3Z8Gjpyx+IPAl8vPvwx8aDG+S0E9DzNbB1wPzD+LZ/w9AHwSKAZcRxi8DRgC/rLcFPSgmbUEXVRQ3L0P+O/AW8AAcNzdnwi2qlBY5e4DUDrxA1YuxocqqOdgZq3AN4C73H046HqCYGYfAAbdfXvQtYREHXAD8AV3vx44ySL9tzaKym2vHwTWAzmgxcx+Pdiq4ktBfQYzq6cU0l9190eCridANwO/aGZvANuAW8zsK8GWFKj9wH53r/wP6+uUgjup3gO87u5D7j4BPAK8K+CawuCgmWUByo+Di/GhCuoZzMwotUHudvc/CbqeILn7ve6+xt3XUeokesrdE3vG5O4HgH1mVpla+lZg3gmeE+At4CYzW1b+vbmVBHeuzvC3wG+Un/8G8DeL8aEL3o86YW4GPgq8YGY/KS/7lLv/fXAlSYj8DvDV8gQae4HfDLiewLj7j83s68BzlEZL7SBhl5Kb2UPAzwEdZrYf+APgPuBhM/stSgez2xblu3QJuYhIuKnpQ0Qk5BTUIiIhp6AWEQk5BbWISMgpqEVEQk5BLSIScgpqEZGQ+//OqGm3GIpIrwAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "\n",
    "plt.plot(range(1,11,1), losses[:10], marker='o')\n",
    "plt.title(\"loss\") #标题\n",
    "\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "db70aeed",
   "metadata": {},
   "source": [
    "# 预测"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "id": "d2eed5cc",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tensor([[ 0.0733, -0.2079, -0.2071,  ...,  1.0000,  0.0000,  0.0000],\n",
      "        [-0.8723,  0.4097, -0.0919,  ...,  1.0000,  0.0000,  0.0000],\n",
      "        [ 0.0733, -0.0844,  0.0735,  ...,  1.0000,  0.0000,  0.0000],\n",
      "        [ 0.3098, -0.4138, -0.0969,  ...,  0.0000,  0.0000,  0.0000]])\n",
      "tensor([208500., 181500., 223500., 140000.])\n",
      "tensor([[214119.9688],\n",
      "        [177394.6094],\n",
      "        [219405.1250],\n",
      "        [131715.1094]], grad_fn=<AddmmBackward>)\n"
     ]
    }
   ],
   "source": [
    "test_features = torch.tensor(all_features[0:4].values, dtype=torch.float)\n",
    "print(test_features)\n",
    "\n",
    "test_labels = torch.tensor(all_labels[0:4].values, dtype=torch.float)\n",
    "print(test_labels)\n",
    "\n",
    "output = model(test_features)\n",
    "print(output)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ece37b5f",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.8"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
