{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "9e2436ee",
   "metadata": {},
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "import torch\n",
    "from torch import nn, optim\n",
    "from torch.utils import data\n",
    "from sklearn.preprocessing import StandardScaler\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "0460ad3c",
   "metadata": {},
   "outputs": [],
   "source": [
    "train_file = pd.read_csv('../data/house-prices/train.csv')\n",
    "test_file = pd.read_csv('../data/house-prices/test.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "bcb51b78",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "   Id  MSSubClass MSZoning  LotFrontage  LotArea Street Alley LotShape  \\\n",
      "0   1          60       RL         65.0     8450   Pave   NaN      Reg   \n",
      "1   2          20       RL         80.0     9600   Pave   NaN      Reg   \n",
      "2   3          60       RL         68.0    11250   Pave   NaN      IR1   \n",
      "\n",
      "  LandContour Utilities  ... PoolArea PoolQC Fence MiscFeature MiscVal MoSold  \\\n",
      "0         Lvl    AllPub  ...        0    NaN   NaN         NaN       0      2   \n",
      "1         Lvl    AllPub  ...        0    NaN   NaN         NaN       0      5   \n",
      "2         Lvl    AllPub  ...        0    NaN   NaN         NaN       0      9   \n",
      "\n",
      "  YrSold  SaleType  SaleCondition  SalePrice  \n",
      "0   2008        WD         Normal     208500  \n",
      "1   2007        WD         Normal     181500  \n",
      "2   2008        WD         Normal     223500  \n",
      "\n",
      "[3 rows x 81 columns]\n",
      "['Id', 'MSSubClass', 'MSZoning', 'LotFrontage', 'LotArea', 'Street', 'Alley', 'LotShape', 'LandContour', 'Utilities', 'LotConfig', 'LandSlope', 'Neighborhood', 'Condition1', 'Condition2', 'BldgType', 'HouseStyle', 'OverallQual', 'OverallCond', 'YearBuilt', 'YearRemodAdd', 'RoofStyle', 'RoofMatl', 'Exterior1st', 'Exterior2nd', 'MasVnrType', 'MasVnrArea', 'ExterQual', 'ExterCond', 'Foundation', 'BsmtQual', 'BsmtCond', 'BsmtExposure', 'BsmtFinType1', 'BsmtFinSF1', 'BsmtFinType2', 'BsmtFinSF2', 'BsmtUnfSF', 'TotalBsmtSF', 'Heating', 'HeatingQC', 'CentralAir', 'Electrical', '1stFlrSF', '2ndFlrSF', 'LowQualFinSF', 'GrLivArea', 'BsmtFullBath', 'BsmtHalfBath', 'FullBath', 'HalfBath', 'BedroomAbvGr', 'KitchenAbvGr', 'KitchenQual', 'TotRmsAbvGrd', 'Functional', 'Fireplaces', 'FireplaceQu', 'GarageType', 'GarageYrBlt', 'GarageFinish', 'GarageCars', 'GarageArea', 'GarageQual', 'GarageCond', 'PavedDrive', 'WoodDeckSF', 'OpenPorchSF', 'EnclosedPorch', '3SsnPorch', 'ScreenPorch', 'PoolArea', 'PoolQC', 'Fence', 'MiscFeature', 'MiscVal', 'MoSold', 'YrSold', 'SaleType', 'SaleCondition', 'SalePrice']\n"
     ]
    }
   ],
   "source": [
    "print(train_file.head(3))\n",
    "print(train_file.columns.tolist())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 202,
   "id": "535f215f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Separate the design matrix from the regression target.\n",
    "# Id is an identifier, not a feature, so it is dropped as well.\n",
    "train_x = train_file.drop(columns=['Id', 'SalePrice'])\n",
    "train_y = train_file['SalePrice']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 203,
   "id": "9ea6efbe",
   "metadata": {},
   "outputs": [],
   "source": [
    "# One-hot encode every object-dtype column; dummy_na=True adds an explicit\n",
    "# missing-value indicator column per categorical feature.\n",
    "non_numeric_columns = train_x.select_dtypes(include=['object']).columns\n",
    "train_x = pd.get_dummies(train_x, columns=non_numeric_columns, dummy_na=True)\n",
    "# Remaining NaNs are in numeric columns (e.g. LotFrontage); zero-fill them.\n",
    "train_x = train_x.fillna(0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 204,
   "id": "8d4f89a5",
   "metadata": {},
   "outputs": [],
   "source": [
    "scaler = StandardScaler()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 205,
   "id": "eb578dff",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Encode the test set the same way as train, then align its columns with the\n",
    "# training matrix (reindex fills train-only dummy columns with 0 and drops\n",
    "# test-only ones).\n",
    "test_x = test_file.drop(['Id'], axis=1)\n",
    "test_x = pd.get_dummies(test_x, columns=non_numeric_columns, dummy_na=True)\n",
    "test_x.fillna(0, inplace=True)\n",
    "test_x = test_x.reindex(columns=train_x.columns, fill_value=0)\n",
    "# BUGFIX: fit the scaler on the TRAINING data only and reuse its statistics\n",
    "# for the test set. The previous code called fit_transform on test first and\n",
    "# then refit on train, standardizing the two sets with different means/stds\n",
    "# (inconsistent scales and test-set leakage).\n",
    "train_x = scaler.fit_transform(train_x)\n",
    "test_x = scaler.transform(test_x)\n",
    "test_x = torch.from_numpy(test_x).float()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 206,
   "id": "26b0770a",
   "metadata": {},
   "outputs": [],
   "source": [
    "# MLP regressor. The input width is taken from the engineered feature matrix\n",
    "# instead of the hard-coded 331, so the net stays correct if the number of\n",
    "# dummy-encoded columns changes with the data.\n",
    "net = torch.nn.Sequential(\n",
    "    nn.Linear(train_x.shape[1], 128),\n",
    "    nn.Dropout(0.2),\n",
    "    nn.ReLU(),\n",
    "    nn.Linear(128, 32),\n",
    "    nn.ReLU(),\n",
    "    nn.Linear(32, 1)\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 207,
   "id": "e56f737a",
   "metadata": {},
   "outputs": [],
   "source": [
    "batch_size = 64"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 208,
   "id": "2f32e0ec",
   "metadata": {},
   "outputs": [],
   "source": [
    "train_x = torch.from_numpy(train_x)\n",
    "train_y = torch.from_numpy(train_y.values)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 209,
   "id": "c07933eb",
   "metadata": {},
   "outputs": [],
   "source": [
    "train = data.TensorDataset(train_x, train_y)\n",
    "train_iter = data.DataLoader(train, batch_size=batch_size, shuffle=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 210,
   "id": "5a910664",
   "metadata": {},
   "outputs": [],
   "source": [
    "num_epochs = 10\n",
    "loss = nn.SmoothL1Loss()\n",
    "updater = optim.Adam(net.parameters())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 211,
   "id": "5a22f305",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "d:\\Anaconda3\\envs\\pytorch\\lib\\site-packages\\torch\\nn\\modules\\loss.py:922: UserWarning: Using a target size (torch.Size([64])) that is different to the input size (torch.Size([64, 1])). This will likely lead to incorrect results due to broadcasting. Please ensure they have the same size.\n",
      "  return F.smooth_l1_loss(input, target, reduction=self.reduction, beta=self.beta)\n",
      "d:\\Anaconda3\\envs\\pytorch\\lib\\site-packages\\torch\\nn\\modules\\loss.py:922: UserWarning: Using a target size (torch.Size([52])) that is different to the input size (torch.Size([52, 1])). This will likely lead to incorrect results due to broadcasting. Please ensure they have the same size.\n",
      "  return F.smooth_l1_loss(input, target, reduction=self.reduction, beta=self.beta)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch 1, loss 171088.3125\n",
      "epoch 2, loss 169353.9219\n",
      "epoch 3, loss 178076.8125\n",
      "epoch 4, loss 178436.0312\n",
      "epoch 5, loss 193864.9688\n",
      "epoch 6, loss 165980.5156\n",
      "epoch 7, loss 177999.0000\n",
      "epoch 8, loss 175264.9062\n",
      "epoch 9, loss 185714.5156\n",
      "epoch 10, loss 188224.4062\n"
     ]
    }
   ],
   "source": [
    "net.train()  # make dropout explicitly active during training\n",
    "for epoch in range(num_epochs):\n",
    "    for x, y in train_iter:\n",
    "        y_hat = net(x.float())\n",
    "        # BUGFIX: reshape the target to (batch, 1) to match y_hat. Passing a\n",
    "        # (batch,) target broadcasts against the (batch, 1) prediction into a\n",
    "        # (batch, batch) loss matrix (the UserWarning in the old output), which\n",
    "        # silently corrupts the gradient.\n",
    "        l = loss(y_hat, y.float().reshape(-1, 1))\n",
    "        updater.zero_grad()\n",
    "        l.backward()\n",
    "        updater.step()\n",
    "    print(f'epoch {epoch + 1}, loss {l.item():.4f}')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 212,
   "id": "db3af55e",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(torch.Size([1459, 331]), torch.Size([1460, 331]), torch.Size([1460]))"
      ]
     },
     "execution_count": 212,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "test_x.shape,train_x.shape,train_y.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 213,
   "id": "61721c6a",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Sequential(\n",
       "  (0): Linear(in_features=331, out_features=128, bias=True)\n",
       "  (1): Dropout(p=0.2, inplace=False)\n",
       "  (2): ReLU()\n",
       "  (3): Linear(in_features=128, out_features=32, bias=True)\n",
       "  (4): ReLU()\n",
       "  (5): Linear(in_features=32, out_features=1, bias=True)\n",
       ")"
      ]
     },
     "execution_count": 213,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "net.eval()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 214,
   "id": "9121cf57",
   "metadata": {},
   "outputs": [],
   "source": [
    "test_y = net(test_x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 215,
   "id": "a165e926",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([[3839.2212],\n",
       "        [ 920.5335],\n",
       "        [2797.3313],\n",
       "        ...,\n",
       "        [1157.0094],\n",
       "        [3697.3872],\n",
       "        [3053.2161]], grad_fn=<AddmmBackward0>)"
      ]
     },
     "execution_count": 215,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "test_y"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 216,
   "id": "83621519",
   "metadata": {},
   "outputs": [],
   "source": [
    "result = pd.concat([test_file['Id'], pd.Series(test_y.reshape(-1).detach().numpy(), name='SalePrice')], axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 218,
   "id": "729ae813",
   "metadata": {},
   "outputs": [],
   "source": [
    "result.to_csv('../data/house-prices/submission.csv', index=False)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "24d98061",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "pytorch",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
