{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 首先 import 必要的模块\n",
    "import pandas as pd \n",
    "import numpy as np\n",
    "\n",
    "import matplotlib.pyplot as plt\n",
    "import seaborn as sns\n",
    "%matplotlib inline\n",
    "from sklearn.model_selection import GridSearchCV\n",
    "\n",
    "# 使用r2_score评价模型在测试集和训练集上的性能，并输出评估结果\n",
    "from sklearn.metrics import r2_score  #评价回归预测模型的性能\n",
    "from sklearn.metrics import mean_squared_error#MSE\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "train = pd.read_csv(\"rop.csv\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>depth</th>\n",
       "      <th>a</th>\n",
       "      <th>b</th>\n",
       "      <th>c</th>\n",
       "      <th>d</th>\n",
       "      <th>e</th>\n",
       "      <th>f</th>\n",
       "      <th>j</th>\n",
       "      <th>k</th>\n",
       "      <th>rop</th>\n",
       "      <th>wob</th>\n",
       "      <th>rpm</th>\n",
       "      <th>L</th>\n",
       "      <th>B</th>\n",
       "      <th>M</th>\n",
       "      <th>V</th>\n",
       "      <th>Angle</th>\n",
       "      <th>Azimuth</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.234043</td>\n",
       "      <td>0.285714</td>\n",
       "      <td>0.655172</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.418182</td>\n",
       "      <td>0.285714</td>\n",
       "      <td>0.333333</td>\n",
       "      <td>0.800</td>\n",
       "      <td>0.340361</td>\n",
       "      <td>0.2</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.944444</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.009183</td>\n",
       "      <td>0.170213</td>\n",
       "      <td>0.142857</td>\n",
       "      <td>0.879310</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.363636</td>\n",
       "      <td>0.142857</td>\n",
       "      <td>0.851852</td>\n",
       "      <td>0.775</td>\n",
       "      <td>0.340361</td>\n",
       "      <td>0.2</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.944444</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.011881</td>\n",
       "      <td>0.921779</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.024269</td>\n",
       "      <td>0.063830</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.454545</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.850</td>\n",
       "      <td>0.451807</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.166667</td>\n",
       "      <td>0.769231</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.055240</td>\n",
       "      <td>0.604410</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.025105</td>\n",
       "      <td>0.021277</td>\n",
       "      <td>0.183673</td>\n",
       "      <td>0.793103</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.545455</td>\n",
       "      <td>0.183673</td>\n",
       "      <td>0.777778</td>\n",
       "      <td>0.700</td>\n",
       "      <td>0.451807</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.166667</td>\n",
       "      <td>0.769231</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.054242</td>\n",
       "      <td>0.603477</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.138351</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.489796</td>\n",
       "      <td>0.982759</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.090909</td>\n",
       "      <td>0.489796</td>\n",
       "      <td>0.740741</td>\n",
       "      <td>1.000</td>\n",
       "      <td>0.421687</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.833333</td>\n",
       "      <td>0.333333</td>\n",
       "      <td>0.846154</td>\n",
       "      <td>0.8</td>\n",
       "      <td>0.109731</td>\n",
       "      <td>0.432592</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "      depth         a         b         c    d         e         f         j  \\\n",
       "0  0.000000  0.234043  0.285714  0.655172  0.0  0.418182  0.285714  0.333333   \n",
       "1  0.009183  0.170213  0.142857  0.879310  0.0  0.363636  0.142857  0.851852   \n",
       "2  0.024269  0.063830  0.000000  1.000000  0.0  0.454545  0.000000  1.000000   \n",
       "3  0.025105  0.021277  0.183673  0.793103  0.0  0.545455  0.183673  0.777778   \n",
       "4  0.138351  0.000000  0.489796  0.982759  0.0  0.090909  0.489796  0.740741   \n",
       "\n",
       "       k       rop  wob  rpm         L         B         M    V     Angle  \\\n",
       "0  0.800  0.340361  0.2  1.0  0.944444  0.000000  0.000000  0.0  0.000000   \n",
       "1  0.775  0.340361  0.2  1.0  0.944444  0.000000  0.000000  0.0  0.011881   \n",
       "2  0.850  0.451807  0.0  1.0  1.000000  0.166667  0.769231  1.0  0.055240   \n",
       "3  0.700  0.451807  0.0  1.0  1.000000  0.166667  0.769231  1.0  0.054242   \n",
       "4  1.000  0.421687  1.0  1.0  0.833333  0.333333  0.846154  0.8  0.109731   \n",
       "\n",
       "    Azimuth  \n",
       "0  1.000000  \n",
       "1  0.921779  \n",
       "2  0.604410  \n",
       "3  0.603477  \n",
       "4  0.432592  "
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn.preprocessing import MinMaxScaler\n",
    "# from sklearn.preprocessing import StandardScaler\n",
    "#标准化\n",
    "ms=MinMaxScaler()\n",
    "# ms=StandardScaler()\n",
    "#保存特征名\n",
    "feat_names=train.columns\n",
    "train=ms.fit_transform(train)\n",
    "train=pd.DataFrame(columns=feat_names,data=train)\n",
    "train.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(80, 15)"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "y = train.filter(['rop','Angle','Azimuth'])   \n",
    "# y = train['rop']\n",
    "X = train.drop(['rop','Angle','Azimuth'], axis=1)\n",
    "#将数据分割训练数据与测试数据\n",
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "# 随机采样20%的数据构建测试样本，其余作为训练样本\n",
    "X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=33, test_size=0.2)\n",
    "X_train.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "#三层网络15*1000*1000->3\n",
    "import torch\n",
    "net = torch.nn.Sequential(\n",
    "    torch.nn.Linear(15, 1000),\n",
    "    torch.nn.ReLU(),\n",
    "    torch.nn.Linear(1000, 2000),\n",
    "    torch.nn.ReLU(),\n",
    "    torch.nn.Linear(2000, 3),\n",
    "\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "optimizer = torch.optim.Adam(net.parameters(), lr=0.01)#优化器\n",
    "loss_func = torch.nn.MSELoss()  # this is for regression mean squared loss"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Convert the training split to float32 tensors for the network.\n",
    "# Building the tensor directly from the numpy values replaces the original\n",
    "# torch.tensor(torch.from_numpy(...)) two-step, which copied an existing\n",
    "# tensor and emitted a UserWarning on every run.\n",
    "X_train_t = torch.tensor(X_train.values, dtype=torch.float32)\n",
    "y_train_t = torch.tensor(y_train.values, dtype=torch.float32)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Training loop: full-batch gradient descent for 300 steps.\n",
    "for t in range(300):\n",
    "    prediction = net(X_train_t)                 # forward pass on the full training set\n",
    "\n",
    "    loss = loss_func(prediction, y_train_t)     # MSE(nn output, target)\n",
    "\n",
    "    optimizer.zero_grad()   # clear gradients for the next step\n",
    "    loss.backward()         # backpropagation, compute gradients\n",
    "    optimizer.step()        # apply gradients\n",
    "\n",
    "    # Report progress every 30 steps.\n",
    "    if t % 30 == 0:\n",
    "        prediction_n = prediction.detach().numpy()\n",
    "        y_train_n = y_train_t.detach().numpy()\n",
    "        # r2_score expects (y_true, y_pred) -- ground truth first.\n",
    "        # The original call had the arguments reversed, which skews the score.\n",
    "        print('The r2 score on train is', r2_score(y_train_n, prediction_n))\n",
    "        print('MSE loss', loss.item())\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Convert the test split to float32 tensors, mirroring the training-split\n",
    "# cell. One-step construction avoids the torch.tensor(tensor) copy warning\n",
    "# the original two-step conversion produced.\n",
    "X_test_t = torch.tensor(X_test.values, dtype=torch.float32)\n",
    "y_test_t = torch.tensor(y_test.values, dtype=torch.float32)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Evaluate the trained network on the held-out test split.\n",
    "with torch.no_grad():  # inference only -- no gradient bookkeeping needed\n",
    "    y_test_prediction = net(X_test_t)\n",
    "y_test_prediction_n = y_test_prediction.numpy()\n",
    "y_test_n = y_test_t.numpy()\n",
    "\n",
    "# r2_score expects (y_true, y_pred) -- ground truth first.\n",
    "print('The r2 score on test is', r2_score(y_test_n, y_test_prediction_n))\n",
    "# Report the TEST loss. The original printed loss_func(prediction, y_train_t)\n",
    "# here, i.e. the final TRAINING loss mislabeled as a test metric.\n",
    "print('MSE loss', loss_func(y_test_prediction, y_test_t).item())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.0"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
