{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "import torch.optim as optim\n",
    "from torch.utils.data import Dataset, DataLoader, ConcatDataset\n",
    "from torch.autograd import Variable, Function\n",
    "\n",
    "from sklearn.metrics import *\n",
    "\n",
    "import datetime\n",
    "\n",
    "import os\n",
    "# Restrict training to GPU 1. Fix: the variable is CUDA_VISIBLE_DEVICES\n",
    "# (plural); the original misspelling 'CUDA_VISIBLE_DEVICE' is silently\n",
    "# ignored by CUDA, so the process would still see every GPU.\n",
    "os.environ['CUDA_VISIBLE_DEVICES']='1'\n",
    "\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "from sklearn.preprocessing import *\n",
    "\n",
    "import fastai\n",
    "from fastai.data_block import DataLoader, DataBunch, DeviceDataLoader\n",
    "from fastai.basic_train import Learner, DatasetType, LearnerCallback\n",
    "from fastai.train import lr_find, fit_one_cycle\n",
    "from fastai.callback import Callback, CallbackHandler\n",
    "from fastai.callbacks import *\n",
    "from fastai.tabular import mean_absolute_error"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "torch.__version__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "fastai.__version__"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the inflow (Qi) series, index it by timestamp, and standardise it.\n",
    "# NOTE(review): the scaler is fit on the FULL series, including the\n",
    "# validation/test periods sliced below -- mild leakage; confirm acceptable.\n",
    "qi_data = pd.read_excel('inputs/入库流量数据.xlsx', parse_dates=['TimeStample'], )\n",
    "qi_data = qi_data.set_index('TimeStample')\n",
    "qi_scaler = StandardScaler()\n",
    "qi_data['Qi'] = qi_scaler.fit_transform(qi_data['Qi'].values.reshape(-1,1))\n",
    "# Slice the periods used later as train / extra train / val / test windows.\n",
    "qi_data1 = qi_data[:'2014-11-19']\n",
    "qi_data2 = qi_data['2015-1-1':'2018-1-31']\n",
    "qi_data3 = qi_data['2018-7-1':'2018-7-31']\n",
    "qi_data4 = qi_data['2018-10-1':'2018-10-31']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Station rainfall: hourly readings covering three disjoint periods.\n",
    "telemetry_data = pd.read_csv('inputs/遥测站降雨数据.csv', parse_dates=['TimeStample'])\n",
    "telemetry_data.fillna(0, inplace=True)\n",
    "date_range1 = pd.date_range('2013-1-1 00:00:00', '2018-1-31 23:00:00', freq='1h').tolist()\n",
    "date_range2 = pd.date_range('2018-7-1 00:00:00', '2018-7-31 23:00:00', freq='1h').tolist()\n",
    "date_range3 = pd.date_range('2018-10-1 00:00:00', '2018-10-31 23:00:00', freq='1h').tolist()\n",
    "# Re-stamp rows with explicit hourly ranges; assumes the CSV rows are exactly\n",
    "# these three ranges concatenated in order -- TODO confirm row count matches.\n",
    "telemetry_data['TimeStample'] = date_range1+date_range2+date_range3\n",
    "\n",
    "# Aggregate hourly rainfall into 3-hour bins.\n",
    "telemetry_data = telemetry_data.set_index('TimeStample').resample('3h').sum()\n",
    "# Cross-station aggregates. NOTE(review): 'sum_qi' is computed AFTER\n",
    "# 'mean_qi' was added, so it also includes the mean column -- looks\n",
    "# unintended, but 'sum_qi' is not used downstream; verify before relying on it.\n",
    "telemetry_data['mean_qi'] = telemetry_data.iloc[:,:].mean(axis=1)\n",
    "telemetry_data['sum_qi'] = telemetry_data.iloc[:,:].sum(axis=1)\n",
    "# Shift bin labels forward 2h so each 3h bin is stamped at its final hour.\n",
    "telemetry_data.index = telemetry_data.index.map(lambda x: x + pd.Timedelta('2h'))\n",
    "cols = [col for col in telemetry_data.columns if col != 'TimeStample']\n",
    "telemetry_data[cols] = scale(telemetry_data[cols])\n",
    "\n",
    "# Slice the same periods as the inflow series above.\n",
    "telemetry_data1 = telemetry_data['2013-1-1':'2018-1-31']\n",
    "telemetry_data2 = telemetry_data['2018-7-1':'2018-7-31']\n",
    "telemetry_data3 = telemetry_data['2018-10-1':'2018-10-31']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Daily rainfall-forecast table; standardise the numeric columns and key the\n",
    "# rows by calendar date for the left-join performed in generate_data.\n",
    "# Presumably columns D1..D5 are five forecast lead days -- TODO confirm.\n",
    "rain_data = pd.read_excel('inputs/降雨预报数据.xlsx', parse_dates=['TimeStample'])\n",
    "cols = [col for col in rain_data.columns if col != 'TimeStample']\n",
    "rain_data[cols] = scale(rain_data[cols])\n",
    "rain_data['Date'] = rain_data['TimeStample'].map(lambda x: x.strftime('%Y-%m-%d'))\n",
    "rain_data = rain_data.drop('TimeStample', axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def get_data(qi_data, telemetry_data, rain_data, n_x, n_y, is_train):\n",
    "    \"\"\"Build a lagged feature matrix X (and target matrix Y when training).\n",
    "\n",
    "    qi_data        : inflow frame indexed by timestamp with a 'Qi' column.\n",
    "    telemetry_data : 3h-binned station-rainfall frame with a 'mean_qi' column.\n",
    "    rain_data      : unused here; kept only for a uniform call signature.\n",
    "    n_x            : number of 3h inflow lags to use as features.\n",
    "    n_y            : number of future 3h steps to use as targets (train only).\n",
    "    is_train       : True  -> return (X, Y);\n",
    "                     False -> return only the latest feature row (inference).\n",
    "    \"\"\"\n",
    "    # Inflow lags: Qi, Qi1 (t-1), ..., Qi{n_x-1}.\n",
    "    qi_data_new = qi_data[['Qi']].copy()\n",
    "    qi_cols = ['Qi']\n",
    "    for i in range(1, n_x):\n",
    "        col = 'Qi'+str(i)\n",
    "        qi_cols.append(col)\n",
    "        qi_data_new[col] = qi_data_new['Qi'].shift(i)\n",
    "    qi_data_new.dropna(inplace=True)\n",
    "\n",
    "    # Forward-shifted targets y1..y{n_y} (only needed for training rows).\n",
    "    if is_train:\n",
    "        y_cols = []\n",
    "        for i in range(1, n_y+1):\n",
    "            col = 'y'+str(i)\n",
    "            y_cols.append(col)\n",
    "            qi_data_new[col] = qi_data_new['Qi'].shift(-i)\n",
    "        qi_data_new.dropna(inplace=True)\n",
    "    # Advance the index one 3h step so a row is keyed by forecast start time.\n",
    "    qi_data_new.index = qi_data_new.index.map(lambda x:x+pd.Timedelta('3h'))\n",
    "    \n",
    "    # Station-rainfall lags of the cross-station mean.\n",
    "    telemetry_cols = ['mean_qi']\n",
    "    telemetry_data_new = telemetry_data[telemetry_cols].copy()\n",
    "    for col in ['mean_qi']:\n",
    "        for i in range(1, 8*10): # past 10 days (8 three-hour steps per day)\n",
    "            col_ = col+'-'+str(i)\n",
    "            telemetry_cols.append(col_)\n",
    "            telemetry_data_new[col_] = telemetry_data[col].shift(i) \n",
    "    telemetry_data_new.dropna(inplace=True) \n",
    "    telemetry_data_new.index = telemetry_data_new.index.map(lambda x:x+pd.Timedelta('3h'))\n",
    "    \n",
    "    qi_data_new['Date'] = qi_data_new.index.map(lambda x: x.strftime('%Y-%m-%d'))\n",
    "    \n",
    "    # Align inflow and rainfall features on the shared timestamp index.\n",
    "    data = qi_data_new.join(telemetry_data_new)\n",
    "    data.dropna(inplace=True)\n",
    "    \n",
    "    cols_use = qi_cols+telemetry_cols\n",
    "    \n",
    "    X = data[cols_use].copy()\n",
    "    \n",
    "    if is_train:\n",
    "        Y = data[y_cols].copy()\n",
    "        return X, Y\n",
    "    else:\n",
    "        return X.iloc[-1:]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "n_x = 8*10 # encoder history length: 10 days x 8 three-hour steps\n",
    "n_y = 8*7 # forecast horizon: 7 days x 8 three-hour steps"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "X1, Y1 = get_data(qi_data1, telemetry_data1, rain_data, n_x, n_y, True)\n",
    "X2, Y2 = get_data(qi_data2, telemetry_data1, rain_data, n_x, n_y, True)\n",
    "X3, Y3 = get_data(qi_data3, telemetry_data2, rain_data, n_x, n_y, True)\n",
    "X4, Y4 = get_data(qi_data4, telemetry_data3, rain_data, n_x, n_y, True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Fix: DataFrame.append was deprecated in pandas 1.4 and removed in 2.0;\n",
    "# pd.concat is the supported equivalent (same result: X1 rows then X2 rows).\n",
    "X_trn = pd.concat([X1, X2])\n",
    "Y_trn = pd.concat([Y1, Y2])\n",
    "\n",
    "X_val = X3\n",
    "Y_val = Y3\n",
    "\n",
    "X_test = X4\n",
    "Y_test = Y4"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "X_submit1 =get_data(qi_data2, telemetry_data1, rain_data, n_x, n_y, False)\n",
    "X_submit2 =get_data(qi_data3, telemetry_data2, rain_data, n_x, n_y, False)\n",
    "X_submit3 =get_data(qi_data4, telemetry_data3, rain_data, n_x, n_y, False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def nas_metric(y_true, y_pred):\n",
    "    batch_size = y_true.size()[0]\n",
    "    length = y_true.size()[1]*y_true.size()[2]\n",
    "    y_true = y_true.reshape(batch_size, length)\n",
    "    y_pred = y_pred.reshape(batch_size, length)\n",
    "    numerator = (y_true - y_pred) ** 2\n",
    "    denominator = (y_true - y_true.mean(axis=1).reshape(-1,1)) ** 2\n",
    "    r1 = numerator[:,:2*8].sum(axis=1)/denominator[:,:2*8].sum(axis=1)\n",
    "    r2 = numerator[:,2*8:].sum(axis=1)/denominator[:,2*8:].sum(axis=1)\n",
    "    return 1 - torch.mean(0.65*r1+0.35*r2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def nas_loss(y_true, y_pred, w):\n",
    "    batch_size = y_true.size()[0]\n",
    "    length = y_true.size()[1]*y_true.size()[2]\n",
    "    y_true = y_true.reshape(batch_size, length)\n",
    "    y_pred = y_pred.reshape(batch_size, length)\n",
    "    r = torch.matmul((y_true-y_pred)**2, w)/torch.sum((y_true-torch.mean(y_true, axis=1).reshape(-1,1))**2, axis=1)\n",
    "    return torch.mean(r)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def mse_loss(y_true, y_pred, w):\n",
    "    batch_size = y_true.size()[0]\n",
    "    length = y_true.size()[1]*y_true.size()[2]\n",
    "    y_true = y_true.reshape(batch_size, length)\n",
    "    y_pred = y_pred.reshape(batch_size, length)\n",
    "    r = torch.matmul((y_true-y_pred)**2, w)\n",
    "    return torch.mean(r)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def mape_loss(y_true, y_pred, w):\n",
    "    batch_size = y_true.size()[0]\n",
    "    length = y_true.size()[1]*y_true.size()[2]\n",
    "    y_true = y_true.reshape(batch_size, length)\n",
    "    y_pred = y_pred.reshape(batch_size, length)\n",
    "    r = torch.matmul((y_true-y_pred)/y_true, w)\n",
    "    return torch.mean(r)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def generate_data(x_new=None, y=None):\n",
    "    \"\"\"Split one feature row into the four input arrays the model expects.\n",
    "\n",
    "    x_new : Series whose first n_x values are inflow lags and the rest are\n",
    "            rainfall lags; its .name is the row's timestamp.\n",
    "    y     : target array, passed through unchanged (None at inference).\n",
    "    Uses the module-level globals n_x and rain_data.\n",
    "    Returns ((enc_in, telemetry_in, rain_in, emb_in), y).\n",
    "    \"\"\"\n",
    "    enc_in = x_new.values[:n_x].reshape(-1,8)\n",
    "    telemetry_in = x_new.values[n_x:]\n",
    "\n",
    "    # Calendar features for the 7 forecast days starting at this row's date.\n",
    "    data_range = pd.date_range(start=x_new.name.strftime('%Y-%m-%d'), periods=7, freq='1d')\n",
    "    X_new = pd.DataFrame(index=data_range)\n",
    "    X_new['Date'] = X_new.index.map(lambda x: x.strftime('%Y-%m-%d'))\n",
    "    # Do not change this order -- it must match the embedding layout in Seq2Seq.\n",
    "    X_new['month'] = X_new.index.map(lambda x:x.month)\n",
    "    X_new['day'] = X_new.index.map(lambda x:x.day)\n",
    "    X_new['weekday'] = X_new.index.map(lambda x: x.weekday())\n",
    "    X_new['range'] = [i for  i in range(len(X_new))]\n",
    "    emb_in = X_new[['month', 'day','weekday', 'range']].values.reshape(-1, 4)\n",
    "\n",
    "    # Attach the daily rainfall forecast; days without a forecast become 0.\n",
    "    X_new = pd.merge(X_new, rain_data, how='left', left_on='Date', right_on='Date')\n",
    "    X_new.fillna(0, inplace=True)\n",
    "    rain_in = X_new[['D1','D2','D3','D4','D5']].values.reshape(-1, 5)   \n",
    "        \n",
    "    return (enc_in,telemetry_in,rain_in,emb_in), y  "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "class MyDataset(Dataset):\n",
    "    \"\"\"Wraps the lagged feature/target frames, striding by 8 rows so each\n",
    "    sample starts on a new day (8 three-hour points per day). Samples are\n",
    "    split into model inputs via the module-level generate_data().\"\"\"\n",
    "    def __init__(self, x=None, y=None):\n",
    "        self.x = x.iloc[::8] # stride by whole days: 8 points per day\n",
    "        self.y = y.iloc[::8] # stride by whole days: 8 points per day\n",
    "    def __getitem__(self, index):\n",
    "        x_new = self.x.iloc[index, :]\n",
    "        # Targets reshaped to (days, 8 steps per day).\n",
    "        y = self.y.iloc[index, :].values.reshape(-1, 8)\n",
    "        (enc_in,telemetry_in,rain_in,emb_in ), y = generate_data(x_new, y)\n",
    "        return (enc_in,telemetry_in,rain_in,emb_in) ,y\n",
    "    \n",
    "    def __len__(self):\n",
    "        return len(self.x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "class Encoder(nn.Module):\n",
    "    def __init__(self, input_dim, hid_dim, n_layers, dropout):\n",
    "        super().__init__()\n",
    "        \n",
    "        self.rnn = nn.LSTM(input_dim, hid_dim, n_layers, dropout = dropout)\n",
    "        \n",
    "        self.dropout = nn.Dropout(dropout)\n",
    "        \n",
    "    def forward(self, src):\n",
    "        \n",
    "        #src = [src sent len, batch size, input_dim]\n",
    "        \n",
    "        outputs, (hidden, cell) = self.rnn(src)\n",
    "        \n",
    "        return hidden, cell"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "class Decoder(nn.Module):\n",
    "    def __init__(self, input_dim, output_dim, hid_dim, n_layers, dropout):\n",
    "        super().__init__()\n",
    "        \n",
    "        self.hid_dim = hid_dim\n",
    "        self.output_dim = output_dim\n",
    "        self.n_layers = n_layers\n",
    "        self.dropout = dropout\n",
    "        \n",
    "        self.rnn = nn.LSTM(input_dim, hid_dim, n_layers, dropout = dropout)\n",
    "        \n",
    "        self.out = nn.Linear(hid_dim, output_dim)\n",
    "        \n",
    "        self.dropout = nn.Dropout(dropout)\n",
    "        \n",
    "    def forward(self, inp, hidden, cell):\n",
    "        \n",
    "        #input = [batch size]\n",
    "        #hidden = [n layers * n directions, batch size, hid dim]\n",
    "        #cell = [n layers * n directions, batch size, hid dim]\n",
    "        inp = inp.unsqueeze(0)\n",
    "                \n",
    "        output, (hidden, cell) = self.rnn(inp, (hidden, cell))\n",
    "        \n",
    "        #output = [sent len, batch size, hid dim * n directions]\n",
    "        #hidden = [n layers * n directions, batch size, hid dim]\n",
    "        #cell = [n layers * n directions, batch size, hid dim]\n",
    "        \n",
    "        prediction = self.out(output.squeeze(0))\n",
    "        \n",
    "        #prediction = [batch size, output dim]\n",
    "        \n",
    "        return prediction, hidden, cell"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "class Seq2Seq(nn.Module):\n",
    "    \"\"\"\n",
    "    The encoder consumes the historical inflow series; the decoder input\n",
    "    combines a station-rainfall sub-model, date features and the weather\n",
    "    forecast. One decoder step per forecast day, each emitting 8 values.\n",
    "    \"\"\"\n",
    "    def __init__(self, n_telemetry, out_len, encoder, decoder, device):\n",
    "        super().__init__()\n",
    "        self.encoder = encoder\n",
    "        self.decoder = decoder\n",
    "        self.out_len = out_len\n",
    "        self.device = device\n",
    "        # Station-rainfall sub-model; only the mean rainfall is used here\n",
    "        # (one linear layer per forecast day) -- room for improvement.\n",
    "        self.linears = nn.ModuleList([nn.Linear(n_telemetry, 8) for i in range(out_len)])\n",
    "\n",
    "        # month, day, weekday, position (which forecast day); emb sizes 3+4+3+3\n",
    "        self.embs = nn.ModuleList([nn.Embedding(i, emb_dim) for i,emb_dim in zip([13, 32, 8, 8],[3,4,3,3])])\n",
    "    \n",
    "        self.train_losses = []\n",
    "        self.val_losses = []\n",
    "        \n",
    "        # Per-step loss weights: 0.65 for the first 2 days, 0.35 for the rest.\n",
    "        # NOTE(review): from_numpy keeps float64 while the model outputs are\n",
    "        # float32 -- relies on torch type promotion in the loss; confirm.\n",
    "        self.loss_weights = torch.from_numpy(np.array([0.65]*2*8+[0.35]*5*8)).to(self.device)\n",
    "        \n",
    "    def forward(self, enc_in, telemetry_in, rain_in, emb_in):\n",
    "        # enc_in [seq_len, batch, dim]\n",
    "        # telemetry_in [n, batch]\n",
    "        # emb_in [seq_len, batch ,5]\n",
    "        \n",
    "        enc_in = enc_in.transpose(0,1).float() # encoder input (inflow history)\n",
    "        telemetry_in = telemetry_in.transpose(0,1).float() # station rainfall input\n",
    "        rain_in = rain_in.transpose(0,1).float() # weather-forecast input\n",
    "        emb_in = emb_in.transpose(0,1) # date-feature input (integer codes)\n",
    "        \n",
    "        batch_size = enc_in.shape[1]\n",
    "        max_len = self.out_len\n",
    "        \n",
    "        outputs = torch.zeros(max_len, batch_size, self.decoder.output_dim).to(self.device)\n",
    "        hidden, cell = self.encoder(enc_in)\n",
    "        \n",
    "        for t in range(max_len): # one decoder step per forecast day\n",
    "            # [n, batch]\n",
    "            t_in = self.linears[t](telemetry_in.transpose(1,0))\n",
    "            embs = []\n",
    "            for i in range(4): # month, day, weekday, position (forecast day)\n",
    "                # emb [emb_dim, batch]\n",
    "                emb = self.embs[i](emb_in[t][:,i])\n",
    "                embs.append(emb)\n",
    "            r_in = rain_in[t]   \n",
    "            # Decoder input: rainfall sub-model + forecast + date embeddings.\n",
    "            dec_in = torch.cat([t_in, r_in]+embs, dim=1)\n",
    "            output, hidden, cell = self.decoder(dec_in, hidden, cell)\n",
    "            outputs[t] = output\n",
    "        return outputs.transpose(0,1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "device = 'cpu:0'\n",
    "enc = Encoder(input_dim=8, hid_dim=32, n_layers=2, dropout=0.5)\n",
    "dec = Decoder(input_dim=26, output_dim=8, hid_dim=32, n_layers=2, dropout=0.5)\n",
    "model = Seq2Seq(n_telemetry=10*8, out_len=7, encoder=enc, decoder=dec, device=device).to(device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "trn_set = ConcatDataset([MyDataset(X_trn, Y_trn),MyDataset(X_val, Y_val)])\n",
    "val_set = MyDataset(X_test, Y_test)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "batch_size = 8"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "trn_dl = DataLoader(trn_set, batch_size=batch_size)\n",
    "val_dl = DataLoader(val_set, batch_size=batch_size)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "opt = optim.Adam(model.parameters())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "dls = DataBunch(trn_dl, val_dl, device=device)\n",
    "# NOTE(review): fastai calls loss_func(pred, target), so 'y' here receives\n",
    "# the prediction and 'y_' the target -- harmless for the symmetric mse_loss,\n",
    "# but would matter if nas_loss/mape_loss were swapped in. Verify.\n",
    "loss_func = lambda y,y_:mse_loss(y,y_,model.loss_weights)\n",
    "learn = Learner(dls, model, loss_func=loss_func)\n",
    "learn.opt = opt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "learn.fit_one_cycle(5, 1e-3)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# One 7-day forecast per submission window, from its latest feature row.\n",
    "model.eval()\n",
    "submit_pred = []\n",
    "for x_submit in [X_submit1, X_submit2, X_submit3]:\n",
    "    (enc_in,telemetry_in,rain_in,emb_in), y = generate_data(x_submit.iloc[-1], None)\n",
    "    \n",
    "    # Add the batch dimension the model expects.\n",
    "    enc_in = np.expand_dims(enc_in,0)\n",
    "    telemetry_in = np.expand_dims(telemetry_in,0)\n",
    "    rain_in = np.expand_dims(rain_in,0)\n",
    "    emb_in = np.expand_dims(emb_in,0)\n",
    "    print(enc_in.shape, telemetry_in.shape, rain_in.shape, emb_in.shape)\n",
    "    outputs = learn.model(torch.Tensor(enc_in).to(device), \n",
    "                           torch.Tensor(telemetry_in).to(device), \n",
    "                           torch.Tensor(rain_in).to(device), \n",
    "                           torch.LongTensor(emb_in).to(device)).detach().cpu().numpy()\n",
    "    # Undo the StandardScaler to get flows in original units.\n",
    "    # NOTE(review): newer scikit-learn requires a 2-D array for\n",
    "    # inverse_transform; .flatten() is 1-D -- may need reshape(-1, 1).\n",
    "    pred = qi_scaler.inverse_transform(outputs.flatten())\n",
    "    submit_pred.append(pred)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "plt.plot(submit_pred[0])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "plt.plot(submit_pred[1])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "plt.plot(submit_pred[2])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
