{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "from datetime import date, timedelta\n",
    "\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "from tqdm.notebook import tqdm\n",
    "\n",
    "import joblib\n",
    "from joblib import Parallel, delayed\n",
    "\n",
    "import datetime\n",
    "\n",
    "from tsfresh.feature_extraction import *\n",
    "from tsfresh.feature_extraction.settings import *"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "def proc_data(df):\n",
    "    return df.set_index('TimeStample')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "def fill_data(df):\n",
    "    d = pd.DataFrame(index=pd.date_range('2013-1-1', '2020-1-1'))\n",
    "    d.index.name = 'TimeStample'\n",
    "    df = d.join(df.set_index('TimeStample')).reset_index()\n",
    "    df.fillna(0, inplace=True)\n",
    "    return df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# DataFrame.append was deprecated in pandas 1.4 and removed in 2.0 -- use pd.concat\n",
    "qi_data1 = pd.read_excel('inputs/入库流量数据.xlsx', parse_dates=['TimeStample'])\n",
    "qi_data2 = pd.read_excel('inputs/入库流量数据1.xlsx', parse_dates=['TimeStample'])\n",
    "qi_data = pd.concat([qi_data1, qi_data2])\n",
    "qi_data = proc_data(qi_data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "telemetry_data1 = pd.read_csv('inputs/遥测站降雨数据.csv', parse_dates=['TimeStample'])\n",
    "telemetry_data2 = pd.read_excel('inputs/遥测站降雨数据1.xlsx', parse_dates=['TimeStample'])\n",
    "# pd.concat replaces the removed DataFrame.append\n",
    "telemetry_data = pd.concat([telemetry_data1, telemetry_data2])\n",
    "telemetry_data = proc_data(telemetry_data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "rain_data1 = pd.read_excel('inputs/降雨预报数据.xlsx', parse_dates=['TimeStample'])\n",
    "rain_data2 = pd.read_excel('inputs/降雨预报数据1.xlsx', parse_dates=['TimeStample'])\n",
    "# pd.concat replaces the removed DataFrame.append\n",
    "rain_data = pd.concat([rain_data1, rain_data2])\n",
    "rain_data = rain_data.drop(['5天合计'], axis=1)  # drop the 5-day-total column\n",
    "rain_data = fill_data(rain_data)\n",
    "rain_data = proc_data(rain_data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "env_data1 = pd.read_excel('inputs/环境表.xlsx', parse_dates=['TimeStample'])[['TimeStample','T','w']]\n",
    "env_data2 = pd.read_csv('inputs/环境表1.csv', parse_dates=['TimeStample'])[['TimeStample','T','w']]\n",
    "# pd.concat replaces the removed DataFrame.append\n",
    "env_data = pd.concat([env_data1, env_data2.dropna()], ignore_index=True)\n",
    "env_data = fill_data(env_data)\n",
    "env_data = proc_data(env_data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "def get_timespan(df, dt, minus, periods, freq='3h'):\n",
    "    # dt:某天 如 pd.to_datetime('2017-1-1')\n",
    "    # minus：历史minus天 如3\n",
    "    # periods: 如 3*8=24\n",
    "    # 返回：2016-12-29 02:00:00 到 2016-12-31 23:00:00共24个点的数据\n",
    "    return df.loc[pd.date_range(dt - timedelta(days=minus) , periods=periods, freq=freq)]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Usage examples for get_timespan:\n",
    "# dt = pd.to_datetime('2017-1-1')\n",
    "# 24 three-hourly inflow points, 2016-12-29 02:00:00 .. 2016-12-31 23:00:00\n",
    "# get_timespan(qi_data, dt+pd.Timedelta('2h'), 3, 3*8, '3h')\n",
    "\n",
    "# environment data for the 3 days 2016-12-29 .. 2016-12-31\n",
    "# get_timespan(env_data, dt, 3, 3, '1D')\n",
    "\n",
    "# telemetry-station rainfall for the 3 days 2016-12-29 .. 2016-12-31 (hourly)\n",
    "# get_timespan(telemetry_data, dt+pd.Timedelta('1h'), 3, 3*24, '1h')\n",
    "\n",
    "# rain-forecast data for the 3 days 2017-01-01 .. 2017-01-03\n",
    "# get_timespan(rain_data, dt, 0, 3, '1D')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "dr = pd.date_range('2013-1-1','2020-1-1',freq='1d')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "2fd0fcdef1d94de1b40d31c4dfb4baa5",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "HBox(children=(FloatProgress(value=0.0, max=2557.0), HTML(value='')))"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    }
   ],
   "source": [
    "# Build the long-format series fed to tsfresh: for every anchor date, the\n",
    "# 56 three-hourly inflow points of the preceding 7 days, tagged with the date.\n",
    "ts_l = []\n",
    "for dt in tqdm(dr):\n",
    "    try:\n",
    "        d = get_timespan(qi_data, dt+pd.Timedelta('2h'), 7, 7*8, '3h')\n",
    "    except KeyError:\n",
    "        # bare `except:` replaced: only skip anchor dates whose 7-day\n",
    "        # window is not fully covered by qi_data's index\n",
    "        continue\n",
    "    ts = pd.DataFrame(d.values, columns=['qi'])\n",
    "    ts['date'] = dt\n",
    "    ts_l.append(ts)\n",
    "\n",
    "qi_ts = pd.concat(ts_l)\n",
    "qi_ts['qi'] = qi_ts['qi'].astype(float)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Feature Extraction: 100%|███████████| 153/153 [00:07<00:00, 20.02it/s]\n"
     ]
    }
   ],
   "source": [
    "qi_ts_fea = extract_features(qi_ts, default_fc_parameters=EfficientFCParameters(), column_id='date')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "def get_dataset(dt,is_train=True):\n",
    "    \"\"\"Build one training/inference sample anchored at date `dt`.\n",
    "\n",
    "    Features are assembled from the module-level frames qi_ts_fea, qi_data,\n",
    "    telemetry_data, env_data and rain_data.  Raises KeyError when a required\n",
    "    time window is missing from any of those frames.\n",
    "\n",
    "    Returns (X, y) when is_train, else X alone, where X is a 1-row DataFrame\n",
    "    indexed by `dt` and y holds the 7*8 three-hourly inflow values after dt.\n",
    "    \"\"\"\n",
    "    X = qi_ts_fea.loc[dt].to_dict()\n",
    "#     X = {}\n",
    "    # raw inflow: the 80 three-hourly points of the 10 days before dt\n",
    "    tmp = get_timespan(qi_data, dt+pd.Timedelta('2h'), 10, 10*8, '3h')\n",
    "    for i in range(10*8):\n",
    "        X['qi_%s' % i] = tmp.values[i]\n",
    "    for i in range(8):\n",
    "        # statistics per intra-day 3h slot, across the 10 history days\n",
    "        X['qi_%s_mean' % i] = tmp.values[i::8].mean()\n",
    "        X['qi_%s_median' % i] = np.median(tmp.values[i::8])\n",
    "        X['qi_%s_max' % i] = np.max(tmp.values[i::8])\n",
    "        X['qi_%s_min' % i] = np.min(tmp.values[i::8])\n",
    "    for i in range(1,11):\n",
    "        # inflow statistics over trailing windows of 1..10 days\n",
    "        tmp = get_timespan(qi_data, dt+pd.Timedelta('2h'), i, i*8, '3h')\n",
    "        X['qi_diff_%s_mean' % i] = tmp.diff(i).mean()\n",
    "        X['qi_mean_%s_decay' % i] = (tmp.values * np.power(0.9, np.arange(i*8))).mean()\n",
    "        X['qi_mean_%s' % i] = tmp.mean()\n",
    "        X['qi_median_%s' % i] = tmp.median()\n",
    "        X['qi_min_%s' % i] = tmp.min()\n",
    "        X['qi_max_%s' % i] = tmp.max()\n",
    "        X['qi_std_%s' % i] = tmp.std()\n",
    "        \n",
    "    # telemetry-station rainfall: hourly data for the 10 days before dt\n",
    "    tmp = get_timespan(telemetry_data, dt, 10, 10*24, '1h')\n",
    "    for i in range(10):\n",
    "        X['telemetry_sum_%sd' % i] = tmp.sum(axis=1)[i*24:(i+1)*24].sum()\n",
    "    for i in range(1, 11):\n",
    "        tmp = get_timespan(telemetry_data, dt, i, i*24, '1h')\n",
    "        X['telemetry_sum_mean_%s' % i] = tmp.sum(axis=0).mean()\n",
    "        X['telemetry_sum_max_%s' % i] = tmp.sum(axis=0).max()\n",
    "        X['telemetry_sum_min_%s' % i] = tmp.sum(axis=0).min()\n",
    "        X['telemetry_sum_std_%s' % i] = tmp.sum(axis=0).std()\n",
    "        # per-station rainfall weighted by 0.9**hour_index before summing\n",
    "        decay_tmp = (tmp.values.T * (np.power(0.9, np.arange(i*24)))).T\n",
    "        X['telemetry_mean_%s_decay' % i] = decay_tmp.sum(axis=0).mean()\n",
    "        X['telemetry_max_%s_decay' % i] = decay_tmp.sum(axis=0).max()\n",
    "        X['telemetry_min_%s_decay' % i] = decay_tmp.sum(axis=0).min()\n",
    "        X['telemetry_std_%s_decay' % i] = decay_tmp.sum(axis=0).std()\n",
    "        X['telemetry_mean_%s' % i] = tmp.mean(axis=0).mean()\n",
    "        X['telemetry_median_%s' % i] = tmp.median(axis=0).mean()\n",
    "        X['telemetry_min_%s' % i] = tmp.min(axis=0).mean()\n",
    "        X['telemetry_max_%s' % i] = tmp.max(axis=0).mean()\n",
    "        X['telemetry_std_%s' % i] = tmp.std(axis=0).mean()\n",
    "\n",
    "    # environment ('T' and 'w' columns) statistics over 1..6 history days\n",
    "    for i in range(1,7):\n",
    "        tmp = get_timespan(env_data, dt, i, i, '1D')\n",
    "        X['env_t_median_%s' % i] = tmp['T'].median()\n",
    "        X['env_t_min_%s' % i] = tmp['T'].min()\n",
    "        X['env_t_max_%s' % i] = tmp['T'].max()\n",
    "        if i > 1:\n",
    "            X['env_t_std_%s' % i] = tmp['T'].std()\n",
    "\n",
    "        X['env_w_median_%s' % i] = tmp['w'].median()\n",
    "        X['env_w_min_%s' % i] = tmp['w'].min()\n",
    "        X['env_w_max_%s' % i] = tmp['w'].max()\n",
    "        if i > 1:\n",
    "            X['env_w_std_%s' % i] = tmp['w'].std()\n",
    "\n",
    "    for i in range(7):\n",
    "        # rain forecast (column 'D1') for the next 7 days, starting at dt\n",
    "        tmp = get_timespan(rain_data, dt, 0, i+1, '1D')\n",
    "        X['rain_median_%s' % i] = tmp['D1'].median()\n",
    "        X['rain_mean_%s' % i] = tmp['D1'].mean()\n",
    "        X['rain_min_%s' % i] = tmp['D1'].min()\n",
    "        X['rain_max_%s' % i] = tmp['D1'].max()\n",
    "        if i > 0:\n",
    "            X['rain_std_%s' % i] = tmp['D1'].std()\n",
    "\n",
    "        # single-day forecast value i days after dt\n",
    "        tmp = get_timespan(rain_data, dt, -i, 1, '1D')\n",
    "        X['rain_%s' % i] = tmp['D1'].astype(float).values\n",
    "    X['date'] = dt\n",
    "    X = pd.DataFrame(X,index=[dt])  \n",
    "    if is_train:\n",
    "        # target: the 7*8 three-hourly inflow values following dt\n",
    "        y = qi_data.loc[pd.date_range(dt+pd.Timedelta('2h'), periods=7*8, freq='3h')].T\n",
    "        return X, y\n",
    "    else:\n",
    "        return X"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "b7a2d09a851b47a88456e8e05ebfda27",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "HBox(children=(FloatProgress(value=0.0, max=1628.0), HTML(value='')))"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    }
   ],
   "source": [
    "# Anchor dates used to build the training set.\n",
    "# Bug fix: the original chain contained pd.date_range('2019-5-11', '2019-4-23'),\n",
    "# whose end precedes its start and silently yields no dates; corrected to 2019-5-23.\n",
    "spans = [\n",
    "    ('2015-3-1', '2017-12-12'),\n",
    "    ('2013-4-1', '2014-9-1'),\n",
    "    ('2018-1-11', '2018-1-23'),\n",
    "    ('2018-7-11', '2018-7-23'),\n",
    "    ('2018-10-11', '2018-10-23'),\n",
    "    ('2019-1-11', '2019-1-23'),\n",
    "    ('2019-3-11', '2019-3-23'),\n",
    "    ('2019-5-11', '2019-5-23'),\n",
    "    ('2019-7-11', '2019-7-23'),\n",
    "    ('2019-10-11', '2019-10-23'),\n",
    "]\n",
    "dr = [d for start, end in spans for d in pd.date_range(start, end, freq='1d')]\n",
    "res = Parallel(n_jobs=32)(delayed(get_dataset)(dt) for dt in tqdm(dr))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "# stack the per-date (features, target) pairs produced by the parallel build\n",
    "X_train = pd.concat([features for features, _ in res], axis=0)\n",
    "y_train = np.concatenate([target for _, target in res], axis=0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Order samples chronologically by anchor date; replaces the manual\n",
    "# (date, position) tuple sort with a vectorized stable argsort\n",
    "# (kind='stable' matches Python sorted()'s tie behavior).\n",
    "order = np.argsort(X_train['date'].values, kind='stable')\n",
    "X_train_all = X_train.iloc[order]\n",
    "y_train_all = y_train[order]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
