{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "import numpy as np\n",
    "import math\n",
    "from tqdm import tqdm\n",
    "from sklearn.metrics import mean_squared_error,explained_variance_score\n",
    "from sklearn.model_selection import KFold\n",
    "import lightgbm as lgb\n",
    "test_data_path = '../data/A_testData0531.csv'\n",
    "train_gps_path = '../data/train0523.csv'\n",
    "port_path = '../data/port.csv'\n",
    "result_path = '../result/result_local.csv'\n",
    "\n",
    "# import moxing as mox\n",
    "# OBS_DATA_PATH = \"s3://ship-eta/data/train0523.csv\"\n",
    "# OBS_TEST_PATH = \"s3://ship-eta/data/A_testData0531.csv\"\n",
    "# OBS_RES_PATH =  \"s3://ship-eta/result/result_local.csv\"\n",
    "# mox.file.copy_parallel(OBS_DATA_PATH, train_gps_path)\n",
    "# mox.file.copy_parallel(OBS_TEST_PATH, test_data_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array(['CNYTN-MXZLO', 'CNSHK-MYTPP', 'CNSHK-SGSIN', 'CNSHK-CLVAP',\n",
       "       'CNYTN-ARENA', 'CNYTN-MATNG', 'CNSHK-GRPIR', 'CNSHK-PKQCT',\n",
       "       'COBUN-HKHKG', 'CNYTN-PAONX', 'CNSHK-SIKOP', 'CNYTN-CAVAN',\n",
       "       'CNSHK-ESALG', 'CNYTN-MTMLA', 'CNSHK-ZADUR', 'CNSHK-LBBEY',\n",
       "       'CNSHA-SGSIN', 'CNYTN-RTM', 'CNHKG-MXZLO', 'HKHKG-FRFOS',\n",
       "       'CNYTN-NZAKL', 'CNSHA-PAMIT'], dtype=object)"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "def format_data_type(data, mode='train'):\n",
    "    # Normalize column dtypes in place and return the same frame.\n",
    "    # mode='test' additionally parses onboardDate, keeps a copy of the raw\n",
    "    # timestamp values and adds an empty ETA column to be filled later.\n",
    "    if mode=='test':\n",
    "        # infer_datetime_format is deprecated (pandas >= 1.5, a no-op in 2.x);\n",
    "        # plain to_datetime infers the format automatically.\n",
    "        data['onboardDate'] = pd.to_datetime(data['onboardDate'])\n",
    "        data['temp_timestamp'] = data['timestamp']\n",
    "        data['ETA'] = None\n",
    "    data['creatDate'] = None\n",
    "    data['loadingOrder'] = data['loadingOrder'].astype(str)\n",
    "    data['timestamp'] = pd.to_datetime(data['timestamp'])\n",
    "    data['longitude'] = data['longitude'].astype(float)\n",
    "    data['latitude'] = data['latitude'].astype(float)\n",
    "    data['speed'] = data['speed'].astype(float)\n",
    "    data['TRANSPORT_TRACE'] = data['TRANSPORT_TRACE'].astype(str)\n",
    "    return data\n",
    "\n",
    "def get_test_data_info(path):\n",
    "    data = pd.read_csv(path) \n",
    "    test_trace_set = data['TRANSPORT_TRACE'].unique()\n",
    "    test_order_belong_to_trace = {}\n",
    "    for item in test_trace_set:\n",
    "        orders = data[data['TRANSPORT_TRACE'] == item]['loadingOrder'].unique()\n",
    "        test_order_belong_to_trace[item] = orders\n",
    "    return format_data_type(data, mode='test'), test_trace_set, test_order_belong_to_trace\n",
    "\n",
    "test_data_origin, test_trace_set, test_order_belong_to_trace = get_test_data_info(test_data_path)\n",
    "test_trace_set"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "def get_port_info():\n",
    "    # Collect coordinates for every port that appears in a test route.\n",
    "    # Returns {port_name: {'LONGITUDE': ..., 'LATITUDE': ...}}.\n",
    "    test_port_set = set()\n",
    "    for route in test_trace_set:\n",
    "        test_port_set.update(route.split('-'))\n",
    "    port_data_origin = pd.read_csv(port_path)\n",
    "    port_data = {}\n",
    "    for item in port_data_origin.itertuples():\n",
    "        port_name = getattr(item, 'TRANS_NODE_NAME')\n",
    "        if port_name in test_port_set:\n",
    "            port_data[port_name] = {'LONGITUDE': getattr(item, 'LONGITUDE'), 'LATITUDE': getattr(item, 'LATITUDE')}\n",
    "    return port_data\n",
    "port_data = get_port_info()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "  0%|          | 0/22 [00:00<?, ?it/s]\n",
      "0it [00:03, ?it/s]\u001b[A\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "      loadingOrder  latitude_min  latitude_max  latitude_mean  \\\n",
      "0   BV874211308307     22.570732      22.57197      22.571558   \n",
      "1   CA939455313964     22.570732      22.57197      22.571558   \n",
      "2   BV874211308307     22.570732      22.57197      22.571558   \n",
      "3   CA939455313964     22.570732      22.57197      22.571558   \n",
      "4   BV874211308307     22.570732      22.57197      22.571558   \n",
      "..             ...           ...           ...            ...   \n",
      "59  CA939455313964     22.570732      22.57197      22.571558   \n",
      "60  BV874211308307     22.570732      22.57197      22.571558   \n",
      "61  CA939455313964     22.570732      22.57197      22.571558   \n",
      "62  BV874211308307     22.570732      22.57197      22.571558   \n",
      "63  CA939455313964     22.570732      22.57197      22.571558   \n",
      "\n",
      "    latitude_median  longitude_min  longitude_max  longitude_mean  \\\n",
      "0         22.571569     114.268783     114.269963      114.269682   \n",
      "1         22.571569     114.268783     114.269963      114.269682   \n",
      "2         22.571569     114.268783     114.269963      114.269682   \n",
      "3         22.571569     114.268783     114.269963      114.269682   \n",
      "4         22.571569     114.268783     114.269963      114.269682   \n",
      "..              ...            ...            ...             ...   \n",
      "59        22.571569     114.268783     114.269963      114.269682   \n",
      "60        22.571569     114.268783     114.269963      114.269682   \n",
      "61        22.571569     114.268783     114.269963      114.269682   \n",
      "62        22.571569     114.268783     114.269963      114.269682   \n",
      "63        22.571569     114.268783     114.269963      114.269682   \n",
      "\n",
      "    longitude_median  speed_min  speed_max  speed_mean  speed_median    label  \n",
      "0         114.269708        0.0        0.0         0.0           0.0  12699.0  \n",
      "1         114.269708        0.0        0.0         0.0           0.0  12699.0  \n",
      "2         114.269708        0.0        0.0         0.0           0.0  12699.0  \n",
      "3         114.269708        0.0        0.0         0.0           0.0  12699.0  \n",
      "4         114.269708        0.0        0.0         0.0           0.0  12699.0  \n",
      "..               ...        ...        ...         ...           ...      ...  \n",
      "59        114.269708        0.0        0.0         0.0           0.0  12699.0  \n",
      "60        114.269708        0.0        0.0         0.0           0.0  12699.0  \n",
      "61        114.269708        0.0        0.0         0.0           0.0  12699.0  \n",
      "62        114.269708        0.0        0.0         0.0           0.0  12699.0  \n",
      "63        114.269708        0.0        0.0         0.0           0.0  12699.0  \n",
      "\n",
      "[64 rows x 14 columns]\n",
      "Training until validation scores don't improve for 100 rounds\n",
      "[100]\tvalid_0's l2: 1.61265e+08\tvalid_0's mse_score: 1.61265e+08\n",
      "Early stopping, best iteration is:\n",
      "[1]\tvalid_0's l2: 1.61265e+08\tvalid_0's mse_score: 1.61265e+08\n",
      "Training until validation scores don't improve for 100 rounds\n",
      "[100]\tvalid_0's l2: 1.61265e+08\tvalid_0's mse_score: 1.61265e+08\n",
      "Early stopping, best iteration is:\n",
      "[1]\tvalid_0's l2: 1.61265e+08\tvalid_0's mse_score: 1.61265e+08\n",
      "Training until validation scores don't improve for 100 rounds\n",
      "[100]\tvalid_0's l2: 1.61265e+08\tvalid_0's mse_score: 1.61265e+08\n",
      "Early stopping, best iteration is:\n",
      "[1]\tvalid_0's l2: 1.61265e+08\tvalid_0's mse_score: 1.61265e+08\n",
      "Training until validation scores don't improve for 100 rounds\n",
      "[100]\tvalid_0's l2: 1.61265e+08\tvalid_0's mse_score: 1.61265e+08\n",
      "Early stopping, best iteration is:\n",
      "[1]\tvalid_0's l2: 1.61265e+08\tvalid_0's mse_score: 1.61265e+08\n",
      "Training until validation scores don't improve for 100 rounds\n",
      "[100]\tvalid_0's l2: 1.61265e+08\tvalid_0's mse_score: 1.61265e+08\n",
      "Early stopping, best iteration is:\n",
      "[1]\tvalid_0's l2: 1.61265e+08\tvalid_0's mse_score: 1.61265e+08\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "  0%|          | 0/22 [00:04<?, ?it/s]\n"
     ]
    }
   ],
   "source": [
    "def get_train_order_by_route(route):\n",
    "    # Scan the (huge) train GPS csv in chunks and return rows of all loading\n",
    "    # orders whose TRANSPORT_TRACE starts at the route's origin port and\n",
    "    # contains its destination port. Raises ValueError if nothing matches\n",
    "    # (previously this crashed with an opaque AttributeError on None).\n",
    "    ports = route.split(\"-\")\n",
    "    start_port = ports[0]\n",
    "    dest_port = ports[-1]\n",
    "    train_order_by_route = None\n",
    "    train_data_origin_chunk = pd.read_csv(train_gps_path, chunksize = 2000000, usecols = [0,2,3,4,6,12], header=None)\n",
    "    for chunk in tqdm(train_data_origin_chunk):\n",
    "        valid_order_name = chunk[chunk[12].apply(lambda x: str(x).startswith(start_port) and (dest_port in str(x)))][0].unique()\n",
    "        if (valid_order_name.size > 0):\n",
    "            valid_order_info = chunk[chunk[0].isin(valid_order_name)]\n",
    "            train_order_by_route = pd.concat([train_order_by_route,valid_order_info])\n",
    "            # NOTE: stops at the first chunk containing matches; rows of the same\n",
    "            # orders in later chunks are ignored (speed over completeness).\n",
    "            break\n",
    "    if train_order_by_route is None:\n",
    "        raise ValueError('no training orders found for route {}'.format(route))\n",
    "    train_order_by_route.columns = ['loadingOrder','timestamp','longitude','latitude','speed', 'TRANSPORT_TRACE']\n",
    "    data = format_data_type(train_order_by_route, mode='train')\n",
    "    return data\n",
    "\n",
    "def get_train_data(route_order_info, route):\n",
    "    # Build one aggregated feature row per training order plus the travel-time\n",
    "    # label in seconds (departure -> arrival at the destination port).\n",
    "    ports = route.split(\"-\")\n",
    "    dest_port = ports[-1]\n",
    "    dest_longitude = port_data[dest_port]['LONGITUDE']\n",
    "    dest_latitude = port_data[dest_port]['LATITUDE']\n",
    "    train_data = None\n",
    "    order_list = route_order_info['loadingOrder'].unique()\n",
    "    for order in order_list:\n",
    "        order_info_set = route_order_info[route_order_info['loadingOrder'] == order].sort_values(by='timestamp')\n",
    "        # Departure time: first record with positive speed. Fall back to the\n",
    "        # earliest timestamp so an order that never moves cannot raise NameError\n",
    "        # (previously start_time was unbound when no record had speed > 0).\n",
    "        start_time = order_info_set['timestamp'].min()\n",
    "        for info_item in order_info_set.itertuples():\n",
    "            if getattr(info_item, 'speed') > 0:\n",
    "                start_time = getattr(info_item, 'timestamp')\n",
    "                break\n",
    "        # Arrival time: first GPS fix within ~1 degree of the destination port;\n",
    "        # defaults to the last recorded timestamp.\n",
    "        end_time = order_info_set['timestamp'].max()\n",
    "        for info_item in order_info_set.itertuples():\n",
    "            if abs(getattr(info_item, 'longitude') - dest_longitude) < 1 and abs(getattr(info_item, 'latitude') - dest_latitude) < 1:\n",
    "                end_time = min(end_time, getattr(info_item, 'timestamp'))\n",
    "                break\n",
    "        # Keep only the first 40% of the track so the features mimic a query\n",
    "        # made while the ship is still en route.\n",
    "        cut_size = math.ceil(order_info_set.shape[0]*0.4)\n",
    "        order_info_set = order_info_set[0:cut_size]\n",
    "        \n",
    "        agg_function = ['min', 'max', 'mean', 'median']\n",
    "        agg_col = ['latitude', 'longitude', 'speed']\n",
    "        feature_temp = order_info_set.groupby('loadingOrder')[agg_col].agg(agg_function).reset_index()\n",
    "        feature_temp.columns = ['loadingOrder'] + ['{}_{}'.format(i, j) for i in agg_col for j in agg_function]\n",
    "        # Label: sailing duration in seconds.\n",
    "        feature_temp['label'] = (end_time - start_time).total_seconds()\n",
    "        train_data = pd.concat([train_data,feature_temp])\n",
    "    # Up-sample tiny routes (x32) so the KFold split below has enough rows.\n",
    "    if (train_data.shape[0] < 10):\n",
    "        for i in range(5):\n",
    "            train_data = pd.concat([train_data,train_data])\n",
    "    return train_data.reset_index(drop=True)\n",
    "def get_test_data(order):\n",
    "    # Aggregate the GPS track of a single test order into one feature row\n",
    "    # with the same columns as the training features.\n",
    "    order_rows = test_data_origin[test_data_origin['loadingOrder'] == order].sort_values(by='timestamp')\n",
    "    agg_function = ['min', 'max', 'mean', 'median']\n",
    "    agg_col = ['latitude', 'longitude', 'speed']\n",
    "    feature = order_rows.groupby('loadingOrder')[agg_col].agg(agg_function).reset_index()\n",
    "    feature.columns = ['loadingOrder'] + ['{}_{}'.format(col, fn) for col in agg_col for fn in agg_function]\n",
    "    return feature.reset_index(drop=True)\n",
    "def mse_score_eval(preds, valid):\n",
    "    # Custom LightGBM eval metric: mean squared error on the validation set.\n",
    "    labels = valid.get_label()\n",
    "    scores = mean_squared_error(y_true=labels, y_pred=preds)\n",
    "    # MSE is a loss: lower is better, so is_higher_better must be False.\n",
    "    # (It was True, which made early stopping keep the WORST round as best --\n",
    "    # visible in the logs above where best iteration is always [1].)\n",
    "    return 'mse_score', scores, False\n",
    "def train_model(x, y, seed=981125, is_shuffle=True):\n",
    "    # Train a LightGBM regressor with K-fold CV and return the model fitted on\n",
    "    # the LAST fold only; out-of-fold predictions are collected in train_pred\n",
    "    # but never returned or used.\n",
    "    train_pred = np.zeros((x.shape[0], ))\n",
    "    # Cap fold count by sample count. KFold requires n_splits >= 2, so this\n",
    "    # assumes x has at least 2 rows (guaranteed by the x32 up-sampling upstream).\n",
    "    n_splits = min(5, x.shape[0])\n",
    "    # Kfold\n",
    "    fold = KFold(n_splits=n_splits, shuffle=is_shuffle, random_state=seed)\n",
    "    kf_way = fold.split(x)\n",
    "    # params\n",
    "    params = {\n",
    "        'learning_rate': 0.01,\n",
    "        'boosting_type': 'gbdt',\n",
    "        'objective': 'regression',\n",
    "        'num_leaves': 36,\n",
    "        'feature_fraction': 0.6,\n",
    "        'bagging_fraction': 0.7,\n",
    "        'bagging_freq': 6,\n",
    "        'seed': 8,\n",
    "        'bagging_seed': 1,\n",
    "        'feature_fraction_seed': 7,\n",
    "        'min_data_in_leaf': 25,\n",
    "        'nthread': 8,\n",
    "        'verbose': 1,\n",
    "    }\n",
    "    # train\n",
    "    for n_fold, (train_idx, valid_idx) in enumerate(kf_way, start=1):\n",
    "        train_x, train_y = x.iloc[train_idx], y.iloc[train_idx]\n",
    "        valid_x, valid_y = x.iloc[valid_idx], y.iloc[valid_idx]\n",
    "        # Wrap the fold in LightGBM Dataset objects.\n",
    "        n_train = lgb.Dataset(train_x, label=train_y)\n",
    "        n_valid = lgb.Dataset(valid_x, label=valid_y)\n",
    "        # NOTE(review): early_stopping_rounds / verbose_eval as train() kwargs\n",
    "        # were removed in lightgbm >= 4.0 (use callbacks instead) -- confirm the\n",
    "        # installed lightgbm version supports this call signature.\n",
    "        clf = lgb.train(\n",
    "            params=params,\n",
    "            train_set=n_train,\n",
    "            num_boost_round=3000,\n",
    "            valid_sets=[n_valid],\n",
    "            early_stopping_rounds=100,\n",
    "            verbose_eval=100,\n",
    "            feval=mse_score_eval\n",
    "        )\n",
    "        # Record out-of-fold predictions for this fold.\n",
    "        train_pred[valid_idx] = clf.predict(valid_x, num_iteration=clf.best_iteration)\n",
    "    return clf\n",
    "\n",
    "\n",
    "for route in tqdm(test_order_belong_to_trace):\n",
    "    route_order_info = get_train_order_by_route(route)\n",
    "    train_data = get_train_data(route_order_info, route)\n",
    "    print (train_data)\n",
    "    \n",
    "    features = [c for c in train_data.columns if c not in ['loadingOrder', 'label']]\n",
    "    model_by_route = train_model(train_data[features], train_data['label'])\n",
    "    \n",
    "    for order in test_order_belong_to_trace[route]:\n",
    "        test_order_data = get_test_data(order)\n",
    "        # Predicted sailing time (seconds) added to onboardDate gives the ETA.\n",
    "        res = model_by_route.predict(test_order_data[features], num_iteration=model_by_route.best_iteration)\n",
    "        test_data_origin.loc[test_data_origin['loadingOrder'] == order, 'ETA'] = (test_data_origin[test_data_origin['loadingOrder'] == order]['onboardDate'] + pd.Timedelta(seconds=res[0])).apply(lambda x:x.strftime('%Y/%m/%d  %H:%M:%S'))\n",
    "        # FIXME(review): debug breaks -- only the first order of the first route\n",
    "        # gets an ETA; remove both breaks for a full run.\n",
    "        break\n",
    "    break\n",
    "\n",
    "# pd.datetime was deprecated and removed in pandas 2.0; pd.Timestamp.now()\n",
    "# is the supported equivalent on all pandas versions.\n",
    "test_data_origin['creatDate'] = pd.Timestamp.now().strftime('%Y/%m/%d  %H:%M:%S')\n",
    "test_data_origin['timestamp'] = test_data_origin['temp_timestamp']\n",
    "\n",
    "result = test_data_origin[['loadingOrder', 'timestamp', 'longitude', 'latitude', 'carrierName', 'vesselMMSI', 'onboardDate', 'ETA', 'creatDate']]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# NOTE(review): to_csv without index=False writes the DataFrame index as an\n",
    "# extra first column -- confirm the expected submission format before keeping it.\n",
    "result.to_csv(result_path)\n",
    "# mox.file.copy_parallel(result_path, OBS_RES_PATH)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3.7.6 64-bit ('AI': conda)",
   "language": "python",
   "name": "python37664bitaiconda6859e03b37c34f0182c9bde8073269f7"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
