{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/sklearn/cross_validation.py:44: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. This module will be removed in 0.20.\n",
      "  \"This module will be removed in 0.20.\", DeprecationWarning)\n"
     ]
    }
   ],
   "source": [
    "# Display every expression result in a cell, not just the last one.\n",
    "from IPython.core.interactiveshell import InteractiveShell\n",
    "InteractiveShell.ast_node_interactivity = \"all\"\n",
    "\n",
    "# stdlib\n",
    "import gc\n",
    "from collections import Counter\n",
    "from pprint import pprint\n",
    "\n",
    "# third-party\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "import lightgbm as lgb\n",
    "import xgboost as xgb\n",
    "from xgboost import XGBClassifier\n",
    "from xgboost import plot_importance\n",
    "from sklearn.model_selection import GridSearchCV, GroupKFold, cross_val_score\n",
    "from sklearn.feature_selection import SelectFromModel\n",
    "# NOTE: `sklearn.cross_validation` is deprecated (removed in sklearn 0.20)\n",
    "# and triggered a DeprecationWarning; only `metrics` was actually needed\n",
    "# from that old import — confirm no later cell uses `cross_validation`.\n",
    "from sklearn import metrics\n",
    "from sklearn.metrics import f1_score\n",
    "from sklearn.ensemble import ExtraTreesClassifier\n",
    "\n",
    "# Reproducibility: seed numpy's global RNG before any stochastic step.\n",
    "np.random.seed(0)\n",
    "\n",
    "# Probability cutoff used to turn predicted reorder probabilities into labels.\n",
    "THRESHOLD = 0.38"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Load the pre-built per-(user, product) feature frames for train/test.\n",
    "# NOTE(review): pickle is only safe for files produced by this pipeline;\n",
    "# do not point these paths at untrusted data.\n",
    "order_train = pd.read_pickle('data/order_train.pkl')\n",
    "order_test = pd.read_pickle('data/order_test.pkl')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>user_product_reordered_ratio</th>\n",
       "      <th>reordered_sum</th>\n",
       "      <th>add_to_cart_order_inverted_mean</th>\n",
       "      <th>add_to_cart_order_relative_mean</th>\n",
       "      <th>reorder_prob</th>\n",
       "      <th>last</th>\n",
       "      <th>prev1</th>\n",
       "      <th>prev2</th>\n",
       "      <th>median</th>\n",
       "      <th>mean</th>\n",
       "      <th>...</th>\n",
       "      <th>up_order_rate</th>\n",
       "      <th>up_orders_since_last_order</th>\n",
       "      <th>up_order_rate_since_first_order</th>\n",
       "      <th>up_orders</th>\n",
       "      <th>up_first_order</th>\n",
       "      <th>up_last_order</th>\n",
       "      <th>up_mean_cart_position</th>\n",
       "      <th>days_since_prior_order_mean</th>\n",
       "      <th>order_dow_mean</th>\n",
       "      <th>order_hour_of_day_mean</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>3.25</td>\n",
       "      <td>5.5</td>\n",
       "      <td>12.000000</td>\n",
       "      <td>1</td>\n",
       "      <td>0.435484</td>\n",
       "      <td>24.0</td>\n",
       "      <td>7.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>7.0</td>\n",
       "      <td>7.0</td>\n",
       "      <td>...</td>\n",
       "      <td>0.666667</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>2</td>\n",
       "      <td>1</td>\n",
       "      <td>2.0</td>\n",
       "      <td>2.500000</td>\n",
       "      <td>7.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.312500</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.75</td>\n",
       "      <td>0.5</td>\n",
       "      <td>8.500000</td>\n",
       "      <td>1</td>\n",
       "      <td>0.435484</td>\n",
       "      <td>46.0</td>\n",
       "      <td>102.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>102.0</td>\n",
       "      <td>102.0</td>\n",
       "      <td>...</td>\n",
       "      <td>0.153846</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.181818</td>\n",
       "      <td>2</td>\n",
       "      <td>3</td>\n",
       "      <td>12.0</td>\n",
       "      <td>4.500000</td>\n",
       "      <td>18.500000</td>\n",
       "      <td>1.500000</td>\n",
       "      <td>0.833333</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>1.00</td>\n",
       "      <td>2.0</td>\n",
       "      <td>10.333333</td>\n",
       "      <td>2</td>\n",
       "      <td>0.435484</td>\n",
       "      <td>7.0</td>\n",
       "      <td>119.0</td>\n",
       "      <td>14.0</td>\n",
       "      <td>66.5</td>\n",
       "      <td>66.5</td>\n",
       "      <td>...</td>\n",
       "      <td>0.214286</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.272727</td>\n",
       "      <td>3</td>\n",
       "      <td>4</td>\n",
       "      <td>14.0</td>\n",
       "      <td>4.333333</td>\n",
       "      <td>9.666667</td>\n",
       "      <td>4.000000</td>\n",
       "      <td>0.564815</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>8.00</td>\n",
       "      <td>7.0</td>\n",
       "      <td>17.000000</td>\n",
       "      <td>0</td>\n",
       "      <td>0.435484</td>\n",
       "      <td>30.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>...</td>\n",
       "      <td>0.333333</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.500000</td>\n",
       "      <td>1</td>\n",
       "      <td>2</td>\n",
       "      <td>2.0</td>\n",
       "      <td>6.000000</td>\n",
       "      <td>4.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.461538</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>1.50</td>\n",
       "      <td>8.0</td>\n",
       "      <td>15.166667</td>\n",
       "      <td>5</td>\n",
       "      <td>0.435484</td>\n",
       "      <td>37.0</td>\n",
       "      <td>27.0</td>\n",
       "      <td>57.0</td>\n",
       "      <td>37.0</td>\n",
       "      <td>35.4</td>\n",
       "      <td>...</td>\n",
       "      <td>0.125000</td>\n",
       "      <td>2.0</td>\n",
       "      <td>1.500000</td>\n",
       "      <td>6</td>\n",
       "      <td>17</td>\n",
       "      <td>46.0</td>\n",
       "      <td>8.666667</td>\n",
       "      <td>9.833333</td>\n",
       "      <td>2.666667</td>\n",
       "      <td>0.441188</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 43 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "   user_product_reordered_ratio  reordered_sum  \\\n",
       "0                          3.25            5.5   \n",
       "1                          0.75            0.5   \n",
       "2                          1.00            2.0   \n",
       "3                          8.00            7.0   \n",
       "4                          1.50            8.0   \n",
       "\n",
       "   add_to_cart_order_inverted_mean  add_to_cart_order_relative_mean  \\\n",
       "0                        12.000000                                1   \n",
       "1                         8.500000                                1   \n",
       "2                        10.333333                                2   \n",
       "3                        17.000000                                0   \n",
       "4                        15.166667                                5   \n",
       "\n",
       "   reorder_prob  last   prev1   prev2  median    mean           ...            \\\n",
       "0      0.435484  24.0     7.0  9999.0     7.0     7.0           ...             \n",
       "1      0.435484  46.0   102.0  9999.0   102.0   102.0           ...             \n",
       "2      0.435484   7.0   119.0    14.0    66.5    66.5           ...             \n",
       "3      0.435484  30.0  9999.0  9999.0  9999.0  9999.0           ...             \n",
       "4      0.435484  37.0    27.0    57.0    37.0    35.4           ...             \n",
       "\n",
       "   up_order_rate  up_orders_since_last_order  up_order_rate_since_first_order  \\\n",
       "0       0.666667                         1.0                         1.000000   \n",
       "1       0.153846                         1.0                         1.181818   \n",
       "2       0.214286                         0.0                         1.272727   \n",
       "3       0.333333                         1.0                         1.500000   \n",
       "4       0.125000                         2.0                         1.500000   \n",
       "\n",
       "   up_orders  up_first_order  up_last_order  up_mean_cart_position  \\\n",
       "0          2               1            2.0               2.500000   \n",
       "1          2               3           12.0               4.500000   \n",
       "2          3               4           14.0               4.333333   \n",
       "3          1               2            2.0               6.000000   \n",
       "4          6              17           46.0               8.666667   \n",
       "\n",
       "   days_since_prior_order_mean  order_dow_mean  order_hour_of_day_mean  \n",
       "0                     7.000000        1.000000                0.312500  \n",
       "1                    18.500000        1.500000                0.833333  \n",
       "2                     9.666667        4.000000                0.564815  \n",
       "3                     4.000000        1.000000                0.461538  \n",
       "4                     9.833333        2.666667                0.441188  \n",
       "\n",
       "[5 rows x 43 columns]"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "array([ 1.,  1.,  1.,  1.,  1.], dtype=float32)"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>user_product_reordered_ratio</th>\n",
       "      <th>reordered_sum</th>\n",
       "      <th>add_to_cart_order_inverted_mean</th>\n",
       "      <th>add_to_cart_order_relative_mean</th>\n",
       "      <th>reorder_prob</th>\n",
       "      <th>last</th>\n",
       "      <th>prev1</th>\n",
       "      <th>prev2</th>\n",
       "      <th>median</th>\n",
       "      <th>mean</th>\n",
       "      <th>...</th>\n",
       "      <th>up_order_rate</th>\n",
       "      <th>up_orders_since_last_order</th>\n",
       "      <th>up_order_rate_since_first_order</th>\n",
       "      <th>up_orders</th>\n",
       "      <th>up_first_order</th>\n",
       "      <th>up_last_order</th>\n",
       "      <th>up_mean_cart_position</th>\n",
       "      <th>days_since_prior_order_mean</th>\n",
       "      <th>order_dow_mean</th>\n",
       "      <th>order_hour_of_day_mean</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>13.000000</td>\n",
       "      <td>12.0</td>\n",
       "      <td>17.000000</td>\n",
       "      <td>0</td>\n",
       "      <td>0.378882</td>\n",
       "      <td>82.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>9999.0000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.100000</td>\n",
       "      <td>8.0</td>\n",
       "      <td>1.111111</td>\n",
       "      <td>1</td>\n",
       "      <td>2</td>\n",
       "      <td>2.0</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>6.000000</td>\n",
       "      <td>0.076923</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>3.000000</td>\n",
       "      <td>2.0</td>\n",
       "      <td>9.000000</td>\n",
       "      <td>0</td>\n",
       "      <td>0.378882</td>\n",
       "      <td>21.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>9999.0000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.015873</td>\n",
       "      <td>5.0</td>\n",
       "      <td>10.500000</td>\n",
       "      <td>1</td>\n",
       "      <td>58</td>\n",
       "      <td>58.0</td>\n",
       "      <td>11.000000</td>\n",
       "      <td>7.000000</td>\n",
       "      <td>5.000000</td>\n",
       "      <td>0.846154</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.117647</td>\n",
       "      <td>1.0</td>\n",
       "      <td>13.823529</td>\n",
       "      <td>16</td>\n",
       "      <td>0.378882</td>\n",
       "      <td>88.0</td>\n",
       "      <td>4.0</td>\n",
       "      <td>11.0</td>\n",
       "      <td>9.5</td>\n",
       "      <td>13.1875</td>\n",
       "      <td>...</td>\n",
       "      <td>0.186813</td>\n",
       "      <td>12.0</td>\n",
       "      <td>1.096386</td>\n",
       "      <td>17</td>\n",
       "      <td>9</td>\n",
       "      <td>79.0</td>\n",
       "      <td>3.823529</td>\n",
       "      <td>2.764706</td>\n",
       "      <td>3.117647</td>\n",
       "      <td>0.649977</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>10.000000</td>\n",
       "      <td>9.0</td>\n",
       "      <td>15.000000</td>\n",
       "      <td>0</td>\n",
       "      <td>0.378882</td>\n",
       "      <td>48.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>9999.0000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.021739</td>\n",
       "      <td>1.0</td>\n",
       "      <td>23.000000</td>\n",
       "      <td>1</td>\n",
       "      <td>45</td>\n",
       "      <td>45.0</td>\n",
       "      <td>4.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>4.000000</td>\n",
       "      <td>0.307692</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>7.750000</td>\n",
       "      <td>14.5</td>\n",
       "      <td>12.000000</td>\n",
       "      <td>1</td>\n",
       "      <td>0.378882</td>\n",
       "      <td>109.0</td>\n",
       "      <td>83.0</td>\n",
       "      <td>9999.0</td>\n",
       "      <td>83.0</td>\n",
       "      <td>83.0000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.050000</td>\n",
       "      <td>10.0</td>\n",
       "      <td>2.222222</td>\n",
       "      <td>2</td>\n",
       "      <td>23</td>\n",
       "      <td>30.0</td>\n",
       "      <td>3.500000</td>\n",
       "      <td>9.000000</td>\n",
       "      <td>3.500000</td>\n",
       "      <td>0.196594</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 43 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "   user_product_reordered_ratio  reordered_sum  \\\n",
       "0                     13.000000           12.0   \n",
       "1                      3.000000            2.0   \n",
       "2                      0.117647            1.0   \n",
       "3                     10.000000            9.0   \n",
       "4                      7.750000           14.5   \n",
       "\n",
       "   add_to_cart_order_inverted_mean  add_to_cart_order_relative_mean  \\\n",
       "0                        17.000000                                0   \n",
       "1                         9.000000                                0   \n",
       "2                        13.823529                               16   \n",
       "3                        15.000000                                0   \n",
       "4                        12.000000                                1   \n",
       "\n",
       "   reorder_prob   last   prev1   prev2  median       mean  \\\n",
       "0      0.378882   82.0  9999.0  9999.0  9999.0  9999.0000   \n",
       "1      0.378882   21.0  9999.0  9999.0  9999.0  9999.0000   \n",
       "2      0.378882   88.0     4.0    11.0     9.5    13.1875   \n",
       "3      0.378882   48.0  9999.0  9999.0  9999.0  9999.0000   \n",
       "4      0.378882  109.0    83.0  9999.0    83.0    83.0000   \n",
       "\n",
       "            ...            up_order_rate  up_orders_since_last_order  \\\n",
       "0           ...                 0.100000                         8.0   \n",
       "1           ...                 0.015873                         5.0   \n",
       "2           ...                 0.186813                        12.0   \n",
       "3           ...                 0.021739                         1.0   \n",
       "4           ...                 0.050000                        10.0   \n",
       "\n",
       "   up_order_rate_since_first_order  up_orders  up_first_order  up_last_order  \\\n",
       "0                         1.111111          1               2            2.0   \n",
       "1                        10.500000          1              58           58.0   \n",
       "2                         1.096386         17               9           79.0   \n",
       "3                        23.000000          1              45           45.0   \n",
       "4                         2.222222          2              23           30.0   \n",
       "\n",
       "   up_mean_cart_position  days_since_prior_order_mean  order_dow_mean  \\\n",
       "0               1.000000                     1.000000        6.000000   \n",
       "1              11.000000                     7.000000        5.000000   \n",
       "2               3.823529                     2.764706        3.117647   \n",
       "3               4.000000                     1.000000        4.000000   \n",
       "4               3.500000                     9.000000        3.500000   \n",
       "\n",
       "   order_hour_of_day_mean  \n",
       "0                0.076923  \n",
       "1                0.846154  \n",
       "2                0.649977  \n",
       "3                0.307692  \n",
       "4                0.196594  \n",
       "\n",
       "[5 rows x 43 columns]"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Select the model features and build the train/validation matrices.\n",
    "# Commented-out names below are candidate features that were tried and\n",
    "# dropped; kept here as a record of what was evaluated.\n",
    "\n",
    "features = [\n",
    "    # 'reordered_dow_ration', 'reordered_dow', 'reordered_dow_size',\n",
    "    # 'reordered_prev', 'add_to_cart_order_prev', 'order_dow_prev', 'order_hour_of_day_prev',\n",
    "    'user_product_reordered_ratio', 'reordered_sum',\n",
    "    'add_to_cart_order_inverted_mean', 'add_to_cart_order_relative_mean',\n",
    "    'reorder_prob',\n",
    "    'last', 'prev1', 'prev2', 'median', 'mean',\n",
    "    'dep_reordered_ratio', 'aisle_reordered_ratio',\n",
    "    'aisle_products',\n",
    "    'aisle_reordered',\n",
    "    'dep_products',\n",
    "    'dep_reordered',\n",
    "    'prod_users_unq', 'prod_users_unq_reordered',\n",
    "    'order_number', 'prod_add_to_card_mean',\n",
    "    'days_since_prior_order',\n",
    "    'order_dow', 'order_hour_of_day',\n",
    "    'reorder_ration',\n",
    "    'user_orders', 'user_order_starts_at', 'user_mean_days_since_prior',\n",
    "    # 'user_median_days_since_prior',\n",
    "    'user_average_basket', 'user_distinct_products', 'user_reorder_ratio', 'user_total_products',\n",
    "    'prod_orders', 'prod_reorders',\n",
    "    'up_order_rate', 'up_orders_since_last_order', 'up_order_rate_since_first_order',\n",
    "    'up_orders', 'up_first_order', 'up_last_order', 'up_mean_cart_position',\n",
    "    # 'up_median_cart_position',\n",
    "    'days_since_prior_order_mean',\n",
    "    # 'days_since_prior_order_median',\n",
    "    'order_dow_mean',\n",
    "    # 'order_dow_median',\n",
    "    'order_hour_of_day_mean',\n",
    "    # 'order_hour_of_day_median'\n",
    "]\n",
    "\n",
    "# `reordered` is the binary target; labels flattened to a 1-D float32 vector.\n",
    "data_train = order_train[features]\n",
    "labels = order_train[['reordered']].values.astype(np.float32).flatten()\n",
    "data_val = order_test[features]\n",
    "\n",
    "X_train = data_train\n",
    "y_train = labels\n",
    "X_test = data_val\n",
    "\n",
    "# With ast_node_interactivity = 'all', all three render in the cell output.\n",
    "X_train.head()\n",
    "y_train[:5]\n",
    "X_test.head()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "loading train\n",
      "loading orders\n",
      "cv ok\n"
     ]
    }
   ],
   "source": [
    "# CV score: build the ground-truth reorder set per train order and define\n",
    "# an F1 scorer mirroring the competition metric.\n",
    "\n",
    "from functools import partial\n",
    "\n",
    "print('loading train')\n",
    "trains = pd.read_csv('data/order_products__train.csv', dtype={\n",
    "            'order_id': np.int32,\n",
    "            'product_id': np.uint16,\n",
    "            'add_to_cart_order': np.int16,\n",
    "            'reordered': np.int8})\n",
    "\n",
    "print('loading orders')\n",
    "orders = pd.read_csv('data/orders.csv', dtype={\n",
    "        'order_id': np.int32,\n",
    "        'user_id': np.int32,\n",
    "        'eval_set': 'category',\n",
    "        'order_number': np.int16,\n",
    "        'order_dow': np.int8,\n",
    "        'order_hour_of_day': np.int8,\n",
    "        'days_since_prior_order': np.float32})\n",
    "\n",
    "\n",
    "def compare_results(df_gt, df_preds):\n",
    "    \"\"\"Mean per-order F1 between ground-truth and predicted product strings.\n",
    "\n",
    "    Both frames are indexed by order_id and carry a space-separated\n",
    "    `products` column; 'None' (no reorder) is mapped to the sentinel '-1'\n",
    "    so it participates in the intersection like a regular item.\n",
    "    \"\"\"\n",
    "    df_gt_cut = df_gt.loc[df_preds.index]\n",
    "\n",
    "    f1 = []\n",
    "    for gt, pred in zip(df_gt_cut.sort_index().products, df_preds.sort_index().products):\n",
    "        lgt = gt.replace(\"None\", \"-1\").split(' ')\n",
    "        lpred = pred.replace(\"None\", \"-1\").split(' ')\n",
    "\n",
    "        rr = np.intersect1d(lgt, lpred)\n",
    "        # `np.float` is a deprecated alias of the builtin (removed in\n",
    "        # numpy >= 1.20); the builtin behaves identically here.\n",
    "        precision = float(len(rr)) / len(lpred)\n",
    "        recall = float(len(rr)) / len(lgt)\n",
    "\n",
    "        denom = precision + recall\n",
    "        f1.append(((2 * precision * recall) / denom) if denom > 0 else 0)\n",
    "\n",
    "    return np.mean(f1)\n",
    "\n",
    "\n",
    "# Build (or load from cache) the ground truth: one row per train order,\n",
    "# `products` = space-separated reordered product ids, or 'None' when the\n",
    "# order contains no reorders.\n",
    "try:\n",
    "    df_train_gt = pd.read_pickle('data/df_train_gt.pkl')\n",
    "except (IOError, OSError):  # cache missing/unreadable -> rebuild it\n",
    "    train_gtl = []\n",
    "\n",
    "    train_details = pd.merge(\n",
    "                left=trains,\n",
    "                 right=orders, \n",
    "                 how='left', \n",
    "                 on='order_id'\n",
    "        ).apply(partial(pd.to_numeric, errors='ignore', downcast='integer'))\n",
    "\n",
    "    for uid, subset in train_details.groupby('user_id'):\n",
    "        subset1 = subset[subset.reordered == 1]\n",
    "        # each user has exactly one train order, so any row's order_id works\n",
    "        oid = subset.order_id.values[0]\n",
    "\n",
    "        if len(subset1) == 0:\n",
    "            train_gtl.append((oid, 'None'))\n",
    "            continue\n",
    "\n",
    "        ostr = ' '.join([str(int(e)) for e in subset1.product_id.values])\n",
    "        # .strip is needed because join can have a padding space at the end\n",
    "        train_gtl.append((oid, ostr.strip()))\n",
    "\n",
    "    df_train_gt = pd.DataFrame(train_gtl)\n",
    "\n",
    "    df_train_gt.columns = ['order_id', 'products']\n",
    "    df_train_gt.set_index('order_id', inplace=True)\n",
    "    df_train_gt.sort_index(inplace=True)\n",
    "\n",
    "    df_train_gt.to_pickle('data/df_train_gt.pkl')\n",
    "\n",
    "\n",
    "def get_f1(df, proba_pred, t):\n",
    "    \"\"\"F1 of thresholded probabilities (proba_pred > t) against df_train_gt.\n",
    "\n",
    "    `df` must carry `product_id` and `order_id` columns row-aligned with\n",
    "    `proba_pred`; orders with no positive prediction score as 'None'.\n",
    "    \"\"\"\n",
    "    val_out = df[['product_id', 'order_id']].copy()\n",
    "    val_out.loc[:, 'reordered'] = (proba_pred > t).astype(int)\n",
    "    val_out.loc[:, 'product_id'] = val_out.product_id.astype(str)\n",
    "\n",
    "    result_t0 = pd.DataFrame()\n",
    "    result_t0['products'] = val_out[val_out.reordered == 1].groupby('order_id')['product_id'].apply(lambda x: ' '.join(set(x)))\n",
    "\n",
    "    # Default every order to 'None', then overwrite those with predictions.\n",
    "    result_t = pd.DataFrame(index = df.order_id.unique())\n",
    "    result_t.index.name = 'order_id'\n",
    "    result_t['products'] = ['None'] * len(result_t)\n",
    "    result_t.loc[result_t0.index, 'products'] = result_t0.products\n",
    "    score = compare_results(df_train_gt, result_t)\n",
    "\n",
    "    # These intermediates are large at full scale; free them eagerly,\n",
    "    # since kernel memory persists across cells.\n",
    "    del val_out\n",
    "    del result_t0\n",
    "    del result_t\n",
    "    gc.collect()\n",
    "\n",
    "    return score\n",
    "\n",
    "print('cv ok')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(8474661, 43)"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{'num_leaves': [200, 256, 300]}\n",
      "Fitting 2 folds for each of 3 candidates, totalling 6 fits\n",
      "[CV] num_leaves=200 ..................................................\n",
      "[CV] ................... num_leaves=200, score=0.257811, total=  23.1s\n",
      "[CV] num_leaves=200 ..................................................\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[Parallel(n_jobs=1)]: Done   1 out of   1 | elapsed:   29.8s remaining:    0.0s\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[CV] ................... num_leaves=200, score=0.262643, total=  23.0s\n",
      "[CV] num_leaves=256 ..................................................\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[Parallel(n_jobs=1)]: Done   2 out of   2 | elapsed:   59.6s remaining:    0.0s\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[CV] ................... num_leaves=256, score=0.260620, total=  23.3s\n",
      "[CV] num_leaves=256 ..................................................\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[Parallel(n_jobs=1)]: Done   3 out of   3 | elapsed:  1.5min remaining:    0.0s\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[CV] ................... num_leaves=256, score=0.264446, total=  23.5s\n",
      "[CV] num_leaves=300 ..................................................\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[Parallel(n_jobs=1)]: Done   4 out of   4 | elapsed:  2.0min remaining:    0.0s\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[CV] ................... num_leaves=300, score=0.261939, total=  23.7s\n",
      "[CV] num_leaves=300 ..................................................\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[Parallel(n_jobs=1)]: Done   5 out of   5 | elapsed:  2.5min remaining:    0.0s\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[CV] ................... num_leaves=300, score=0.266513, total=  24.0s\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[Parallel(n_jobs=1)]: Done   6 out of   6 | elapsed:  3.0min remaining:    0.0s\n",
      "[Parallel(n_jobs=1)]: Done   6 out of   6 | elapsed:  3.0min finished\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "GridSearchCV(cv=[(array([      0,       4, ..., 8474659, 8474660]), array([      1,       2, ..., 8474657, 8474658])), (array([      1,       2, ..., 8474657, 8474658]), array([      0,       4, ..., 8474659, 8474660]))],\n",
       "       error_score='raise',\n",
       "       estimator=LGBMClassifier(boosting_type='gbdt', colsample_bytree=1, feature_fraction=0.6,\n",
       "        learning_rate=0.05, max_bin=255, max_depth=-1,\n",
       "        metric={'binary_logloss', 'auc'}, min_child_samples=10,\n",
       "        min_child_weight=5, min_data_in_leaf=20, min_split_gain=0,\n",
       "        n_estimators=10, nthread=-1, num_leaves=31, objective='binary',\n",
       "        reg_alpha=0, reg_lambda=0, seed=0, silent=True, subsample=1,\n",
       "        subsample_for_bin=50000, subsample_freq=1),\n",
       "       fit_params={}, iid=True, n_jobs=1,\n",
       "       param_grid={'num_leaves': [200, 256, 300]}, pre_dispatch='2*n_jobs',\n",
       "       refit=True, return_train_score=True, scoring=make_scorer(f1_score),\n",
       "       verbose=10)"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "params:\n",
      "  param 0: {'num_leaves': 200}\n",
      "  param 1: {'num_leaves': 256}\n",
      "  param 2: {'num_leaves': 300}\n",
      "best_params_: {'num_leaves': 300}\n",
      "mean_train_score: [ 0.26350493  0.26665518  0.26929162], std_train_score: [ 0.00241905  0.00215678  0.00229073]\n",
      "mean_test_score: [ 0.26022682  0.2625331   0.26422589], std_test_score: [ 0.00241616  0.00191271  0.00228727]\n",
      "best_score_: 0.26422588529051305\n"
     ]
    }
   ],
   "source": [
     "# model\n",
     "\n",
     "#model\n",
     "# number of GroupKFold splits used by the grid search further below\n",
     "CV = 2\n",
     "\n",
     "X_train.shape\n",
     "\n",
     "# USE_XGB = True\n",
     "# False -> tune the LightGBM estimator; True -> tune XGBoost\n",
     "USE_XGB = False\n",
     "if USE_XGB:\n",
     "#     n_estimators = list(range(1, 102, 15))\n",
     "#     n_estimators = [100, 300]\n",
     "#     n_estimators = [1, 10, 60, 300, 1500]\n",
     "#     max_depth = list(range(1, 102, 10))\n",
     "#     max_depth = [5, 6, 7]\n",
     "#     max_depth = [5, 6]\n",
     "    max_depth = [2, 3]\n",
     "#     min_child_weight = list(range(1, 102, 10))\n",
     "    min_child_weight = [1]\n",
     "#     min_child_weight = [1, 3]\n",
     "#     min_child_weight = [5]\n",
     "#     gamma = list(range(1, 12, 1))\n",
     "#     gamma = [0.1 * x for x in range(0, 12, 1)]\n",
     "    # grid currently searched for XGBoost: 2 x 1 = 2 candidates\n",
     "    param_grid = {\n",
     "#         'learning_rate': [0.1], #so called `eta` value\n",
     "#         'nthread':[4], #when use hyperthread, xgboost may become slower\n",
     "#         'objective':['binary:logistic'],\n",
     "        'max_depth': max_depth,\n",
     "        'min_child_weight': min_child_weight,\n",
     "#         'gamma':gamma,\n",
     "#         'scale_pos_weight':scale_pos_weight,\n",
     "#         'silent': [1],\n",
     "#         'subsample': [0.8],\n",
     "#         'colsample_bytree': [0.7],\n",
     "#         'missing':[-999],\n",
     "#         'n_estimators': n_estimators, #number of trees, change it to 1000 for better results\n",
     "#         'missing':[-999],\n",
     "    }\n",
     "    estimator = xgb.XGBClassifier(learning_rate=0.1, n_estimators=10, silent=False, \\\n",
     "                                  objective='binary:logistic', seed=0)\n",
     "    # NOTE(review): monkey-patch so predict() returns class probabilities;\n",
     "    # presumably for a probability-based scorer -- confirm before reuse\n",
     "    estimator.predict = estimator.predict_proba\n",
     "else:\n",
     "# params = {\n",
     "#     'task': 'train',\n",
     "#     'boosting_type': 'gbdt',\n",
     "#     'objective': 'binary',\n",
     "#     'metric': {'binary_logloss', 'auc'},\n",
     "#     'num_leaves': 256,\n",
     "#     'min_sum_hessian_in_leaf': 20,\n",
     "#     'max_depth': 12,\n",
     "#     'learning_rate': 0.05,\n",
     "#     'feature_fraction': 0.6,\n",
     "#     # 'bagging_fraction': 0.9,\n",
     "#     # 'bagging_freq': 3,\n",
     "#     'verbose': 1\n",
     "# }    \n",
     "    # n_estimators = [10, 50, 100]\n",
     "#     n_estimators = [5, 10, 15]\n",
     "#     n_estimators = [5, 10]\n",
     "#     n_estimators = [50, 70, 100]\n",
     "#     max_depth = [5, 10, 15]\n",
     "#     max_depth = [5]\n",
     "#     num_leaves = [490, 512, 540]\n",
     "#     num_leaves = [512, 550, 600]\n",
     "#     num_leaves = [600, 800, 1000]\n",
     "#     num_leaves = [500, 600, 700]\n",
     "\n",
     "\n",
     "    # grid currently searched for LightGBM: only num_leaves (3 candidates)\n",
     "    num_leaves = [200, 256, 300]\n",
     "    param_grid = {\n",
     "          'num_leaves': num_leaves,                    # higher number of leaves\n",
     "\n",
     "#         'boosting_type': ['gbdt'],\n",
     "#         'objective': ['binary'],\n",
     "#         'num_leaves': [96],\n",
     "#         'max_depth': max_depth,\n",
     "#         'feature_fraction': [0.9],\n",
     "#         'bagging_fraction': [0.95],\n",
     "#         'bagging_freq': [5],\n",
     "#         'learning_rate': [0.01],\n",
     "#         'n_estimators': n_estimators,\n",
     "#         'seed': [0],\n",
     "    #     'metric': ['binary_logloss']\n",
     "    }\n",
     "#     estimator = lgb.LGBMClassifier(metric={'binary_logloss', 'auc'}, boosting_type='gbdt', objective='binary', seed=0,\n",
     "#                                   feature_fraction=0.9, bagging_fraction=0.95, bagging_freq=5, min_data_in_leaf=200, \n",
     "#                                   learning_rate=0.1, num_iterations=1000, max_bin=100)\n",
     "    estimator = lgb.LGBMClassifier(metric={'binary_logloss', 'auc'}, boosting_type='gbdt', objective='binary', seed=0,\n",
     "                                  feature_fraction=0.6, min_data_in_leaf=20, \n",
     "                                  learning_rate=0.05)\n",
     "# display the grid that will be searched\n",
     "pprint(param_grid)\n",
    "\n",
     "# global call counter: tracks which (fold, half) pair GridSearchCV is\n",
     "# currently scoring, since sklearn does not pass that context to a scorer\n",
     "cv_index = 0\n",
     "def xgb_f1_score(y_true, y_pred):\n",
     "    \"\"\"Order-level F1 scorer computed via get_f1 on the current CV split.\n",
     "\n",
     "    Uses the module-level `cv` splits plus the `cv_index` call counter to\n",
     "    recover which fold/half is being scored. Fragile: assumes sklearn\n",
     "    invokes the scorer in a fixed order (two calls per fold) -- TODO\n",
     "    confirm. NOTE(review): the active GridSearchCV below actually uses\n",
     "    plain f1_score, so this function is kept for reference only.\n",
     "    \"\"\"\n",
     "    global cv_index\n",
     "#     y_pred = np.ones_like(y_pred) # pred=1\n",
     "#     print(type(y_true), len(y_true), len(y_pred))\n",
     "#     print(cv_index)\n",
     "    # keep only the positive-class probability column\n",
     "    y_pred = y_pred[:, 1]\n",
     "#     print(y_true[:20], y_pred[:20])\n",
     "#     y_pred = (y_pred > THRESHOLD).astype(int)\n",
     "    index_0 = int(cv_index / 2)\n",
     "    index_1 = int(cv_index % 2)\n",
     "    # flip 0<->1: presumably even calls score the test half of the fold,\n",
     "    # odd calls the train half -- verify against sklearn's scoring order\n",
     "    index_1 = 1 if index_1 == 0  else 0\n",
     "#     print(index_0, index_1)\n",
     "#     print(len(train.iloc[cv[index_0][index_1]]))\n",
     "    s = get_f1(train.iloc[cv[index_0][index_1]][['order_id', 'product_id']], y_pred, THRESHOLD)\n",
     "    # advance and wrap the counter (2 scoring calls per fold)\n",
     "    cv_index += 1\n",
     "    if cv_index >= 2 * len(cv):\n",
     "        cv_index = 0\n",
     "    return s #f1_score(y_true, y_pred)\n",
    "\n",
     "# group folds by user_id so one user's rows never span train and test\n",
     "cv = list(GroupKFold(n_splits=CV).split(X_train, y_train, order_train['user_id'].values))\n",
     "# len(cv[0][0])\n",
     "# len(cv[0][1])\n",
     "# len(cv)\n",
     "# print(train.iloc[cv[0][0][:20]]['reordered'], train.iloc[cv[0][1][:20]]['reordered'])\n",
     "# cv\n",
     "# scorer = metrics.make_scorer(xgb_f1_score, greater_is_better=True, needs_proba=True)\n",
     "# clf = GridSearchCV(estimator, param_grid, scoring=scorer,\n",
     "#                    cv=cv, refit=True, verbose=10)\n",
     "# active scorer: plain row-level f1_score (the order-level xgb_f1_score\n",
     "# variant above is kept for reference but commented out)\n",
     "scorer = metrics.make_scorer(f1_score, greater_is_better=True)\n",
     "clf = GridSearchCV(estimator, param_grid, scoring=scorer,\n",
     "                   cv=cv, refit=True, verbose=10)\n",
     "clf.fit(X_train, y_train)\n",
    "\n",
    "def gridcv_results(clf):\n",
    "    print('params:')\n",
    "    for i, r in enumerate(clf.cv_results_['params']):\n",
    "        print('  param {}: {}'.format(i, r))\n",
    "    print('best_params_: {}'.format(clf.best_params_))\n",
    "    print('mean_train_score: {}, std_train_score: {}'.format(clf.cv_results_['mean_train_score'], clf.cv_results_['std_train_score']))\n",
    "    print('mean_test_score: {}, std_test_score: {}'.format(clf.cv_results_['mean_test_score'], clf.cv_results_['std_test_score']))\n",
    "    print('best_score_: {}'.format(clf.best_score_))\n",
    "\n",
    "gridcv_results(clf)\n",
    "# print('CV={} CV_THRESHOLD={} '.format(THRESHOLD, CV))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>order_id</th>\n",
       "      <th>prediction</th>\n",
       "      <th>product_id</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.311053</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>203668</td>\n",
       "      <td>0.316497</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>1734476</td>\n",
       "      <td>0.330122</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>1627971</td>\n",
       "      <td>0.327502</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>1213682</td>\n",
       "      <td>0.314284</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>1456859</td>\n",
       "      <td>0.304002</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>1192576</td>\n",
       "      <td>0.307040</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>322069</td>\n",
       "      <td>0.304584</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>761818</td>\n",
       "      <td>0.316017</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>1029203</td>\n",
       "      <td>0.310277</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>3052061</td>\n",
       "      <td>0.316079</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>1981837</td>\n",
       "      <td>0.340562</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>2224210</td>\n",
       "      <td>0.442916</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>2669437</td>\n",
       "      <td>0.331641</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>526558</td>\n",
       "      <td>0.368308</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>1339380</td>\n",
       "      <td>0.410407</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>821750</td>\n",
       "      <td>0.303875</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>993166</td>\n",
       "      <td>0.433530</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>983407</td>\n",
       "      <td>0.304584</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>1320217</td>\n",
       "      <td>0.329876</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "    order_id  prediction  product_id\n",
       "0      65432    0.311053       17330\n",
       "1     203668    0.316497       17330\n",
       "2    1734476    0.330122       17330\n",
       "3    1627971    0.327502       17330\n",
       "4    1213682    0.314284       17330\n",
       "5    1456859    0.304002       17330\n",
       "6    1192576    0.307040       17330\n",
       "7     322069    0.304584       17330\n",
       "8     761818    0.316017       17330\n",
       "9    1029203    0.310277       17330\n",
       "10   3052061    0.316079       17330\n",
       "11   1981837    0.340562       17330\n",
       "12   2224210    0.442916       17330\n",
       "13   2669437    0.331641       17330\n",
       "14    526558    0.368308       17330\n",
       "15   1339380    0.410407       17330\n",
       "16    821750    0.303875       17330\n",
       "17    993166    0.433530       17330\n",
       "18    983407    0.304584       17330\n",
       "19   1320217    0.329876       17330"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "predict finish.\n"
     ]
    }
   ],
   "source": [
    "prediction = clf.predict_proba(data_val)[:, 1]\n",
    "# prediction = model.predict(data_val)\n",
    "orders = order_test.order_id.values\n",
    "products = order_test.product_id.values\n",
    "\n",
    "result = pd.DataFrame({'product_id': products, 'order_id': orders, 'prediction': prediction})\n",
    "if USE_XGB:\n",
    "    result.to_pickle('data/prediction_xgb.pkl')\n",
    "else:\n",
    "    result.to_pickle('data/prediction_lgbm.pkl')\n",
    "result.head(20)\n",
    "\n",
    "print('predict finish.')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([ 1.,  1.,  1.,  1.,  1.,  1.,  1.,  0.,  0.,  0.,  0.,  0.,  0.,\n",
       "        0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,\n",
       "        0.,  0.,  0.,  0.,  0.,  0.,  1.,  0.,  1.,  1.,  1.,  1.,  1.,\n",
       "        1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.,  1.], dtype=float32)"
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "array([ 0.43697931,  0.3505616 ,  0.38935966,  0.34739725,  0.35149449])"
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "8474661 8474661\n",
      "threshold=0.25,i_score=0.21540057948287963\n",
      "threshold=0.3,i_score=0.21540057948287963\n",
      "threshold=0.35000000000000003,i_score=0.363611875698088\n",
      "coarse best  threshold=0.35000000000000003,i_score=0.363611875698088\n",
      " \n",
      "threshold=0.31,i_score=0.24040780020557892\n",
      "threshold=0.32,i_score=0.28266652847506796\n",
      "threshold=0.33,i_score=0.31912435857283816\n",
      "threshold=0.34,i_score=0.3451839344548496\n",
      "threshold=0.35000000000000003,i_score=0.363611875698088\n",
      "threshold=0.36,i_score=0.37481898276927245\n",
      "threshold=0.37,i_score=0.38041137963078436\n",
      "threshold=0.38,i_score=0.38137633298090445\n",
      "threshold=0.39,i_score=0.3787140460649657\n",
      "fine best threshold=0.38,i_score=0.38137633298090445\n"
     ]
    }
   ],
   "source": [
    "USE_VARIABLE_THRESHOLD = False\n",
    "\n",
    "if USE_VARIABLE_THRESHOLD:\n",
    "    preds = pred_variable\n",
    "    y_train[:10]\n",
    "    preds[:10]\n",
    "    score = get_f1(train, preds, 0.5)\n",
    "    print('USE_VARIABLE_THRESHOLD score:', score)\n",
    "else:\n",
    "#     preds_train = clf.predict_proba(X_train)\n",
    "#     preds = preds_train[:, 1]\n",
    "    y_train[:10]\n",
    "    preds[:10]\n",
    "    print(len(y_train), len(preds))\n",
    "\n",
    "    i_threshold = 0 \n",
    "    score = 0\n",
    "    # for i in xrange(5, 100, 5):\n",
    "    # for i in xrange(5, 50, 5):\n",
    "    for i in range(25, 40, 5):\n",
    "    # for i in [26]:\n",
    "    #     i_score = metrics.f1_score(y_train, (p>0.01*i).astype(int))\n",
    "        t = 0.01*i\n",
    "        i_score = get_f1(order_train, preds, t)\n",
    "        print('threshold={},i_score={}'.format(t, i_score))\n",
    "        if i_score > score:\n",
    "            score = i_score\n",
    "            i_treshold = i\n",
    "    print('coarse best  threshold={},i_score={}'.format(0.01*i_treshold, score))\n",
    "    print(' ')\n",
    "\n",
    "#     i_treshold = 30\n",
    "    score = 0\n",
    "    for i in range(i_treshold-4, i_treshold+5, 1):\n",
    "    #     i_score = metrics.f1_score(y_train, (p>0.01*i).astype(int))\n",
    "        t = 0.01*i\n",
    "        i_score = get_f1(order_train, preds, t)\n",
    "        print('threshold={},i_score={}'.format(t, i_score))\n",
    "        if i_score > score:\n",
    "            score = i_score\n",
    "            i_treshold = i\n",
    "            THRESHOLD = t\n",
    "    # if i_treshold == i_treshold+5-1 or i_treshold == i_treshold-4:\n",
    "    #     print('!!!!!!!!!!!!!!! not get best threshold')\n",
    "    print('fine best threshold={},i_score={}'.format(0.01*i_treshold, score))\n",
    "\n",
    "\n",
    "    # score = 0\n",
    "    # for i in [i_treshold-0.4, i_treshold-0.2, i_treshold, i_treshold+0.2, i_treshold+0.4]:\n",
    "    # #     i_score = metrics.f1_score(y_train, (p>0.01*i).astype(int))\n",
    "    #     t = 0.01*i\n",
    "    #     i_score = get_f1(train, preds, t)\n",
    "    #     print('threshold={},i_score={}'.format(t, i_score))\n",
    "    #     if i_score > score:\n",
    "    #         score = i_score\n",
    "    # #         i_treshold = i\n",
    "    # #         THRESHOLD = t\n",
    "    # # if i_treshold == i_treshold+5-1 or i_treshold == i_treshold-4:\n",
    "    # #     print('!!!!!!!!!!!!!!! not get best threshold')\n",
    "    # print('fine best threshold={},i_score={}'.format(0.01*i_treshold, score))\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "4833292 647785\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>order_id</th>\n",
       "      <th>product_id</th>\n",
       "      <th>reordered</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>65432</td>\n",
       "      <td>17330</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>203668</td>\n",
       "      <td>17330</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>1734476</td>\n",
       "      <td>17330</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>1627971</td>\n",
       "      <td>17330</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>1213682</td>\n",
       "      <td>17330</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   order_id  product_id  reordered\n",
       "0     65432       17330          0\n",
       "1    203668       17330          0\n",
       "2   1734476       17330          0\n",
       "3   1627971       17330          0\n",
       "4   1213682       17330          0"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>order_id</th>\n",
       "      <th>0</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>17</td>\n",
       "      <td>21</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>34</td>\n",
       "      <td>78</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>137</td>\n",
       "      <td>68</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>182</td>\n",
       "      <td>100</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>257</td>\n",
       "      <td>57</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   order_id    0\n",
       "0        17   21\n",
       "1        34   78\n",
       "2       137   68\n",
       "3       182  100\n",
       "4       257   57"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>order_id</th>\n",
       "      <th>products</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>17</td>\n",
       "      <td>{39275, 21709, 13107, 47766, 21463, 38777, 26429}</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>34</td>\n",
       "      <td>{2596, 48523, 47792, 21137, 43504, 39475, 1608...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>137</td>\n",
       "      <td>{38689, 25890, 5134, 23794, 24852, 2326, 41787}</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>182</td>\n",
       "      <td>{11520, 5479, 33000, 47209, 39275, 32109, 1525...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>257</td>\n",
       "      <td>{27104, 1025, 24838, 29837, 13870, 37646, 2113...</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   order_id                                           products\n",
       "0        17  {39275, 21709, 13107, 47766, 21463, 38777, 26429}\n",
       "1        34  {2596, 48523, 47792, 21137, 43504, 39475, 1608...\n",
       "2       137    {38689, 25890, 5134, 23794, 24852, 2326, 41787}\n",
       "3       182  {11520, 5479, 33000, 47209, 39275, 32109, 1525...\n",
       "4       257  {27104, 1025, 24838, 29837, 13870, 37646, 2113..."
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>order_id</th>\n",
       "      <th>0</th>\n",
       "      <th>products</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>474</td>\n",
       "      <td>21</td>\n",
       "      <td>None</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>513</td>\n",
       "      <td>16</td>\n",
       "      <td>None</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>42</th>\n",
       "      <td>2297</td>\n",
       "      <td>10</td>\n",
       "      <td>None</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>61</th>\n",
       "      <td>3519</td>\n",
       "      <td>15</td>\n",
       "      <td>None</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>130</th>\n",
       "      <td>7125</td>\n",
       "      <td>43</td>\n",
       "      <td>None</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "     order_id   0 products\n",
       "12        474  21     None\n",
       "15        513  16     None\n",
       "42       2297  10     None\n",
       "61       3519  15     None\n",
       "130      7125  43     None"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "75000"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>order_id</th>\n",
       "      <th>products</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>17</td>\n",
       "      <td>39275  21709  13107  47766  21463  38777  26429</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>34</td>\n",
       "      <td>2596  48523  47792  21137  43504  39475  16083...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>137</td>\n",
       "      <td>38689  25890  5134  23794  24852  2326  41787</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>182</td>\n",
       "      <td>11520  5479  33000  47209  39275  32109  15252...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>257</td>\n",
       "      <td>27104  1025  24838  29837  13870  37646  21137...</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   order_id                                           products\n",
       "0        17    39275  21709  13107  47766  21463  38777  26429\n",
       "1        34  2596  48523  47792  21137  43504  39475  16083...\n",
       "2       137      38689  25890  5134  23794  24852  2326  41787\n",
       "3       182  11520  5479  33000  47209  39275  32109  15252...\n",
       "4       257  27104  1025  24838  29837  13870  37646  21137..."
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "submission saved.\n"
     ]
    }
   ],
   "source": [
    "result = order_test[['order_id', 'product_id']].copy()    \n",
    "p = clf.predict_proba(X_test)\n",
    "result['reordered'] = (p[:, 1] > THRESHOLD) * 1\n",
    "print(len(result), result.reordered.sum())\n",
    "\n",
    "# result['reordered'] = np.array([1] * len(test))\n",
    "# result['reordered'] = np.array([0] * len(test))\n",
    "result.head()\n",
    "\n",
    "# submission = pd.DataFrame(index = result.order_id.unique())\n",
    "submission = pd.DataFrame()\n",
    "g = result.groupby('order_id')\n",
    "submission = g.size().to_frame()\n",
    "submission.reset_index(inplace=True)\n",
    "submission.head()\n",
    "\n",
    "product = pd.DataFrame()\n",
    "g = result[result.reordered == 1].groupby('order_id')\n",
    "product['products'] = g['product_id'].apply(set)\n",
    "product.reset_index(inplace=True)\n",
    "product.head()\n",
    "\n",
    "submission = submission.merge(product, on='order_id', how='left')\n",
    "submission['products'] = submission['products'].map(lambda s: str(s).replace(',', ' '))\n",
    "submission['products'] = submission['products'].map(lambda s: s.strip('set()[]{}'))\n",
    "submission['products'] = submission['products'].map(lambda s: 'None' if s == 'nan' else s)\n",
    "submission[submission.products=='None'].head()\n",
    "\n",
    "len(submission)\n",
    "submission = submission[['order_id', 'products']]\n",
    "submission.head()\n",
    "\n",
    "submission.to_csv('submission.csv', index=False)\n",
    "print('submission saved.')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:337: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[key] = _infer_fill_value(value)\n",
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:337: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[key] = _infer_fill_value(value)\n",
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:337: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[key] = _infer_fill_value(value)\n",
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:337: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[key] = _infer_fill_value(value)\n",
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:517: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[item] = s\n",
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:517: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[item] = s\n",
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:517: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[item] = s\n",
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:517: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[item] = s\n"
     ]
    }
   ],
   "source": [
    "# python3 f1_optimal.py\n",
    "\n",
    "%run utils.py\n",
    "\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "from joblib import Parallel, delayed\n",
    "import multiprocessing\n",
    "\n",
    "from utils import fast_search\n",
    "\n",
    "none_product = 50000\n",
    "\n",
    "def applyParallel(dfGrouped, func):\n",
    "    retLst = Parallel(n_jobs=multiprocessing.cpu_count())(delayed(func)(group) for name, group in dfGrouped)\n",
    "    return pd.concat(retLst)\n",
    "\n",
    "def create_products(df):\n",
    "    # print(df.product_id.values.shape)\n",
    "    products = df.product_id.values\n",
    "    prob = df.prediction.values\n",
    "\n",
    "    sort_index = np.argsort(prob)[::-1]\n",
    "\n",
    "    values = fast_search(prob[sort_index][0:80], dtype=np.float64)\n",
    "\n",
    "    index = np.argmax(values)\n",
    "\n",
    "#     print('iteration', df.shape[0], 'optimal value', index)\n",
    "\n",
    "    best = ' '.join(map(lambda x: str(x) if x != none_product else 'None', products[sort_index][0:index]))\n",
    "    df = df[0:1]\n",
    "    df.loc[:, 'products'] = best\n",
    "    return df\n",
    "\n",
    "if __name__ == '__main__':\n",
    "#     data = pd.read_pickle('data/prediction_rnn.pkl')\n",
    "#     data = pd.read_pickle('data/prediction_lgbm.pkl')\n",
    "    data = pd.read_pickle('data/prediction_lgbm_src.pkl')\n",
    "\n",
    "    data['not_a_product'] = 1. - data.prediction\n",
    "\n",
    "    gp = data.groupby('order_id')['not_a_product'].apply(lambda x: np.multiply.reduce(x.values)).reset_index()\n",
    "    gp.rename(columns={'not_a_product': 'prediction'}, inplace=True)\n",
    "    gp['product_id'] = none_product\n",
    "\n",
    "    data = pd.concat([data, gp], axis=0)\n",
    "    data.product_id = data.product_id.astype(np.uint32)\n",
    "\n",
    "#     data = data.loc[data.prediction > 0.01, ['order_id', 'prediction', 'product_id']]\n",
    "    data = data.loc[data.prediction > THRESHOLD, ['order_id', 'prediction', 'product_id']]\n",
    "\n",
    "    data = applyParallel(data.groupby(data.order_id), create_products).reset_index()\n",
    "\n",
    "    data[['order_id', 'products']].to_csv('data/sub.csv', index=False)\n",
    "    \n",
    "    print('sub created.')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>order_id</th>\n",
       "      <th>prediction</th>\n",
       "      <th>product_id</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.032399</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>203668</td>\n",
       "      <td>0.050655</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>1734476</td>\n",
       "      <td>0.044481</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>1627971</td>\n",
       "      <td>0.038003</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>1213682</td>\n",
       "      <td>0.033285</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   order_id  prediction  product_id\n",
       "0     65432    0.032399       17330\n",
       "1    203668    0.050655       17330\n",
       "2   1734476    0.044481       17330\n",
       "3   1627971    0.038003       17330\n",
       "4   1213682    0.033285       17330"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "4833292"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "befor > 0.01 4833292\n",
      "len(data.groupby(data.order_id))= 75000\n",
      "after > 0.01 4235807\n",
      "len(data.groupby(data.order_id))= 75000\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:337: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[key] = _infer_fill_value(value)\n",
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:337: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[key] = _infer_fill_value(value)\n",
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:337: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[key] = _infer_fill_value(value)\n",
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:517: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[item] = s\n",
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:517: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[item] = s\n",
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:337: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[key] = _infer_fill_value(value)\n",
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:517: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[item] = s\n",
      "/home/ubuntu/miniconda3/envs/py35/lib/python3.5/site-packages/pandas/core/indexing.py:517: SettingWithCopyWarning: \n",
      "A value is trying to be set on a copy of a slice from a DataFrame.\n",
      "Try using .loc[row_indexer,col_indexer] = value instead\n",
      "\n",
      "See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy\n",
      "  self.obj[item] = s\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2729.2975668907166second\n",
      "sub created. len= 75000\n"
     ]
    }
   ],
   "source": [
    "# python3 f1_optimal.py\n",
    "\n",
    "%run utils.py\n",
    "\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "from joblib import Parallel, delayed\n",
    "import multiprocessing\n",
    "from time import time\n",
    "\n",
    "from utils import fast_search\n",
    "\n",
    "none_product = 50000\n",
    "\n",
    "\n",
    "class F1Optimizer():\n",
    "    def __init__(self):\n",
    "        pass\n",
    "\n",
    "    @staticmethod\n",
    "    def get_expectations(P, pNone=None):\n",
    "        expectations = []\n",
    "        P = np.sort(P)[::-1]\n",
    "\n",
    "        n = np.array(P).shape[0]\n",
    "        DP_C = np.zeros((n + 2, n + 1))\n",
    "        if pNone is None:\n",
    "            pNone = (1.0 - P).prod()\n",
    "\n",
    "        DP_C[0][0] = 1.0\n",
    "        for j in range(1, n):\n",
    "            DP_C[0][j] = (1.0 - P[j - 1]) * DP_C[0, j - 1]\n",
    "\n",
    "        for i in range(1, n + 1):\n",
    "            DP_C[i, i] = DP_C[i - 1, i - 1] * P[i - 1]\n",
    "            for j in range(i + 1, n + 1):\n",
    "                DP_C[i, j] = P[j - 1] * DP_C[i - 1, j - 1] + (1.0 - P[j - 1]) * DP_C[i, j - 1]\n",
    "\n",
    "        DP_S = np.zeros((2 * n + 1,))\n",
    "        DP_SNone = np.zeros((2 * n + 1,))\n",
    "        for i in range(1, 2 * n + 1):\n",
    "            DP_S[i] = 1. / (1. * i)\n",
    "            DP_SNone[i] = 1. / (1. * i + 1)\n",
    "        for k in range(n + 1)[::-1]:\n",
    "            f1 = 0\n",
    "            f1None = 0\n",
    "            for k1 in range(n + 1):\n",
    "                f1 += 2 * k1 * DP_C[k1][k] * DP_S[k + k1]\n",
    "                f1None += 2 * k1 * DP_C[k1][k] * DP_SNone[k + k1]\n",
    "            for i in range(1, 2 * k - 1):\n",
    "                DP_S[i] = (1 - P[k - 1]) * DP_S[i] + P[k - 1] * DP_S[i + 1]\n",
    "                DP_SNone[i] = (1 - P[k - 1]) * DP_SNone[i] + P[k - 1] * DP_SNone[i + 1]\n",
    "            expectations.append([f1None + 2 * pNone / (2 + k), f1])\n",
    "\n",
    "        return np.array(expectations[::-1]).T\n",
    "\n",
    "    @staticmethod\n",
    "    def maximize_expectation(P, pNone=None):\n",
    "        expectations = F1Optimizer.get_expectations(P, pNone)\n",
    "\n",
    "        ix_max = np.unravel_index(expectations.argmax(), expectations.shape)\n",
    "        max_f1 = expectations[ix_max]\n",
    "\n",
    "        predNone = True if ix_max[0] == 0 else False\n",
    "        best_k = ix_max[1]\n",
    "\n",
    "        return best_k, predNone, max_f1\n",
    "\n",
    "    @staticmethod\n",
    "    def _F1(tp, fp, fn):\n",
    "        return 2 * tp / (2 * tp + fp + fn)\n",
    "\n",
    "    @staticmethod\n",
    "    def _Fbeta(tp, fp, fn, beta=1.0):\n",
    "        beta_squared = beta ** 2\n",
    "        return (1.0 + beta_squared) * tp / ((1.0 + beta_squared) * tp + fp + beta_squared * fn)\n",
    "\n",
    "\n",
    "def applyParallel(dfGrouped, func):\n",
    "    retLst = Parallel(n_jobs=multiprocessing.cpu_count())(delayed(func)(group) for name, group in dfGrouped)\n",
    "    return pd.concat(retLst)\n",
    "\n",
    "def create_products(df):\n",
    "    # print(df.product_id.values.shape)\n",
    "    products = df.product_id.values\n",
    "    prob = df.prediction.values\n",
    "\n",
    "    sort_index = np.argsort(prob)[::-1]\n",
    "    L2 = products[sort_index]\n",
    "    P2 = prob[sort_index]\n",
    "    \n",
    "#     values = fast_search(prob[sort_index][0:80], dtype=np.float64)\n",
    "#     index = np.argmax(values)\n",
    "    \n",
    "    index, predNone, max_f1 = F1Optimizer.maximize_expectation(P2)\n",
    "#     print('iteration', df.shape[0], 'optimal value', index)\n",
    "\n",
    "#     best = ' '.join(map(lambda x: str(x) if x != none_product else 'None', products[sort_index][0:index]))\n",
    "    best = ' '.join(map(lambda x: str(x) if  not predNone else 'None', L2[:index]))\n",
    "    df = df[0:1]\n",
    "    df.loc[:, 'products'] = best\n",
    "    return df\n",
    "\n",
    "def create_products_faron(df):\n",
    "    # print(df.product_id.values.shape)\n",
    "    products = df.product_id.values\n",
    "    prob = df.prediction.values\n",
    "\n",
    "    sort_index = np.argsort(prob)[::-1]\n",
    "    L2 = products[sort_index]\n",
    "    P2 = prob[sort_index]\n",
    "\n",
    "    opt = F1Optimizer.maximize_expectation(P2)\n",
    "\n",
    "    best_prediction = ['None'] if opt[1] else []\n",
    "    best_prediction += list(L2[:opt[0]])\n",
    "\n",
    "    #print(\"Prediction {} ({} elements) yields best E[F1] of {}\\n\".format(best_prediction, len(best_prediction), opt[2]))\n",
    "    print('iteration', df.shape[0], 'optimal value', opt[0])\n",
    "\n",
    "    best = ' '.join(map(lambda x: str(x), best_prediction))\n",
    "    df = df[0:1]\n",
    "    df.loc[:, 'products'] = best\n",
    "    return df\n",
    "\n",
    "if __name__ == '__main__':\n",
    "#     data = pd.read_pickle('data/prediction_rnn.pkl')\n",
    "#     data = pd.read_pickle('data/prediction_lgbm.pkl')\n",
    "    data = pd.read_pickle('data/prediction_lgbm_src.pkl')\n",
    "#     data['not_a_product'] = 1. - data.prediction\n",
    "    data.head()\n",
    "    len(data)\n",
    "\n",
    "#     gp = data.groupby('order_id')['not_a_product'].apply(lambda x: np.multiply.reduce(x.values)).reset_index()\n",
    "#     gp.rename(columns={'not_a_product': 'prediction'}, inplace=True)\n",
    "#     gp['product_id'] = none_product\n",
    "#     gp.head()\n",
    "#     len(gp)\n",
    "# #     gp[gp.product_id==none_product].head()\n",
    "\n",
    "#     data = pd.concat([data, gp], axis=0)\n",
    "#     data.product_id = data.product_id.astype(np.uint32)\n",
    "#     data.head()\n",
    "#     len(data)\n",
    "#     data[data.product_id==none_product].head()\n",
    "\n",
    "#     data = data.loc[data.prediction > 0.01, ['order_id', 'prediction', 'product_id']]\n",
    "#     data = data[['order_id', 'prediction', 'product_id']].copy()\n",
    "    print('befor > 0.01', len(data))\n",
    "    print('len(data.groupby(data.order_id))=', len(data.groupby(data.order_id)))\n",
    "    data = data.loc[data.prediction > 0.01, ['order_id', 'prediction', 'product_id']]\n",
    "    print('after > 0.01', len(data))\n",
    "    print('len(data.groupby(data.order_id))=', len(data.groupby(data.order_id)))\n",
    "\n",
    "    start = time()\n",
    "    data = applyParallel(data.groupby(data.order_id), create_products).reset_index()\n",
    "    print(str(time()-start) + \"second\")\n",
    "#     data = applyParallel(data.groupby(data.order_id), create_products_faron).reset_index()\n",
    "\n",
    "    data[['order_id', 'products']].to_csv('data/sub.csv', index=False)\n",
    "    \n",
    "    print('sub created. len=', len(data))\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "72745"
      ]
     },
     "execution_count": 37,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "72745"
      ]
     },
     "execution_count": 37,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "len(data)\n",
    "len(data_val.groupby(data.order_id))\n",
    "# len(result.groupby(data.order_id))\n",
    "# len(data.groupby(data.order_id))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 84,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'\\n@author: Faron\\n'"
      ]
     },
     "execution_count": 84,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "'\\nThis kernel implements the O(n²) F1-Score expectation maximization algorithm presented in\\n\"Ye, N., Chai, K., Lee, W., and Chieu, H.  Optimizing F-measures: A Tale of Two Approaches. In ICML, 2012.\"\\n\\nIt solves argmax_(0 <= k <= n,[[None]]) E[F1(P,k,[[None]])]\\nwith [[None]] being the indicator for predicting label \"None\"\\ngiven posteriors P = [p_1, p_2, ... , p_n], where p_1 > p_2 > ... > p_n\\nunder label independence assumption by means of dynamic programming in O(n²).\\n'"
      ]
     },
     "execution_count": 84,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "4833292"
      ]
     },
     "execution_count": 84,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>order_id</th>\n",
       "      <th>prediction</th>\n",
       "      <th>product_id</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.311053</td>\n",
       "      <td>17330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>60</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.311053</td>\n",
       "      <td>35419</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>276</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.309240</td>\n",
       "      <td>27407</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>483</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.309240</td>\n",
       "      <td>44635</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>660</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.312832</td>\n",
       "      <td>7751</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3779</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.316158</td>\n",
       "      <td>196</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6700</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.311053</td>\n",
       "      <td>23020</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6974</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.310750</td>\n",
       "      <td>26878</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7008</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.310750</td>\n",
       "      <td>25783</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7481</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.314170</td>\n",
       "      <td>41290</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9844</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.319533</td>\n",
       "      <td>33198</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14023</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.374244</td>\n",
       "      <td>12919</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14523</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.310276</td>\n",
       "      <td>36086</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16139</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.311426</td>\n",
       "      <td>3800</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16685</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.332279</td>\n",
       "      <td>11869</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16932</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.324497</td>\n",
       "      <td>20842</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19857</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.311715</td>\n",
       "      <td>25952</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20523</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.378618</td>\n",
       "      <td>21376</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22811</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.330457</td>\n",
       "      <td>9755</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24136</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.424181</td>\n",
       "      <td>11331</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24262</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.359689</td>\n",
       "      <td>36259</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24677</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.362367</td>\n",
       "      <td>41355</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24723</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.363709</td>\n",
       "      <td>4447</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24963</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.359437</td>\n",
       "      <td>256</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25037</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.407026</td>\n",
       "      <td>41844</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29170</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.388016</td>\n",
       "      <td>6187</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>31391</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.324877</td>\n",
       "      <td>24253</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>31984</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.384426</td>\n",
       "      <td>10673</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>33946</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.324877</td>\n",
       "      <td>5884</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>34264</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.398852</td>\n",
       "      <td>26604</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>42026</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.314470</td>\n",
       "      <td>21288</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>45150</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.439960</td>\n",
       "      <td>24852</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>72142</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.415981</td>\n",
       "      <td>5450</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>78025</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.321832</td>\n",
       "      <td>19706</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>79872</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.325349</td>\n",
       "      <td>43352</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>86356</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.327281</td>\n",
       "      <td>49683</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>97156</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.309732</td>\n",
       "      <td>22387</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>97196</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.309240</td>\n",
       "      <td>23032</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>98309</th>\n",
       "      <td>65432</td>\n",
       "      <td>0.398073</td>\n",
       "      <td>9076</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "       order_id  prediction  product_id\n",
       "0         65432    0.311053       17330\n",
       "60        65432    0.311053       35419\n",
       "276       65432    0.309240       27407\n",
       "483       65432    0.309240       44635\n",
       "660       65432    0.312832        7751\n",
       "3779      65432    0.316158         196\n",
       "6700      65432    0.311053       23020\n",
       "6974      65432    0.310750       26878\n",
       "7008      65432    0.310750       25783\n",
       "7481      65432    0.314170       41290\n",
       "9844      65432    0.319533       33198\n",
       "14023     65432    0.374244       12919\n",
       "14523     65432    0.310276       36086\n",
       "16139     65432    0.311426        3800\n",
       "16685     65432    0.332279       11869\n",
       "16932     65432    0.324497       20842\n",
       "19857     65432    0.311715       25952\n",
       "20523     65432    0.378618       21376\n",
       "22811     65432    0.330457        9755\n",
       "24136     65432    0.424181       11331\n",
       "24262     65432    0.359689       36259\n",
       "24677     65432    0.362367       41355\n",
       "24723     65432    0.363709        4447\n",
       "24963     65432    0.359437         256\n",
       "25037     65432    0.407026       41844\n",
       "29170     65432    0.388016        6187\n",
       "31391     65432    0.324877       24253\n",
       "31984     65432    0.384426       10673\n",
       "33946     65432    0.324877        5884\n",
       "34264     65432    0.398852       26604\n",
       "42026     65432    0.314470       21288\n",
       "45150     65432    0.439960       24852\n",
       "72142     65432    0.415981        5450\n",
       "78025     65432    0.321832       19706\n",
       "79872     65432    0.325349       43352\n",
       "86356     65432    0.327281       49683\n",
       "97156     65432    0.309732       22387\n",
       "97196     65432    0.309240       23032\n",
       "98309     65432    0.398073        9076"
      ]
     },
     "execution_count": 84,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "65432\n",
      "group:\n",
      "[ 0.31105308  0.31105308  0.30923996  0.30923996  0.31283207  0.31615817\n",
      "  0.31105308  0.31075016  0.31075016  0.31417042  0.31953265  0.37424395\n",
      "  0.31027613  0.31142552  0.33227936  0.32449691  0.31171538  0.37861795\n",
      "  0.33045701  0.42418085  0.35968936  0.36236651  0.36370949  0.35943657\n",
      "  0.407026    0.38801623  0.32487651  0.38442625  0.32487651  0.39885217\n",
      "  0.31446953  0.43995998  0.41598121  0.3218324   0.32534878  0.32728103\n",
      "  0.30973238  0.30923989  0.39807322]\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "(39, False, 0.50580456352060776)"
      ]
     },
     "execution_count": 84,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# -*- coding: utf-8 -*-\n",
    "\"\"\"\n",
    "@author: Faron\n",
    "\"\"\"\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "# import matplotlib.pylab as plt\n",
    "from datetime import datetime\n",
    "\n",
    "'''\n",
    "This kernel implements the O(n²) F1-Score expectation maximization algorithm presented in\n",
    "\"Ye, N., Chai, K., Lee, W., and Chieu, H.  Optimizing F-measures: A Tale of Two Approaches. In ICML, 2012.\"\n",
    "\n",
    "It solves argmax_(0 <= k <= n,[[None]]) E[F1(P,k,[[None]])]\n",
    "with [[None]] being the indicator for predicting label \"None\"\n",
    "given posteriors P = [p_1, p_2, ... , p_n], where p_1 > p_2 > ... > p_n\n",
    "under label independence assumption by means of dynamic programming in O(n²).\n",
    "'''\n",
    "\n",
    "\n",
    "class F1Optimizer():\n",
    "    def __init__(self):\n",
    "        pass\n",
    "\n",
    "    @staticmethod\n",
    "    def get_expectations(P, pNone=None):\n",
    "        expectations = []\n",
    "        P = np.sort(P)[::-1]\n",
    "\n",
    "        n = np.array(P).shape[0]\n",
    "        DP_C = np.zeros((n + 2, n + 1))\n",
    "        if pNone is None:\n",
    "            pNone = (1.0 - P).prod()\n",
    "\n",
    "        DP_C[0][0] = 1.0\n",
    "        for j in range(1, n):\n",
    "            DP_C[0][j] = (1.0 - P[j - 1]) * DP_C[0, j - 1]\n",
    "\n",
    "        for i in range(1, n + 1):\n",
    "            DP_C[i, i] = DP_C[i - 1, i - 1] * P[i - 1]\n",
    "            for j in range(i + 1, n + 1):\n",
    "                DP_C[i, j] = P[j - 1] * DP_C[i - 1, j - 1] + (1.0 - P[j - 1]) * DP_C[i, j - 1]\n",
    "\n",
    "        DP_S = np.zeros((2 * n + 1,))\n",
    "        DP_SNone = np.zeros((2 * n + 1,))\n",
    "        for i in range(1, 2 * n + 1):\n",
    "            DP_S[i] = 1. / (1. * i)\n",
    "            DP_SNone[i] = 1. / (1. * i + 1)\n",
    "        for k in range(n + 1)[::-1]:\n",
    "            f1 = 0\n",
    "            f1None = 0\n",
    "            for k1 in range(n + 1):\n",
    "                f1 += 2 * k1 * DP_C[k1][k] * DP_S[k + k1]\n",
    "                f1None += 2 * k1 * DP_C[k1][k] * DP_SNone[k + k1]\n",
    "            for i in range(1, 2 * k - 1):\n",
    "                DP_S[i] = (1 - P[k - 1]) * DP_S[i] + P[k - 1] * DP_S[i + 1]\n",
    "                DP_SNone[i] = (1 - P[k - 1]) * DP_SNone[i] + P[k - 1] * DP_SNone[i + 1]\n",
    "            expectations.append([f1None + 2 * pNone / (2 + k), f1])\n",
    "\n",
    "        return np.array(expectations[::-1]).T\n",
    "\n",
    "    @staticmethod\n",
    "    def maximize_expectation(P, pNone=None):\n",
    "        expectations = F1Optimizer.get_expectations(P, pNone)\n",
    "\n",
    "        ix_max = np.unravel_index(expectations.argmax(), expectations.shape)\n",
    "        max_f1 = expectations[ix_max]\n",
    "\n",
    "        predNone = True if ix_max[0] == 0 else False\n",
    "        best_k = ix_max[1]\n",
    "\n",
    "        return best_k, predNone, max_f1\n",
    "\n",
    "    @staticmethod\n",
    "    def _F1(tp, fp, fn):\n",
    "        return 2 * tp / (2 * tp + fp + fn)\n",
    "\n",
    "    @staticmethod\n",
    "    def _Fbeta(tp, fp, fn, beta=1.0):\n",
    "        beta_squared = beta ** 2\n",
    "        return (1.0 + beta_squared) * tp / ((1.0 + beta_squared) * tp + fp + beta_squared * fn)\n",
    "\n",
    "\n",
    "def best_prediction(P, pNone=None):\n",
    "    print(\"Maximize F1-Expectation\")\n",
    "    print(\"=\" * 23)\n",
    "    P = np.sort(P)[::-1]\n",
    "    n = P.shape[0]\n",
    "    L = ['L{}'.format(i + 1) for i in range(n)]\n",
    "\n",
    "    if pNone is None:\n",
    "        print(\"Estimate p(None|x) as (1-p_1)*(1-p_2)*...*(1-p_n)\")\n",
    "        pNone = (1.0 - P).prod()\n",
    "\n",
    "    PL = ['p({}|x)={}'.format(l, p) for l, p in zip(L, P)]\n",
    "    print(\"Posteriors: {} (n={})\".format(PL, n))\n",
    "    print(\"p(None|x)={}\".format(pNone))\n",
    "\n",
    "    opt = F1Optimizer.maximize_expectation(P, pNone)\n",
    "    best_prediction = ['None'] if opt[1] else []\n",
    "    best_prediction += (L[:opt[0]])\n",
    "    f1_max = opt[2]\n",
    "\n",
    "    print(\"Prediction {} yields best E[F1] of {}\\n\".format(best_prediction, f1_max))\n",
    "\n",
    "def print_best_prediction(P, pNone=None):\n",
    "    print(\"Maximize F1-Expectation\")\n",
    "    print(\"=\" * 23)\n",
    "    P = np.sort(P)[::-1]\n",
    "    n = P.shape[0]\n",
    "    L = ['L{}'.format(i + 1) for i in range(n)]\n",
    "\n",
    "    if pNone is None:\n",
    "        print(\"Estimate p(None|x) as (1-p_1)*(1-p_2)*...*(1-p_n)\")\n",
    "        pNone = (1.0 - P).prod()\n",
    "\n",
    "    PL = ['p({}|x)={}'.format(l, p) for l, p in zip(L, P)]\n",
    "    print(\"Posteriors: {} (n={})\".format(PL, n))\n",
    "    print(\"p(None|x)={}\".format(pNone))\n",
    "\n",
    "    opt = F1Optimizer.maximize_expectation(P, pNone)\n",
    "    best_prediction = ['None'] if opt[1] else []\n",
    "    best_prediction += (L[:opt[0]])\n",
    "    f1_max = opt[2]\n",
    "\n",
    "    print(\"Prediction {} yields best E[F1] of {}\\n\".format(best_prediction, f1_max))\n",
    "\n",
    "def save_plot(P, filename='expected_f1.png'):\n",
    "    E_F1 = pd.DataFrame(F1Optimizer.get_expectations(P).T, columns=[\"/w None\", \"/wo None\"])\n",
    "    best_k, _, max_f1 = F1Optimizer.maximize_expectation(P)\n",
    "\n",
    "    plt.style.use('ggplot')\n",
    "    plt.figure()\n",
    "    E_F1.plot()\n",
    "    plt.title('Expected F1-Score for \\n {}'.format(\"P = [{}]\".format(\",\".join(map(str, P)))), fontsize=12)\n",
    "    plt.xlabel('k')\n",
    "    plt.xticks(np.arange(0, len(P) + 1, 1.0))\n",
    "    plt.ylabel('E[F1(P,k)]')\n",
    "    plt.plot([best_k], [max_f1], 'o', color='#000000', markersize=4)\n",
    "    plt.annotate('max E[F1(P,k)] = E[F1(P,{})] = {:.5f}'.format(best_k, max_f1), xy=(best_k, max_f1),\n",
    "                 xytext=(best_k, max_f1 * 0.8), arrowprops=dict(facecolor='black', shrink=0.05, width=1, headwidth=7),\n",
    "                 horizontalalignment='center', verticalalignment='top')\n",
    "    plt.gcf().savefig(filename)\n",
    "\n",
    "\n",
    "\n",
    "def timeit(P):\n",
    "    s = datetime.now()\n",
    "    F1Optimizer.maximize_expectation(P)\n",
    "    e = datetime.now()\n",
    "    return (e-s).microseconds / 1E6\n",
    "\n",
    "\n",
    "def benchmark(n=100, filename='runtimes.png'):\n",
    "    results = pd.DataFrame(index=np.arange(1,n+1))\n",
    "    results['runtimes'] = 0\n",
    "\n",
    "    for i in range(1,n+1):\n",
    "        runtimes = []\n",
    "        for j in range(5):\n",
    "            runtimes.append(timeit(np.sort(np.random.rand(i))[::-1]))\n",
    "        results.iloc[i-1] = np.mean(runtimes)\n",
    "\n",
    "    x = results.index\n",
    "    y = results.runtimes\n",
    "    results['quadratic fit'] = np.poly1d(np.polyfit(x, y, deg=2))(x)\n",
    "\n",
    "    plt.style.use('ggplot')\n",
    "    plt.figure()\n",
    "    results.plot()\n",
    "    plt.title('Expectation Maximization Runtimes', fontsize=12)\n",
    "    plt.xlabel('n = |P|')\n",
    "    plt.ylabel('time in seconds')\n",
    "    plt.gcf().savefig(filename)\n",
    "\n",
    "\n",
    "# if __name__ == '__main__':\n",
    "#     print_best_prediction([0.3, 0.2])\n",
    "#     print_best_prediction([0.3, 0.2], 0.57)\n",
    "#     print_best_prediction([0.9, 0.6])\n",
    "#     print_best_prediction([0.5, 0.4, 0.3, 0.35, 0.33, 0.31, 0.29, 0.27, 0.25, 0.20, 0.15, 0.10])\n",
    "#     print_best_prediction([0.5, 0.4, 0.3, 0.35, 0.33, 0.31, 0.29, 0.27, 0.25, 0.20, 0.15, 0.10], 0.2)\n",
    "\n",
    "#     save_plot([0.45, 0.35, 0.31, 0.29, 0.27, 0.25, 0.22, 0.20, 0.17, 0.15, 0.10, 0.05, 0.02])\n",
    "#     benchmark()\n",
    "\n",
    "\n",
    "# F1Optimizer.maximize_expectation([0.3, 0.35, 0.33, 0.31, 0.29, 0.27, 0.25, 0.20, 0.15, 0.10])\n",
    "# F1Optimizer.maximize_expectation([0.3, 0.35, 0.33, 0.31, 0.29, 0.27, 0.25, 0.20, 0.15, 0.10], 0.2)\n",
    "data = pd.read_pickle('data/prediction_lgbm.pkl')\n",
    "len(data)\n",
    "data_t = data[data.order_id==65432]\n",
    "data_t\n",
    "for name, group in data_t.groupby('order_id'):\n",
    "    print(name)\n",
    "    print('group:')\n",
    "    print(group.prediction.values)\n",
    "    F1Optimizer.maximize_expectation(group.prediction.values)\n",
    "    \n",
    "# F1Optimizer.maximize_expectation(data_t.groupby('order_id').prediction.apply(lambda x: list(x)).values.tolist())\n",
    "\n",
    "# print_best_prediction([0.3, 0.35, 0.33, 0.31, 0.29, 0.27, 0.25, 0.20, 0.15, 0.10])\n",
    "# print_best_prediction([0.3, 0.35, 0.33, 0.31, 0.29, 0.27, 0.25, 0.20, 0.15, 0.10], 0.2)\n",
    "# # print_best_prediction([0.5, 0.4, 0.3, 0.35, 0.33, 0.31, 0.29, 0.27, 0.25, 0.20, 0.15, 0.10])\n",
    "# # print_best_prediction([0.5, 0.4, 0.3, 0.35, 0.33, 0.31, 0.29, 0.27, 0.25, 0.20, 0.15, 0.10], 0.2)\n",
    "# # data = pd.read_pickle('data/prediction_lgbm.pkl')\n",
    "# len(data.groupby(data.order_id))\n",
    "# data.head()\n",
    "\n",
    "# if __name__ == '__main__':\n",
    "# #     data = pd.read_pickle('data/prediction_rnn.pkl')\n",
    "#     data = pd.read_pickle('data/prediction_lgbm.pkl')\n",
    "#     data['not_a_product'] = 1. - data.prediction\n",
    "\n",
    "#     gp = data.groupby('order_id')['not_a_product'].apply(lambda x: np.multiply.reduce(x.values)).reset_index()\n",
    "#     gp.rename(columns={'not_a_product': 'prediction'}, inplace=True)\n",
    "#     gp['product_id'] = none_product\n",
    "\n",
    "#     data = pd.concat([data, gp], axis=0)\n",
    "#     data.product_id = data.product_id.astype(np.uint32)\n",
    "\n",
    "# #     data = data.loc[data.prediction > 0.01, ['order_id', 'prediction', 'product_id']]\n",
    "#     data = data.loc[data.prediction > THRESHOLD, ['order_id', 'prediction', 'product_id']]\n",
    "\n",
    "#     data = applyParallel(data.groupby(data.order_id), create_products).reset_index()\n",
    "#     best_prediction（）\n",
    "\n",
    "#     data[['order_id', 'products']].to_csv('data/sub.csv', index=False)\n",
    "    \n",
    "#     print('sub created.')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
