{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from IPython.core.interactiveshell import InteractiveShell\n",
    "InteractiveShell.ast_node_interactivity = \"all\"\n",
    "\n",
    "import gc\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import os"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "loaded\n",
      "          user_id  product_id\n",
      "0          202279       33120\n",
      "1          202279       28985\n",
      "2          202279        9327\n",
      "3          202279       45918\n",
      "4          202279       30035\n",
      "5          202279       17794\n",
      "6          202279       40141\n",
      "7          202279        1819\n",
      "8          202279       43668\n",
      "9          205970       33754\n",
      "10         205970       24838\n",
      "11         205970       17704\n",
      "12         205970       21903\n",
      "13         205970       17668\n",
      "14         205970       46667\n",
      "15         205970       17461\n",
      "16         205970       32665\n",
      "17         178520       46842\n",
      "18         178520       26434\n",
      "19         178520       39758\n",
      "20         178520       27761\n",
      "21         178520       10054\n",
      "22         178520       21351\n",
      "23         178520       22598\n",
      "24         178520       34862\n",
      "25         178520       40285\n",
      "26         178520       17616\n",
      "27         178520       25146\n",
      "28         178520       32645\n",
      "29         178520       41276\n",
      "...           ...         ...\n",
      "32434403   124485       26919\n",
      "32434407   124485       34270\n",
      "32434408    50050       31553\n",
      "32434409    50050        4302\n",
      "32434410    50050       10246\n",
      "32434411    50050        6999\n",
      "32434412    50050       26209\n",
      "32434418    50050        6473\n",
      "32434419    50050       20061\n",
      "32434420   103510       49187\n",
      "32434421   103510       20126\n",
      "32434424   167185       29066\n",
      "32434425   167185       49383\n",
      "32434434   193225       48101\n",
      "32434455   108687       30136\n",
      "32434464    52726       38061\n",
      "32434465   117076       38185\n",
      "32434467   117076       32299\n",
      "32434468   117076        3060\n",
      "32434469   117076       20539\n",
      "32434470   117076       35221\n",
      "32434471   117076       12861\n",
      "32434477   175185       12023\n",
      "32434478   175185       47941\n",
      "32434479    25247        7854\n",
      "32434480    25247       45309\n",
      "32434481    25247       21162\n",
      "32434483    25247       35211\n",
      "32434485    25247       11352\n",
      "32434486    25247        4600\n",
      "\n",
      "[13307953 rows x 2 columns]\n",
      "save\n",
      "(13307953, 2)\n",
      "Index([u'user_id', u'product_id'], dtype='object')\n"
     ]
    },
    {
     "ename": "IOError",
     "evalue": "[Errno 2] No such file or directory: 'data/previous_products.pkl'",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mIOError\u001b[0m                                   Traceback (most recent call last)",
      "\u001b[1;32m<ipython-input-1-5da10bc441c1>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m()\u001b[0m\n\u001b[0;32m     33\u001b[0m     \u001b[1;32mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mlabels\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m     34\u001b[0m     \u001b[1;32mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mlabels\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mcolumns\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 35\u001b[1;33m     \u001b[0mlabels\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mto_pickle\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'data/previous_products.pkl'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[1;32md:\\ProgramData\\Miniconda2\\envs\\jdata\\lib\\site-packages\\pandas\\core\\generic.pyc\u001b[0m in \u001b[0;36mto_pickle\u001b[1;34m(self, path)\u001b[0m\n\u001b[0;32m   1211\u001b[0m         \"\"\"\n\u001b[0;32m   1212\u001b[0m         \u001b[1;32mfrom\u001b[0m \u001b[0mpandas\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mio\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mpickle\u001b[0m \u001b[1;32mimport\u001b[0m \u001b[0mto_pickle\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1213\u001b[1;33m         \u001b[1;32mreturn\u001b[0m \u001b[0mto_pickle\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mpath\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m   1214\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1215\u001b[0m     \u001b[1;32mdef\u001b[0m \u001b[0mto_clipboard\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mexcel\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mNone\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0msep\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mNone\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32md:\\ProgramData\\Miniconda2\\envs\\jdata\\lib\\site-packages\\pandas\\io\\pickle.pyc\u001b[0m in \u001b[0;36mto_pickle\u001b[1;34m(obj, path)\u001b[0m\n\u001b[0;32m     17\u001b[0m         \u001b[0mFile\u001b[0m \u001b[0mpath\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m     18\u001b[0m     \"\"\"\n\u001b[1;32m---> 19\u001b[1;33m     \u001b[1;32mwith\u001b[0m \u001b[0mopen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mpath\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m'wb'\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mf\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m     20\u001b[0m         \u001b[0mpkl\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mdump\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mf\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mprotocol\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mpkl\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mHIGHEST_PROTOCOL\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m     21\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;31mIOError\u001b[0m: [Errno 2] No such file or directory: 'data/previous_products.pkl'"
     ]
    }
   ],
   "source": [
    "# python3 create_products.py\n",
    "\n",
    "\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    path = \"../input/\"\n",
    "\n",
    "    order_prior = pd.read_csv(os.path.join(path, \"order_products__prior.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                                      'product_id': np.uint16,\n",
    "                                                                                      'add_to_cart_order':np.uint8,\n",
    "                                                                                      'reordered': bool})\n",
    "    orders = pd.read_csv(os.path.join(path, \"orders.csv\"), dtype={'order_id':np.uint32,\n",
    "                                                                  'user_id': np.uint32,\n",
    "                                                                  'eval_set': 'category',\n",
    "                                                                  'order_number':np.uint8,\n",
    "                                                                  'order_dow': np.uint8,\n",
    "                                                                  'order_hour_of_day': np.uint8\n",
    "                                                                  })\n",
    "\n",
    "    print('loaded')\n",
    "\n",
    "    orders = orders.loc[orders.eval_set == 'prior', :]\n",
    "    orders_user = orders[['order_id', 'user_id']]\n",
    "    labels = pd.merge(order_prior, orders_user, on='order_id')\n",
    "    labels = labels.loc[:, ['user_id', 'product_id']].drop_duplicates()\n",
    "\n",
    "    print(labels)\n",
    "\n",
    "    print('save')\n",
    "    print(labels.shape)\n",
    "    print(labels.columns)\n",
    "    labels.to_pickle('./data/previous_products.pkl')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "data is loaded\n",
      "Index([u'order_id', u'product_id', u'reordered', u'eval_set'], dtype='object')\n",
      "(13307953, 4)\n"
     ]
    }
   ],
   "source": [
    "# python3 split_data_set.py\n",
    "\n",
    "import gc\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import os\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    path = \"../input/\"\n",
    "    folds = 1\n",
    "\n",
    "    aisles = pd.read_csv(os.path.join(path, \"aisles.csv\"), dtype={'aisle_id': np.uint8, 'aisle':'category'})\n",
    "    departments = pd.read_csv(os.path.join(path, \"departments.csv\"), dtype={'department_id':np.uint8, 'department': 'category'})\n",
    "    order_prior = pd.read_csv(os.path.join(path, \"order_products__prior.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                                      'product_id': np.uint16,\n",
    "                                                                                      'add_to_cart_order':np.uint8,\n",
    "                                                                                      'reordered': bool})\n",
    "    order_train = pd.read_csv(os.path.join(path, \"order_products__train.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                                      'product_id': np.uint16,\n",
    "                                                                                      'add_to_cart_order':np.uint8,\n",
    "                                                                                      'reordered': bool})\n",
    "    orders = pd.read_csv(os.path.join(path, \"orders.csv\"), dtype={'order_id':np.uint32,\n",
    "                                                                  'user_id': np.uint32,\n",
    "                                                                  'eval_set': 'category',\n",
    "                                                                  'order_number':np.uint8,\n",
    "                                                                  'order_dow': np.uint8,\n",
    "                                                                  'order_hour_of_day': np.uint8\n",
    "                                                                  })\n",
    "\n",
    "    products = pd.read_csv(os.path.join(path, \"products.csv\"), dtype={'product_id': np.uint16,\n",
    "                                                                      'aisle_id': np.uint8,\n",
    "                                                                      'department_id': np.uint8})\n",
    "\n",
    "    labels = pd.read_pickle('data/previous_products.pkl')\n",
    "    orders = orders.loc[(orders.eval_set == 'train') | (orders.eval_set == 'test'), :]\n",
    "    labels = pd.merge(labels, orders[['order_id', 'user_id', 'eval_set']], on='user_id').drop(['user_id'], axis=1)\n",
    "\n",
    "    order_train.drop(['add_to_cart_order'], axis=1, inplace=True)\n",
    "\n",
    "    print('data is loaded')\n",
    "\n",
    "    orders = np.unique(labels.order_id)\n",
    "\n",
    "    size = orders.shape[0] // folds\n",
    "\n",
    "    for fold in range(folds):\n",
    "\n",
    "        current = orders[fold * size:(fold + 1) * size]\n",
    "\n",
    "        current = labels.loc[np.in1d(labels.order_id, current), :]\n",
    "\n",
    "        current = pd.merge(order_train, current, on=['order_id', 'product_id'], how='right')\n",
    "        current.reordered.fillna(False, inplace=True)\n",
    "        print(current.columns)\n",
    "        print(current.shape)\n",
    "\n",
    "        current.to_pickle('data/chunk_{}.pkl'.format(fold))\n",
    "\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "13307953"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "206209"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "d:\\ProgramData\\Miniconda2\\envs\\jdata\\lib\\site-packages\\ipykernel_launcher.py:3: FutureWarning: sort(columns=....) is deprecated, use sort_values(by=.....)\n",
      "  This is separate from the ipykernel package so we can avoid doing imports until\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>product_id</th>\n",
       "      <th>order_id</th>\n",
       "      <th>eval_set</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>8202483</th>\n",
       "      <td>2067</td>\n",
       "      <td>1</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8202472</th>\n",
       "      <td>14947</td>\n",
       "      <td>1</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8202473</th>\n",
       "      <td>5707</td>\n",
       "      <td>1</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8202474</th>\n",
       "      <td>44632</td>\n",
       "      <td>1</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8202475</th>\n",
       "      <td>30881</td>\n",
       "      <td>1</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8202476</th>\n",
       "      <td>43633</td>\n",
       "      <td>1</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8202477</th>\n",
       "      <td>42001</td>\n",
       "      <td>1</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8202478</th>\n",
       "      <td>49302</td>\n",
       "      <td>1</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8202479</th>\n",
       "      <td>22035</td>\n",
       "      <td>1</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8202480</th>\n",
       "      <td>11109</td>\n",
       "      <td>1</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8202481</th>\n",
       "      <td>44359</td>\n",
       "      <td>1</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8202482</th>\n",
       "      <td>24852</td>\n",
       "      <td>1</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018241</th>\n",
       "      <td>6291</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018223</th>\n",
       "      <td>38777</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018222</th>\n",
       "      <td>13107</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018240</th>\n",
       "      <td>40002</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018224</th>\n",
       "      <td>48896</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018225</th>\n",
       "      <td>1283</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018221</th>\n",
       "      <td>21709</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018238</th>\n",
       "      <td>18288</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018239</th>\n",
       "      <td>16965</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018229</th>\n",
       "      <td>31964</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018236</th>\n",
       "      <td>15613</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018235</th>\n",
       "      <td>21463</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018234</th>\n",
       "      <td>39275</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018233</th>\n",
       "      <td>26429</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018232</th>\n",
       "      <td>11494</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018231</th>\n",
       "      <td>7035</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018230</th>\n",
       "      <td>21903</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9018237</th>\n",
       "      <td>44056</td>\n",
       "      <td>17</td>\n",
       "      <td>test</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "         product_id  order_id eval_set\n",
       "8202483        2067         1    train\n",
       "8202472       14947         1    train\n",
       "8202473        5707         1    train\n",
       "8202474       44632         1    train\n",
       "8202475       30881         1    train\n",
       "8202476       43633         1    train\n",
       "8202477       42001         1    train\n",
       "8202478       49302         1    train\n",
       "8202479       22035         1    train\n",
       "8202480       11109         1    train\n",
       "8202481       44359         1    train\n",
       "8202482       24852         1    train\n",
       "9018241        6291        17     test\n",
       "9018223       38777        17     test\n",
       "9018222       13107        17     test\n",
       "9018240       40002        17     test\n",
       "9018224       48896        17     test\n",
       "9018225        1283        17     test\n",
       "9018221       21709        17     test\n",
       "9018238       18288        17     test\n",
       "9018239       16965        17     test\n",
       "9018229       31964        17     test\n",
       "9018236       15613        17     test\n",
       "9018235       21463        17     test\n",
       "9018234       39275        17     test\n",
       "9018233       26429        17     test\n",
       "9018232       11494        17     test\n",
       "9018231        7035        17     test\n",
       "9018230       21903        17     test\n",
       "9018237       44056        17     test"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>product_id</th>\n",
       "      <th>order_id</th>\n",
       "      <th>eval_set</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>33120</td>\n",
       "      <td>1050357</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>28985</td>\n",
       "      <td>1050357</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>9327</td>\n",
       "      <td>1050357</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>45918</td>\n",
       "      <td>1050357</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>30035</td>\n",
       "      <td>1050357</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   product_id  order_id eval_set\n",
       "0       33120   1050357    train\n",
       "1       28985   1050357    train\n",
       "2        9327   1050357    train\n",
       "3       45918   1050357    train\n",
       "4       30035   1050357    train"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "array([      1,      17,      34, ..., 3421058, 3421063, 3421070], dtype=uint32)"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>order_id</th>\n",
       "      <th>product_id</th>\n",
       "      <th>reordered</th>\n",
       "      <th>eval_set</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1</td>\n",
       "      <td>49302</td>\n",
       "      <td>True</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>1</td>\n",
       "      <td>11109</td>\n",
       "      <td>True</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>1</td>\n",
       "      <td>43633</td>\n",
       "      <td>True</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>1</td>\n",
       "      <td>22035</td>\n",
       "      <td>True</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>36</td>\n",
       "      <td>19660</td>\n",
       "      <td>True</td>\n",
       "      <td>train</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   order_id  product_id reordered eval_set\n",
       "0         1       49302      True    train\n",
       "1         1       11109      True    train\n",
       "2         1       43633      True    train\n",
       "3         1       22035      True    train\n",
       "4        36       19660      True    train"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/plain": [
       "13307953"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "len(labels)\n",
    "len(orders)\n",
    "labels.sort('order_id').head(30)\n",
    "\n",
    "labels.head()\n",
    "orders\n",
    "\n",
    "current.head()\n",
    "\n",
    "len(current)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Index([u'order_id', u'product_id', u'eval_set', u'user_id', u'order_number',\n",
      "       u'days_since_prior_order_comsum'],\n",
      "      dtype='object')\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "d:\\ProgramData\\Miniconda2\\envs\\jdata\\lib\\site-packages\\numpy\\core\\fromnumeric.py:2889: RuntimeWarning: Mean of empty slice.\n",
      "  out=out, **kwargs)\n"
     ]
    }
   ],
   "source": [
    "#  python3 orders_comsum.py\n",
    "\n",
    "import gc\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import os\n",
    "import json\n",
    "import sklearn.metrics\n",
    "from sklearn.metrics import f1_score\n",
    "from sklearn.model_selection import train_test_split\n",
    "from scipy.sparse import dok_matrix, coo_matrix\n",
    "from sklearn.utils.multiclass import  type_of_target\n",
    "\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    data_path = \"data\"\n",
    "    path = \"../input/\"\n",
    "\n",
    "    aisles = pd.read_csv(os.path.join(path, \"aisles.csv\"), dtype={'aisle_id': np.uint8, 'aisle': 'category'})\n",
    "    departments = pd.read_csv(os.path.join(path, \"departments.csv\"),\n",
    "                              dtype={'department_id': np.uint8, 'department': 'category'})\n",
    "    order_prior = pd.read_csv(os.path.join(path, \"order_products__prior.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                                      'product_id': np.uint16,\n",
    "                                                                                      'add_to_cart_order': np.uint8,\n",
    "                                                                                      'reordered': bool})\n",
    "    order_train = pd.read_csv(os.path.join(path, \"order_products__train.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                                      'product_id': np.uint16,\n",
    "                                                                                      'add_to_cart_order': np.uint8,\n",
    "                                                                                      'reordered': bool})\n",
    "    orders = pd.read_csv(os.path.join(path, \"orders.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                  'user_id': np.uint32,\n",
    "                                                                  'eval_set': 'category',\n",
    "                                                                  'order_number': np.uint8,\n",
    "                                                                  'order_dow': np.uint8,\n",
    "                                                                  'order_hour_of_day': np.uint8\n",
    "                                                                  })\n",
    "\n",
    "    products = pd.read_csv(os.path.join(path, \"products.csv\"), dtype={'product_id': np.uint16,\n",
    "                                                                      'aisle_id': np.uint8,\n",
    "                                                                      'department_id': np.uint8})\n",
    "\n",
    "    labels = pd.read_pickle(os.path.join(data_path, 'chunk_0.pkl'))\n",
    "    user_product = pd.read_pickle(os.path.join(data_path, 'previous_products.pkl'))\n",
    "\n",
    "    order_comsum = orders[['user_id', 'order_number', 'days_since_prior_order']].groupby(['user_id', 'order_number'])\\\n",
    "            ['days_since_prior_order'].sum().groupby(level=[0]).cumsum().reset_index().rename(columns={'days_since_prior_order':'days_since_prior_order_comsum'})\n",
    "\n",
    "    # order_comsum['days_since_prior_order_comsum'].fillna(0, inplace=True)\n",
    "    order_comsum.to_pickle('data/orders_comsum.pkl')\n",
    "\n",
    "    order_comsum = pd.merge(order_comsum, orders, on=['user_id', 'order_number'])[['user_id', 'order_number', 'days_since_prior_order_comsum', 'order_id']]\n",
    "\n",
    "    order_product = pd.merge(order_prior, orders, on='order_id')[['order_id', 'product_id', 'eval_set']]\n",
    "    order_product_train_test = labels[['order_id', 'product_id', 'eval_set']]\n",
    "\n",
    "    order_product = pd.concat([order_product, order_product_train_test])\n",
    "\n",
    "    order_product = pd.merge(order_product, order_comsum, on='order_id')\n",
    "\n",
    "    print(order_product.columns)\n",
    "\n",
    "    order_product = pd.merge(order_product, user_product, on=['user_id', 'product_id'])\n",
    "\n",
    "    temp = order_product.groupby(['user_id', 'product_id', 'order_number'])['days_since_prior_order_comsum'].sum().groupby(level=[0, 1]).apply(lambda x: np.diff(np.nan_to_num(x)))\n",
    "    temp = temp.to_frame('periods').reset_index()\n",
    "\n",
    "    temp.to_pickle('data/product_period.pkl')\n",
    "\n",
    "    aggregated = temp.copy()\n",
    "    aggregated['last'] = aggregated.periods.apply(lambda x: x[-1])\n",
    "    aggregated['prev1'] = aggregated.periods.apply(lambda x: x[-2] if len(x) > 1 else np.nan)\n",
    "    aggregated['prev2'] = aggregated.periods.apply(lambda x: x[-3] if len(x) > 2 else np.nan)\n",
    "    aggregated['median'] = aggregated.periods.apply(lambda x: np.median(x[:-1]))\n",
    "    aggregated['mean'] = aggregated.periods.apply(lambda x: np.mean(x[:-1]))\n",
    "    aggregated.drop('periods', axis=1, inplace=True)\n",
    "\n",
    "    aggregated.to_pickle('data/product_periods_stat.pkl')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Index([u'product_id', u'reordered'], dtype='object')\n",
      "Index([u'user_id', u'department_id', u'dep_products', u'dep_reordered'], dtype='object')\n",
      "Index([u'product_id', u'reordered'], dtype='object')\n"
     ]
    }
   ],
   "source": [
    "# python3 user_product_rank.py\n",
    "\n",
    "import gc\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import os\n",
    "import json\n",
    "import sklearn.metrics\n",
    "from sklearn.metrics import f1_score\n",
    "from sklearn.model_selection import train_test_split\n",
    "from scipy.sparse import dok_matrix, coo_matrix\n",
    "from sklearn.utils.multiclass import type_of_target\n",
    "\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    data_path = \"data\"\n",
    "    path = \"../input/\"\n",
    "\n",
    "    aisles = pd.read_csv(os.path.join(path, \"aisles.csv\"), dtype={'aisle_id': np.uint8, 'aisle': 'category'})\n",
    "    departments = pd.read_csv(os.path.join(path, \"departments.csv\"),\n",
    "                              dtype={'department_id': np.uint8, 'department': 'category'})\n",
    "    order_prior = pd.read_csv(os.path.join(path, \"order_products__prior.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                                      'product_id': np.uint16,\n",
    "                                                                                      'add_to_cart_order': np.uint8,\n",
    "                                                                                      'reordered': bool})\n",
    "    order_train = pd.read_csv(os.path.join(path, \"order_products__train.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                                      'product_id': np.uint16,\n",
    "                                                                                      'add_to_cart_order': np.uint8,\n",
    "                                                                                      'reordered': bool})\n",
    "    orders = pd.read_csv(os.path.join(path, \"orders.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                  'user_id': np.uint32,\n",
    "                                                                  'eval_set': 'category',\n",
    "                                                                  'order_number': np.uint8,\n",
    "                                                                  'order_dow': np.uint8,\n",
    "                                                                  'order_hour_of_day': np.uint8\n",
    "                                                                  })\n",
    "\n",
    "    products = pd.read_csv(os.path.join(path, \"products.csv\"), dtype={'product_id': np.uint16,\n",
    "                                                                      'aisle_id': np.uint8,\n",
    "                                                                      'department_id': np.uint8})\n",
    "\n",
    "    # NOTE(review): this silently overwrites the CSV `order_train` loaded above;\n",
    "    # neither version is actually used later in this cell.\n",
    "    order_train = pd.read_pickle(os.path.join(data_path, 'chunk_0.pkl'))\n",
    "\n",
    "    orders_products = pd.merge(orders, order_prior, on=\"order_id\")\n",
    "\n",
    "    orders_products_products = pd.merge(orders_products, products[['product_id', 'department_id', 'aisle_id']],\n",
    "                                        on='product_id')\n",
    "\n",
    "    user_dep_stat = orders_products_products.groupby(['user_id', 'department_id']).agg(\n",
    "        {'product_id': lambda x: x.nunique(),\n",
    "         'reordered': 'sum'\n",
    "         })\n",
    "    print(user_dep_stat.columns)\n",
    "    user_dep_stat.rename(columns={'product_id': 'dep_products',\n",
    "                                  'reordered': 'dep_reordered'}, inplace=True)\n",
    "    user_dep_stat.reset_index(inplace=True)\n",
    "    print(user_dep_stat.columns)\n",
    "    user_dep_stat.to_pickle('data/user_department_products.pkl')\n",
    "\n",
    "    user_aisle_stat = orders_products_products.groupby(['user_id', 'aisle_id']).agg(\n",
    "        {'product_id': lambda x: x.nunique(),\n",
    "         'reordered': 'sum'\n",
    "         })\n",
    "    print(user_aisle_stat.columns)\n",
    "    user_aisle_stat.rename(columns={'product_id': 'aisle_products',\n",
    "                                    'reordered': 'aisle_reordered'}, inplace=True)\n",
    "    user_aisle_stat.reset_index(inplace=True)\n",
    "    user_aisle_stat.to_pickle('data/user_aisle_products.pkl')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# python3 create_prod2vec_dataset.py\n",
    "\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import os\n",
    "\n",
    "def create_list(df):\n",
    "    add_to_cart_order = df.add_to_cart_order.values\n",
    "    values = df.product_id.values\n",
    "    index = np.argsort(add_to_cart_order)\n",
    "    values = values[index].tolist()\n",
    "    return values\n",
    "\n",
    "\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    data_path = \"data\"\n",
    "    path = \"../input/\"\n",
    "    \n",
    "    order_prior = pd.read_csv(os.path.join(path, \"order_products__prior.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                                      'product_id': np.uint16,\n",
    "                                                                                      'add_to_cart_order': np.uint8})\n",
    "    orders = pd.read_csv(os.path.join(path, \"orders.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                  'order_dow': np.uint8,\n",
    "                                                                  'order_hour_of_day': np.uint8\n",
    "                                                                  })\n",
    "\n",
    "    # NOTE(review): this merge result is unused - `data` is immediately\n",
    "    # reassigned from order_prior below; kept only to preserve behavior.\n",
    "    data = pd.merge(order_prior, orders, on='order_id')\n",
    "\n",
    "    data = order_prior.sort_values(['order_id']).groupby('order_id')['product_id']\\\n",
    "        .apply(lambda x: x.tolist()).to_frame('products').reset_index()\n",
    "    data = pd.merge(data, orders, on='order_id')\n",
    "    data.to_pickle(os.path.join(data_path, 'prod2vec.pkl'))\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "initial size 3214874\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\my\\ml\\kaggle\\Instacart\\imba\\Product2VecSkipGram.py:114: RuntimeWarning: lenght is one\n",
      "  warnings.warn(\"lenght is one\", RuntimeWarning)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Average loss at step  0 :  10.961151123\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\my\\ml\\kaggle\\Instacart\\imba\\Product2VecSkipGram.py:147: RuntimeWarning: lenght is one\n",
      "  warnings.warn(\"lenght is one\", RuntimeWarning)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "CV 10.940780795\n",
      "Average loss at step  2000 :  10.2638508077\n",
      "Average loss at step  4000 :  9.63205869198\n",
      "Average loss at step  6000 :  9.3425455246\n",
      "Average loss at step  8000 :  9.16802490139\n",
      "Average loss at step  10000 :  9.0450267086\n",
      "Average loss at step  12000 :  8.95447325897\n",
      "Average loss at step  14000 :  8.88511443663\n",
      "Average loss at step  16000 :  8.82962014055\n",
      "Average loss at step  18000 :  8.78484072685\n",
      "Average loss at step  20000 :  8.74855525398\n",
      "CV 8.74543614129\n",
      "Average loss at step  22000 :  8.71661719179\n",
      "Average loss at step  24000 :  8.68500909138\n",
      "Average loss at step  26000 :  8.66311082983\n",
      "Average loss at step  28000 :  8.64272364378\n",
      "Average loss at step  30000 :  8.62065003967\n",
      "Average loss at step  32000 :  8.60266777134\n"
     ]
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
      "\u001b[1;32m<ipython-input-4-736e5fb5e855>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m()\u001b[0m\n\u001b[0;32m     23\u001b[0m              500000: 0.1}\n\u001b[0;32m     24\u001b[0m     \u001b[0mmodel\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mProduct2VecSkipGram\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mdf_train\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mdf_cv\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mbatch_size\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;36m1\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;36m1\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mnp\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mmax\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mproducts\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mproduct_id\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;33m+\u001b[0m \u001b[1;36m1\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 25\u001b[1;33m     \u001b[0mmodel\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m120001\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;36m20000\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mlen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mdf_cv\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;33m//\u001b[0m \u001b[0mbatch_size\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mrates\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[1;32mD:\\my\\ml\\kaggle\\Instacart\\imba\\Product2VecSkipGram.py\u001b[0m in \u001b[0;36mtrain\u001b[1;34m(self, num_steps, cv_every_n_steps, cv_steps, lrs)\u001b[0m\n\u001b[0;32m     48\u001b[0m                              self.learning_rate: learning_rate}\n\u001b[0;32m     49\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 50\u001b[1;33m                 \u001b[0m_\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mloss_val\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0msess\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0moptimizer\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mloss\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mfeed_dict\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m     51\u001b[0m                 \u001b[0maverage_loss\u001b[0m \u001b[1;33m+=\u001b[0m \u001b[0mloss_val\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m     52\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32md:\\programdata\\miniconda2\\envs\\py35\\lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36mrun\u001b[1;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[0;32m    887\u001b[0m     \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    888\u001b[0m       result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[1;32m--> 889\u001b[1;33m                          run_metadata_ptr)\n\u001b[0m\u001b[0;32m    890\u001b[0m       \u001b[1;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    891\u001b[0m         \u001b[0mproto_data\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32md:\\programdata\\miniconda2\\envs\\py35\\lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m_run\u001b[1;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[0;32m   1116\u001b[0m     \u001b[1;32mif\u001b[0m \u001b[0mfinal_fetches\u001b[0m \u001b[1;32mor\u001b[0m \u001b[0mfinal_targets\u001b[0m \u001b[1;32mor\u001b[0m \u001b[1;33m(\u001b[0m\u001b[0mhandle\u001b[0m \u001b[1;32mand\u001b[0m \u001b[0mfeed_dict_tensor\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1117\u001b[0m       results = self._do_run(handle, final_targets, final_fetches,\n\u001b[1;32m-> 1118\u001b[1;33m                              feed_dict_tensor, options, run_metadata)\n\u001b[0m\u001b[0;32m   1119\u001b[0m     \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1120\u001b[0m       \u001b[0mresults\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32md:\\programdata\\miniconda2\\envs\\py35\\lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m_do_run\u001b[1;34m(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)\u001b[0m\n\u001b[0;32m   1313\u001b[0m     \u001b[1;32mif\u001b[0m \u001b[0mhandle\u001b[0m \u001b[1;32mis\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1314\u001b[0m       return self._do_call(_run_fn, self._session, feeds, fetches, targets,\n\u001b[1;32m-> 1315\u001b[1;33m                            options, run_metadata)\n\u001b[0m\u001b[0;32m   1316\u001b[0m     \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1317\u001b[0m       \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_do_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0m_prun_fn\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfeeds\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfetches\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32md:\\programdata\\miniconda2\\envs\\py35\\lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m_do_call\u001b[1;34m(self, fn, *args)\u001b[0m\n\u001b[0;32m   1319\u001b[0m   \u001b[1;32mdef\u001b[0m \u001b[0m_do_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfn\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1320\u001b[0m     \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1321\u001b[1;33m       \u001b[1;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m   1322\u001b[0m     \u001b[1;32mexcept\u001b[0m \u001b[0merrors\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mOpError\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1323\u001b[0m       \u001b[0mmessage\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mcompat\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mas_text\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0me\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mmessage\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32md:\\programdata\\miniconda2\\envs\\py35\\lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m_run_fn\u001b[1;34m(session, feed_dict, fetch_list, target_list, options, run_metadata)\u001b[0m\n\u001b[0;32m   1298\u001b[0m           return tf_session.TF_Run(session, options,\n\u001b[0;32m   1299\u001b[0m                                    \u001b[0mfeed_dict\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mtarget_list\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1300\u001b[1;33m                                    status, run_metadata)\n\u001b[0m\u001b[0;32m   1301\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1302\u001b[0m     \u001b[1;32mdef\u001b[0m \u001b[0m_prun_fn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0msession\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;31mKeyboardInterrupt\u001b[0m: "
     ]
    }
   ],
   "source": [
    "# python3 skip_gram_train.py\n",
    "\n",
    "%run Product2VecSkipGram.py\n",
    "\n",
    "from Product2VecSkipGram import Product2VecSkipGram\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import os\n",
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    data_path = \"data\"\n",
    "    path = \"../input/\"\n",
    "    \n",
    "    np.random.seed(2017)\n",
    "    products = pd.read_csv(os.path.join(path, 'products.csv'))\n",
    "    df = pd.read_pickle(os.path.join(data_path, 'prod2vec.pkl')).products\n",
    "    print('initial size', len(df))\n",
    "\n",
    "    df_train, df_cv = train_test_split(df, test_size=0.1, random_state=2017)\n",
    "    batch_size = 1024\n",
    "    rates = {100000: 0.5,\n",
    "             200000: 0.25,\n",
    "             500000: 0.1}\n",
    "    model = Product2VecSkipGram(df_train, df_cv, batch_size, 1, 1, np.max(products.product_id) + 1)\n",
    "    model.train(120001, 20000, len(df_cv) // batch_size, rates)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# python3 skip_gram_get.py\n",
    "\n",
    "from Product2VecSkipGram import Product2VecSkipGram\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import os\n",
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    data_path = \"data\"\n",
    "    path = \"../input/\"\n",
    "  \n",
    "    np.random.seed(2017)\n",
    "    products = pd.read_csv(os.path.join(path, 'products.csv'))\n",
    "    df = pd.read_pickle(os.path.join(data_path, 'prod2vec.pkl')).products.tolist()\n",
    "    print('initial size', len(df))\n",
    "\n",
    "    df_train, df_cv = train_test_split(df, test_size = 0.1, random_state=2017)\n",
    "    batch_size = 128\n",
    "    rates = {100000: 0.5,\n",
    "             200000: 0.25,\n",
    "             500000: 0.1}\n",
    "    model = Product2VecSkipGram(df_train, df_cv, len(products), 1, 1, np.max(products.product_id) + 1)\n",
    "    model.load_model('models/prod2vec_skip_gram-120000')\n",
    "    embd = model.predict(products.product_id.values)\n",
    "    products = pd.concat([products, pd.DataFrame(embd)], axis=1)\n",
    "    products.to_pickle('data/product_embeddings.pkl')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# python3 lgbm_cv.py \n",
    "\n",
    "import gc\n",
    "from concurrent.futures import ThreadPoolExecutor\n",
    "\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import os\n",
    "import arboretum\n",
    "import lightgbm as lgb\n",
    "import json\n",
    "import sklearn.metrics\n",
    "from sklearn.metrics import f1_score, roc_auc_score\n",
    "from sklearn.model_selection import train_test_split\n",
    "from scipy.sparse import dok_matrix, coo_matrix\n",
    "from sklearn.utils.multiclass import type_of_target\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    data_path = \"data\"\n",
    "    path = \"../input/\"\n",
    "  \n",
    "    aisles = pd.read_csv(os.path.join(path, \"aisles.csv\"), dtype={'aisle_id': np.uint8, 'aisle': 'category'})\n",
    "    departments = pd.read_csv(os.path.join(path, \"departments.csv\"),\n",
    "                              dtype={'department_id': np.uint8, 'department': 'category'})\n",
    "    order_prior = pd.read_csv(os.path.join(path, \"order_products__prior.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                                      'product_id': np.uint16,\n",
    "                                                                                      'add_to_cart_order': np.uint8,\n",
    "                                                                                      'reordered': bool})\n",
    "\n",
    "    order_train = pd.read_csv(os.path.join(path, \"order_products__train.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                                      'product_id': np.uint16,\n",
    "                                                                                      'add_to_cart_order': np.uint8,\n",
    "                                                                                      'reordered': bool})\n",
    "    orders = pd.read_csv(os.path.join(path, \"orders.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                  'user_id': np.uint32,\n",
    "                                                                  'eval_set': 'category',\n",
    "                                                                  'order_number': np.uint8,\n",
    "                                                                  'order_dow': np.uint8,\n",
    "                                                                  'order_hour_of_day': np.uint8\n",
    "                                                                  })\n",
    "\n",
    "    product_embeddings = pd.read_pickle('data/product_embeddings.pkl')\n",
    "    embedding_cols = list(range(32))\n",
    "    product_embeddings = product_embeddings[embedding_cols + ['product_id']]\n",
    "\n",
    "    order_prev = pd.merge(order_train, orders, on='order_id')\n",
    "    order_prev.order_number -= 1\n",
    "    order_prev = pd.merge(order_prev[\n",
    "                              ['user_id', 'order_number', 'product_id', 'reordered', 'add_to_cart_order', 'order_dow',\n",
    "                               'order_hour_of_day']], orders[['user_id', 'order_number', 'order_id']],\n",
    "                          on=['user_id', 'order_number'])\n",
    "\n",
    "    order_prev.drop(['order_number', 'user_id'], axis=1, inplace=True)\n",
    "\n",
    "    order_prev.rename(columns={\n",
    "        'reordered': 'reordered_prev',\n",
    "        'add_to_cart_order': 'add_to_cart_order_prev',\n",
    "        'order_dow': 'order_dow_prev',\n",
    "        'order_hour_of_day': 'order_hour_of_day_prev'\n",
    "    }, inplace=True)\n",
    "\n",
    "    products = pd.read_csv(os.path.join(path, \"products.csv\"), dtype={'product_id': np.uint16,\n",
    "                                                                      'aisle_id': np.uint8,\n",
    "                                                                      'department_id': np.uint8})\n",
    "\n",
    "    order_train = pd.read_pickle(os.path.join(data_path, 'chunk_0.pkl'))\n",
    "    order_train = order_train.loc[order_train.eval_set == \"train\", ['order_id',  'product_id',  'reordered']]\n",
    "\n",
    "    product_periods = pd.read_pickle(os.path.join(data_path, 'product_periods_stat.pkl')).fillna(9999)\n",
    "    # product_periods.prev1 = product_periods['last'] / product_periods.prev1\n",
    "    # product_periods.prev2 = product_periods['last'] / product_periods.prev2\n",
    "    # product_periods['mean'] = product_periods['last'] / product_periods['mean']\n",
    "    # product_periods['median'] = product_periods['last'] / product_periods['median']\n",
    "\n",
    "    print(order_train.columns)\n",
    "\n",
    "    ###########################\n",
    "\n",
    "    weights = order_train.groupby('order_id')['reordered'].sum().to_frame('weights')\n",
    "    weights.reset_index(inplace=True)\n",
    "\n",
    "\n",
    "    prob = pd.merge(order_prior, orders, on='order_id')\n",
    "    print(prob.columns)\n",
    "    prob = prob.groupby(['product_id', 'user_id'])\\\n",
    "        .agg({'reordered':'sum', 'user_id': 'size'})\n",
    "    print(prob.columns)\n",
    "\n",
    "    # NOTE(review): the 'sum' key below is a no-op - after the dict-agg the\n",
    "    # column is already named 'reordered'; only 'user_id' -> 'total' applies.\n",
    "    prob.rename(columns={'sum': 'reordered',\n",
    "                         'user_id': 'total'}, inplace=True)\n",
    "\n",
    "    # NOTE(review): `total` is a group size and therefore always positive, so\n",
    "    # (total > 0) is constantly 1.0 and reorder_prob reduces to a per-user\n",
    "    # did-ever-reorder indicator; the product-level mean below is then the share\n",
    "    # of a product's users who ever reordered it - confirm this is intended.\n",
    "    prob.reordered = (prob.reordered > 0).astype(np.float32)\n",
    "    prob.total = (prob.total > 0).astype(np.float32)\n",
    "    prob['reorder_prob'] = prob.reordered / prob.total\n",
    "    prob = prob.groupby('product_id').agg({'reorder_prob': 'mean'}).rename(columns={'mean': 'reorder_prob'})\\\n",
    "        .reset_index()\n",
    "\n",
    "    prod_stat = order_prior.groupby('product_id').agg({'reordered': ['sum', 'size'],\n",
    "                                                       'add_to_cart_order':'mean'})\n",
    "    # NOTE(review): MultiIndex.levels[1] is not guaranteed to follow column\n",
    "    # order; prod_stat.columns.get_level_values(1) is the order-safe equivalent.\n",
    "    prod_stat.columns = prod_stat.columns.levels[1]\n",
    "    prod_stat.rename(columns={'sum':'prod_reorders',\n",
    "                              'size':'prod_orders',\n",
    "                              'mean': 'prod_add_to_card_mean'}, inplace=True)\n",
    "    prod_stat.reset_index(inplace=True)\n",
    "\n",
    "    prod_stat['reorder_ration'] = prod_stat['prod_reorders'] / prod_stat['prod_orders']\n",
    "\n",
    "    prod_stat = pd.merge(prod_stat, prob, on='product_id')\n",
    "\n",
    "    # prod_stat.drop(['prod_reorders'], axis=1, inplace=True)\n",
    "\n",
    "    user_stat = orders.loc[orders.eval_set == 'prior', :].groupby('user_id').agg({'order_number': 'max',\n",
    "                                                                                  'days_since_prior_order': ['sum',\n",
    "                                                                                                             'mean',\n",
    "                                                                                                             'median']})\n",
    "    user_stat.columns = user_stat.columns.droplevel(0)\n",
    "    user_stat.rename(columns={'max': 'user_orders',\n",
    "                              'sum': 'user_order_starts_at',\n",
    "                              'mean': 'user_mean_days_since_prior',\n",
    "                              'median': 'user_median_days_since_prior'}, inplace=True)\n",
    "    user_stat.reset_index(inplace=True)\n",
    "\n",
    "    orders_products = pd.merge(orders, order_prior, on=\"order_id\")\n",
    "\n",
    "    user_order_stat = orders_products.groupby('user_id').agg({'user_id': 'size',\n",
    "                                                              'reordered': 'sum',\n",
    "                                                              \"product_id\": lambda x: x.nunique()})\n",
    "\n",
    "    user_order_stat.rename(columns={'user_id': 'user_total_products',\n",
    "                                    'product_id': 'user_distinct_products',\n",
    "                                    'reordered': 'user_reorder_ratio'}, inplace=True)\n",
    "    user_order_stat.reset_index(inplace=True)\n",
    "    user_order_stat.user_reorder_ratio = user_order_stat.user_reorder_ratio / user_order_stat.user_total_products\n",
    "\n",
    "    user_stat = pd.merge(user_stat, user_order_stat, on='user_id')\n",
    "    user_stat['user_average_basket'] = user_stat.user_total_products / user_stat.user_orders\n",
    "\n",
    "    ########################### products\n",
    "\n",
    "    prod_usr = orders_products.groupby(['product_id']).agg({'user_id': lambda x: x.nunique()})\n",
    "    prod_usr.rename(columns={'user_id':'prod_users_unq'}, inplace=True)\n",
    "    prod_usr.reset_index(inplace=True)\n",
    "\n",
    "    prod_usr_reordered = orders_products.loc[orders_products.reordered, :].groupby(['product_id']).agg({'user_id': lambda x: x.nunique()})\n",
    "    prod_usr_reordered.rename(columns={'user_id': 'prod_users_unq_reordered'}, inplace=True)\n",
    "    prod_usr_reordered.reset_index(inplace=True)\n",
    "\n",
    "    order_stat = orders_products.groupby('order_id').agg({'order_id': 'size'})\\\n",
    "        .rename(columns = {'order_id': 'order_size'}).reset_index()\n",
    "\n",
    "    orders_products = pd.merge(orders_products, order_stat, on='order_id')\n",
    "    orders_products['add_to_cart_order_inverted'] = orders_products.order_size - orders_products.add_to_cart_order\n",
    "    orders_products['add_to_cart_order_relative'] = orders_products.add_to_cart_order / orders_products.order_size\n",
    "\n",
    "    data_dow = orders_products.groupby(['user_id', 'product_id', 'order_dow']).agg({\n",
    "                                                                   'reordered': ['sum', 'size']})\n",
    "    data_dow.columns = data_dow.columns.droplevel(0)\n",
    "    data_dow.columns = ['reordered_dow', 'reordered_dow_size']\n",
    "    data_dow['reordered_dow_ration'] = data_dow.reordered_dow / data_dow.reordered_dow_size\n",
    "    data_dow.reset_index(inplace=True)\n",
    "\n",
    "    data = orders_products.groupby(['user_id', 'product_id']).agg({'user_id': 'size',\n",
    "                                                                   'order_number': ['min', 'max'],\n",
    "                                                                   'add_to_cart_order': ['mean', 'median'],\n",
    "                                                                   'days_since_prior_order': ['mean', 'median'],\n",
    "                                                                   'order_dow': ['mean', 'median'],\n",
    "                                                                   'order_hour_of_day': ['mean', 'median'],\n",
    "                                                                   'add_to_cart_order_inverted': ['mean', 'median'],\n",
    "                                                                   'add_to_cart_order_relative': ['mean', 'median'],\n",
    "                                                                   'reordered':['sum']})\n",
    "\n",
    "    data.columns = data.columns.droplevel(0)\n",
    "    data.columns = ['up_orders', 'up_first_order', 'up_last_order', 'up_mean_cart_position', 'up_median_cart_position',\n",
    "                             'days_since_prior_order_mean', 'days_since_prior_order_median', 'order_dow_mean', 'order_dow_median',\n",
    "                             'order_hour_of_day_mean', 'order_hour_of_day_median',\n",
    "                    'add_to_cart_order_inverted_mean', 'add_to_cart_order_inverted_median',\n",
    "                    'add_to_cart_order_relative_mean', 'add_to_cart_order_relative_median',\n",
    "                    'reordered_sum'\n",
    "                    ]\n",
    "\n",
    "    data['user_product_reordered_ratio'] = (data.reordered_sum + 1.0) / data.up_orders\n",
    "\n",
    "    # data['first_order'] = data['up_orders'] > 0\n",
    "    # data['second_order'] = data['up_orders'] > 1\n",
    "    #\n",
    "    # data.groupby('product_id')['']\n",
    "\n",
    "    data.reset_index(inplace=True)\n",
    "\n",
    "    data = pd.merge(data, prod_stat, on='product_id')\n",
    "    data = pd.merge(data, user_stat, on='user_id')\n",
    "\n",
    "    data['up_order_rate'] = data.up_orders / data.user_orders\n",
    "    data['up_orders_since_last_order'] = data.user_orders - data.up_last_order\n",
    "    data['up_order_rate_since_first_order'] = data.user_orders / (data.user_orders - data.up_first_order + 1)\n",
    "\n",
    "    ############################\n",
    "\n",
    "    user_dep_stat = pd.read_pickle('data/user_department_products.pkl')\n",
    "    user_aisle_stat = pd.read_pickle('data/user_aisle_products.pkl')\n",
    "\n",
    "    order_train = pd.merge(order_train, products, on='product_id')\n",
    "    order_train = pd.merge(order_train, orders, on='order_id')\n",
    "    order_train = pd.merge(order_train, user_dep_stat, on=['user_id', 'department_id'])\n",
    "    order_train = pd.merge(order_train, user_aisle_stat, on=['user_id', 'aisle_id'])\n",
    "\n",
    "    order_train = pd.merge(order_train, prod_usr, on='product_id')\n",
    "    order_train = pd.merge(order_train, prod_usr_reordered, on='product_id', how='left')\n",
    "    order_train.prod_users_unq_reordered.fillna(0, inplace=True)\n",
    "\n",
    "    order_train = pd.merge(order_train, data, on=['product_id', 'user_id'])\n",
    "    order_train = pd.merge(order_train, data_dow, on=['product_id', 'user_id', 'order_dow'], how='left')\n",
    "\n",
    "    order_train['aisle_reordered_ratio'] = order_train.aisle_reordered / order_train.user_orders\n",
    "    order_train['dep_reordered_ratio'] = order_train.dep_reordered / order_train.user_orders\n",
    "\n",
    "    order_train = pd.merge(order_train, product_periods, on=['user_id',  'product_id'])\n",
    "    order_train = pd.merge(order_train, product_embeddings, on=['product_id'])\n",
    "    # order_train = pd.merge(order_train, weights, on='order_id')\n",
    "\n",
    "    # order_train = pd.merge(order_train, order_prev, on=['order_id', 'product_id'], how='left')\n",
    "    # order_train.reordered_prev = order_train.reordered_prev.astype(np.float32) + 1.\n",
    "    # order_train['reordered_prev'].fillna(0, inplace=True)\n",
    "    # order_train[['add_to_cart_order_prev', 'order_dow_prev', 'order_hour_of_day_prev']].fillna(255, inplace=True)\n",
    "\n",
    "    print('data is joined')\n",
    "\n",
    "    # order_train.days_since_prior_order_mean -= order_train.days_since_prior_order\n",
    "    # order_train.days_since_prior_order_median -= order_train.days_since_prior_order\n",
    "    #\n",
    "    # order_train.order_dow_mean -= order_train.order_dow\n",
    "    # order_train.order_dow_median -= order_train.order_dow\n",
    "    #\n",
    "    # order_train.order_hour_of_day_mean -= order_train.order_hour_of_day\n",
    "    # order_train.order_hour_of_day_median -= order_train.order_hour_of_day\n",
    "\n",
    "    unique_orders = np.unique(order_train.order_id)\n",
    "    orders_train, orders_test = train_test_split(unique_orders, test_size=0.25, random_state=2017)\n",
    "\n",
    "    order_test = order_train.loc[np.in1d(order_train.order_id, orders_test)]\n",
    "    order_train = order_train.loc[np.in1d(order_train.order_id, orders_train)]\n",
    "\n",
    "    features = [\n",
    "        # 'reordered_dow_ration', 'reordered_dow', 'reordered_dow_size',\n",
    "        # 'reordered_prev', 'add_to_cart_order_prev', 'order_dow_prev', 'order_hour_of_day_prev',\n",
    "        'user_product_reordered_ratio', 'reordered_sum',\n",
    "        'add_to_cart_order_inverted_mean', 'add_to_cart_order_relative_mean',\n",
    "        'reorder_prob',\n",
    "        'last', 'prev1', 'prev2', 'median', 'mean',\n",
    "        'dep_reordered_ratio', 'aisle_reordered_ratio',\n",
    "        'aisle_products',\n",
    "        'aisle_reordered',\n",
    "        'dep_products',\n",
    "        'dep_reordered',\n",
    "        'prod_users_unq', 'prod_users_unq_reordered',\n",
    "        'order_number', 'prod_add_to_card_mean',\n",
    "                'days_since_prior_order',\n",
    "        'order_dow', 'order_hour_of_day',\n",
    "                'reorder_ration',\n",
    "                        'user_orders', 'user_order_starts_at', 'user_mean_days_since_prior',\n",
    "        # 'user_median_days_since_prior',\n",
    "                        'user_average_basket', 'user_distinct_products', 'user_reorder_ratio', 'user_total_products',\n",
    "                        'prod_orders', 'prod_reorders',\n",
    "                        'up_order_rate', 'up_orders_since_last_order', 'up_order_rate_since_first_order',\n",
    "                        'up_orders', 'up_first_order', 'up_last_order', 'up_mean_cart_position',\n",
    "        # 'up_median_cart_position',\n",
    "                             'days_since_prior_order_mean',\n",
    "        # 'days_since_prior_order_median',\n",
    "        'order_dow_mean',\n",
    "        # 'order_dow_median',\n",
    "        #                      'order_hour_of_day_mean',\n",
    "        # 'order_hour_of_day_median'\n",
    "                ]\n",
    "    categories = ['product_id', 'aisle_id', 'department_id']\n",
    "    features.extend(embedings)\n",
    "    cat_features = ','.join(map(lambda x: str(x + len(features)), range(len(categories))))\n",
    "    features.extend(categories)\n",
    "\n",
    "\n",
    "    print('not included', set(order_train.columns.tolist()) - set(features))\n",
    "\n",
    "    data = order_train[features]\n",
    "    labels = order_train[['reordered']].values.astype(np.float32).flatten()\n",
    "\n",
    "    data_val = order_test[features]\n",
    "    labels_val = order_test[['reordered']].values.astype(np.float32).flatten()\n",
    "\n",
    "    lgb_train = lgb.Dataset(data, labels, categorical_feature=cat_features)\n",
    "    lgb_eval = lgb.Dataset(data_val, labels_val, reference=lgb_train, categorical_feature=cat_features)\n",
    "\n",
    "    # specify your configurations as a dict\n",
    "    params = {\n",
    "        'task': 'train',\n",
    "        'boosting_type': 'gbdt',\n",
    "        'objective': 'binary',\n",
    "        'metric': {'binary_logloss', 'auc'},\n",
    "        'num_leaves': 256,\n",
    "        'min_sum_hessian_in_leaf':20,\n",
    "        'max_depth': -12,  # NOTE(review): any negative max_depth disables the depth limit in LightGBM; the submission cell uses 12 -- confirm which was intended\n",
    "        'learning_rate': 0.05,\n",
    "        'feature_fraction': 0.6,\n",
    "        # 'bagging_fraction': 0.9,\n",
    "        # 'bagging_freq': 3,\n",
    "        'verbose': 1\n",
    "    }\n",
    "\n",
    "    print('Start training...')\n",
    "    # train\n",
    "    gbm = lgb.train(params,\n",
    "                    lgb_train,\n",
    "                    num_boost_round=2000,\n",
    "                    valid_sets=lgb_eval,\n",
    "                    early_stopping_rounds=30)\n",
    "\n",
    "    print('Feature names:', gbm.feature_name())\n",
    "\n",
    "    print('Calculate feature importances...')\n",
    "    # feature importances\n",
    "    print('Feature importances:', list(gbm.feature_importance()))\n",
    "\n",
    "    df = pd.DataFrame({'feature':gbm.feature_name(), 'importances': gbm.feature_importance()})\n",
    "    print(df.sort_values('importances'))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#  python3 lgbm_submition.py \n",
    "\n",
    "import gc\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import os\n",
    "import arboretum\n",
    "import lightgbm as lgb\n",
    "import json\n",
    "import sklearn.metrics\n",
    "from sklearn.metrics import f1_score, roc_auc_score\n",
    "from sklearn.model_selection import train_test_split\n",
    "from scipy.sparse import dok_matrix, coo_matrix\n",
    "from sklearn.utils.multiclass import  type_of_target\n",
    "\n",
    "\n",
    "\n",
    "def fscore(true_value_matrix, prediction, order_index, product_index, rows, cols, threshold=[0.5]):\n",
    "    # Mean per-order F1: rebuild an orders x products sparse matrix from the\n",
    "    # flat prediction vector, then score it against the true basket matrix at\n",
    "    # each probability threshold.  (Mutable default `threshold=[0.5]` is safe\n",
    "    # here only because it is never mutated.)\n",
    "\n",
    "    prediction_value_matrix = coo_matrix((prediction, (order_index, product_index)), shape=(rows, cols), dtype=np.float32)\n",
    "    # prediction_value_matrix.eliminate_zeros()\n",
    "\n",
    "    return list(map(lambda x: f1_score(true_value_matrix, prediction_value_matrix > x, average='samples'), threshold))\n",
    "\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    data_path = \"data\"\n",
    "    path = \"../input/\"\n",
    " \n",
    "    aisles = pd.read_csv(os.path.join(path, \"aisles.csv\"), dtype={'aisle_id': np.uint8, 'aisle': 'category'})\n",
    "    departments = pd.read_csv(os.path.join(path, \"departments.csv\"),\n",
    "                              dtype={'department_id': np.uint8, 'department': 'category'})\n",
    "    order_prior = pd.read_csv(os.path.join(path, \"order_products__prior.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                                      'product_id': np.uint16,\n",
    "                                                                                      'add_to_cart_order': np.uint8,\n",
    "                                                                                      'reordered': bool})\n",
    "    order_train = pd.read_csv(os.path.join(path, \"order_products__train.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                                      'product_id': np.uint16,\n",
    "                                                                                      'add_to_cart_order': np.uint8,\n",
    "                                                                                      'reordered': bool})\n",
    "    orders = pd.read_csv(os.path.join(path, \"orders.csv\"), dtype={'order_id': np.uint32,\n",
    "                                                                  'user_id': np.uint32,\n",
    "                                                                  'eval_set': 'category',\n",
    "                                                                  'order_number': np.uint8,\n",
    "                                                                  'order_dow': np.uint8,\n",
    "                                                                  'order_hour_of_day': np.uint8\n",
    "                                                                  })\n",
    "\n",
    "    products = pd.read_csv(os.path.join(path, \"products.csv\"), dtype={'product_id': np.uint16,\n",
    "                                                                      'aisle_id': np.uint8,\n",
    "                                                                      'department_id': np.uint8})\n",
    "\n",
    "    product_embeddings = pd.read_pickle('data/product_embeddings.pkl')\n",
    "    embedings = list(range(32))\n",
    "    product_embeddings = product_embeddings[embedings + ['product_id']]\n",
    "\n",
    "    order_train = pd.read_pickle(os.path.join(data_path, 'chunk_0.pkl'))\n",
    "    order_test = order_train.loc[order_train.eval_set == \"test\", ['order_id', 'product_id']]\n",
    "    order_train = order_train.loc[order_train.eval_set == \"train\", ['order_id',  'product_id',  'reordered']]\n",
    "    product_periods = pd.read_pickle(os.path.join(data_path, 'product_periods_stat.pkl')).fillna(9999)\n",
    "\n",
    "    print(order_train.columns)\n",
    "\n",
    "    ###########################\n",
    "\n",
    "    prob = pd.merge(order_prior, orders, on='order_id')\n",
    "    print(prob.columns)\n",
    "    prob = prob.groupby(['product_id', 'user_id'])\\\n",
    "        .agg({'reordered':'sum', 'user_id': 'size'})\n",
    "    print(prob.columns)\n",
    "\n",
    "    # NOTE(review): with the dict-agg above the columns are already named\n",
    "    # 'reordered'/'user_id', so the 'sum' -> 'reordered' mapping is a no-op;\n",
    "    # only 'user_id' -> 'total' takes effect.\n",
    "    prob.rename(columns={'sum': 'reordered',\n",
    "                         'user_id': 'total'}, inplace=True)\n",
    "\n",
    "    prob.reordered = (prob.reordered > 0).astype(np.float32)\n",
    "    # NOTE(review): total is a group row-count (always >= 1), so `> 0` forces\n",
    "    # total to 1.0 and reorder_prob becomes a 0/1 flag per (product, user).\n",
    "    # If a true ratio was intended, the `> 0` should be dropped -- but the\n",
    "    # model was trained with this behavior, so confirm before changing.\n",
    "    prob.total = (prob.total > 0).astype(np.float32)\n",
    "    prob['reorder_prob'] = prob.reordered / prob.total\n",
    "    prob = prob.groupby('product_id').agg({'reorder_prob': 'mean'}).rename(columns={'mean': 'reorder_prob'})\\\n",
    "        .reset_index()\n",
    "\n",
    "\n",
    "    prod_stat = order_prior.groupby('product_id').agg({'reordered': ['sum', 'size'],\n",
    "                                                       'add_to_cart_order':'mean'})\n",
    "    prod_stat.columns = prod_stat.columns.levels[1]\n",
    "    prod_stat.rename(columns={'sum':'prod_reorders',\n",
    "                              'size':'prod_orders',\n",
    "                              'mean': 'prod_add_to_card_mean'}, inplace=True)\n",
    "    prod_stat.reset_index(inplace=True)\n",
    "\n",
    "    prod_stat['reorder_ration'] = prod_stat['prod_reorders'] / prod_stat['prod_orders']\n",
    "\n",
    "    prod_stat = pd.merge(prod_stat, prob, on='product_id')\n",
    "\n",
    "    # prod_stat.drop(['prod_reorders'], axis=1, inplace=True)\n",
    "\n",
    "    user_stat = orders.loc[orders.eval_set == 'prior', :].groupby('user_id').agg({'order_number': 'max',\n",
    "                                                                                  'days_since_prior_order': ['sum',\n",
    "                                                                                                             'mean',\n",
    "                                                                                                             'median']})\n",
    "    user_stat.columns = user_stat.columns.droplevel(0)\n",
    "    user_stat.rename(columns={'max': 'user_orders',\n",
    "                              'sum': 'user_order_starts_at',\n",
    "                              'mean': 'user_mean_days_since_prior',\n",
    "                              'median': 'user_median_days_since_prior'}, inplace=True)\n",
    "    user_stat.reset_index(inplace=True)\n",
    "\n",
    "    orders_products = pd.merge(orders, order_prior, on=\"order_id\")\n",
    "\n",
    "    user_order_stat = orders_products.groupby('user_id').agg({'user_id': 'size',\n",
    "                                                              'reordered': 'sum',\n",
    "                                                              \"product_id\": lambda x: x.nunique()})\n",
    "\n",
    "    user_order_stat.rename(columns={'user_id': 'user_total_products',\n",
    "                                    'product_id': 'user_distinct_products',\n",
    "                                    'reordered': 'user_reorder_ratio'}, inplace=True)\n",
    "\n",
    "    user_order_stat.reset_index(inplace=True)\n",
    "    user_order_stat.user_reorder_ratio = user_order_stat.user_reorder_ratio / user_order_stat.user_total_products\n",
    "\n",
    "    user_stat = pd.merge(user_stat, user_order_stat, on='user_id')\n",
    "    user_stat['user_average_basket'] = user_stat.user_total_products / user_stat.user_orders\n",
    "\n",
    "    ########################### products\n",
    "\n",
    "    prod_usr = orders_products.groupby(['product_id']).agg({'user_id': lambda x: x.nunique()})\n",
    "    prod_usr.rename(columns={'user_id':'prod_users_unq'}, inplace=True)\n",
    "    prod_usr.reset_index(inplace=True)\n",
    "\n",
    "    prod_usr_reordered = orders_products.loc[orders_products.reordered, :].groupby(['product_id']).agg({'user_id': lambda x: x.nunique()})\n",
    "    prod_usr_reordered.rename(columns={'user_id': 'prod_users_unq_reordered'}, inplace=True)\n",
    "    prod_usr_reordered.reset_index(inplace=True)\n",
    "\n",
    "    order_stat = orders_products.groupby('order_id').agg({'order_id': 'size'}) \\\n",
    "        .rename(columns={'order_id': 'order_size'}).reset_index()\n",
    "\n",
    "    orders_products = pd.merge(orders_products, order_stat, on='order_id')\n",
    "    orders_products['add_to_cart_order_inverted'] = orders_products.order_size - orders_products.add_to_cart_order\n",
    "    orders_products['add_to_cart_order_relative'] = orders_products.add_to_cart_order / orders_products.order_size\n",
    "\n",
    "    data = orders_products.groupby(['user_id', 'product_id']).agg({'user_id': 'size',\n",
    "                                                                   'order_number': ['min', 'max'],\n",
    "                                                                   'add_to_cart_order': ['mean', 'median'],\n",
    "                                                                   'days_since_prior_order': ['mean', 'median'],\n",
    "                                                                   'order_dow': ['mean', 'median'],\n",
    "                                                                   'order_hour_of_day': ['mean', 'median'],\n",
    "                                                                   'add_to_cart_order_inverted': ['mean', 'median'],\n",
    "                                                                   'add_to_cart_order_relative': ['mean', 'median'],\n",
    "                                                                   'reordered': ['sum']})\n",
    "\n",
    "    data.columns = data.columns.droplevel(0)\n",
    "    data.columns = ['up_orders', 'up_first_order', 'up_last_order', 'up_mean_cart_position', 'up_median_cart_position',\n",
    "                    'days_since_prior_order_mean', 'days_since_prior_order_median', 'order_dow_mean',\n",
    "                    'order_dow_median',\n",
    "                    'order_hour_of_day_mean', 'order_hour_of_day_median',\n",
    "                    'add_to_cart_order_inverted_mean', 'add_to_cart_order_inverted_median',\n",
    "                    'add_to_cart_order_relative_mean', 'add_to_cart_order_relative_median',\n",
    "                    'reordered_sum'\n",
    "                    ]\n",
    "\n",
    "    data['user_product_reordered_ratio'] = (data.reordered_sum + 1.0) / data.up_orders\n",
    "\n",
    "    # data['first_order'] = data['up_orders'] > 0\n",
    "    # data['second_order'] = data['up_orders'] > 1\n",
    "    #\n",
    "    # data.groupby('product_id')['']\n",
    "\n",
    "    data.reset_index(inplace=True)\n",
    "\n",
    "    data = pd.merge(data, prod_stat, on='product_id')\n",
    "    data = pd.merge(data, user_stat, on='user_id')\n",
    "\n",
    "    data['up_order_rate'] = data.up_orders / data.user_orders\n",
    "    data['up_orders_since_last_order'] = data.user_orders - data.up_last_order\n",
    "    data['up_order_rate_since_first_order'] = data.user_orders / (data.user_orders - data.up_first_order + 1)\n",
    "\n",
    "    ############################\n",
    "\n",
    "    user_dep_stat = pd.read_pickle('data/user_department_products.pkl')\n",
    "    user_aisle_stat = pd.read_pickle('data/user_aisle_products.pkl')\n",
    "\n",
    "    ############### train\n",
    "\n",
    "    print(order_train.shape)\n",
    "    order_train = pd.merge(order_train, products, on='product_id')\n",
    "    print(order_train.shape)\n",
    "    order_train = pd.merge(order_train, orders, on='order_id')\n",
    "    print(order_train.shape)\n",
    "    order_train = pd.merge(order_train, user_dep_stat, on=['user_id', 'department_id'])\n",
    "    print(order_train.shape)\n",
    "    order_train = pd.merge(order_train, user_aisle_stat, on=['user_id', 'aisle_id'])\n",
    "    print(order_train.shape)\n",
    "\n",
    "    order_train = pd.merge(order_train, prod_usr, on='product_id')\n",
    "    print(order_train.shape)\n",
    "    order_train = pd.merge(order_train, prod_usr_reordered, on='product_id', how='left')\n",
    "    order_train.prod_users_unq_reordered.fillna(0, inplace=True)\n",
    "    print(order_train.shape)\n",
    "\n",
    "    order_train = pd.merge(order_train, data, on=['product_id', 'user_id'])\n",
    "    print(order_train.shape)\n",
    "\n",
    "    order_train['aisle_reordered_ratio'] = order_train.aisle_reordered / order_train.user_orders\n",
    "    order_train['dep_reordered_ratio'] = order_train.dep_reordered / order_train.user_orders\n",
    "\n",
    "    order_train = pd.merge(order_train, product_periods, on=['user_id',  'product_id'])\n",
    "\n",
    "    ##############\n",
    "\n",
    "    order_test = pd.merge(order_test, products, on='product_id')\n",
    "    order_test = pd.merge(order_test, orders, on='order_id')\n",
    "    order_test = pd.merge(order_test, user_dep_stat, on=['user_id', 'department_id'])\n",
    "    order_test = pd.merge(order_test, user_aisle_stat, on=['user_id', 'aisle_id'])\n",
    "\n",
    "    order_test = pd.merge(order_test, prod_usr, on='product_id')\n",
    "    order_test = pd.merge(order_test, prod_usr_reordered, on='product_id', how='left')\n",
    "    # fix: fill NaNs on order_test (was order_train -- a copy/paste slip that\n",
    "    # left the test set's prod_users_unq_reordered as NaN after the left merge)\n",
    "    order_test.prod_users_unq_reordered.fillna(0, inplace=True)\n",
    "\n",
    "    order_test = pd.merge(order_test, data, on=['product_id', 'user_id'])\n",
    "\n",
    "    order_test['aisle_reordered_ratio'] = order_test.aisle_reordered / order_test.user_orders\n",
    "    order_test['dep_reordered_ratio'] = order_test.dep_reordered / order_test.user_orders\n",
    "\n",
    "    order_test = pd.merge(order_test, product_periods, on=['user_id', 'product_id'])\n",
    "\n",
    "    order_train = pd.merge(order_train, product_embeddings, on=['product_id'])\n",
    "    order_test = pd.merge(order_test, product_embeddings, on=['product_id'])\n",
    "\n",
    "    print('data is joined')\n",
    "\n",
    "    features = [\n",
    "        # 'reordered_dow_ration', 'reordered_dow', 'reordered_dow_size',\n",
    "        # 'reordered_prev', 'add_to_cart_order_prev', 'order_dow_prev', 'order_hour_of_day_prev',\n",
    "        'user_product_reordered_ratio', 'reordered_sum',\n",
    "        'add_to_cart_order_inverted_mean', 'add_to_cart_order_relative_mean',\n",
    "        'reorder_prob',\n",
    "        'last', 'prev1', 'prev2', 'median', 'mean',\n",
    "        'dep_reordered_ratio', 'aisle_reordered_ratio',\n",
    "        'aisle_products',\n",
    "        'aisle_reordered',\n",
    "        'dep_products',\n",
    "        'dep_reordered',\n",
    "        'prod_users_unq', 'prod_users_unq_reordered',\n",
    "        'order_number', 'prod_add_to_card_mean',\n",
    "        'days_since_prior_order',\n",
    "        'order_dow', 'order_hour_of_day',\n",
    "        'reorder_ration',\n",
    "        'user_orders', 'user_order_starts_at', 'user_mean_days_since_prior',\n",
    "        # 'user_median_days_since_prior',\n",
    "        'user_average_basket', 'user_distinct_products', 'user_reorder_ratio', 'user_total_products',\n",
    "        'prod_orders', 'prod_reorders',\n",
    "        'up_order_rate', 'up_orders_since_last_order', 'up_order_rate_since_first_order',\n",
    "        'up_orders', 'up_first_order', 'up_last_order', 'up_mean_cart_position',\n",
    "        # 'up_median_cart_position',\n",
    "        'days_since_prior_order_mean',\n",
    "        # 'days_since_prior_order_median',\n",
    "        'order_dow_mean',\n",
    "        # 'order_dow_median',\n",
    "        'order_hour_of_day_mean',\n",
    "        # 'order_hour_of_day_median'\n",
    "    ]\n",
    "    # fix: embeddings were extended twice here, duplicating all 32 embedding\n",
    "    # columns in the design matrix; extend once.  cat_features indices stay\n",
    "    # correct because the categorical columns are still appended last.\n",
    "    features.extend(embedings)\n",
    "    categories = ['product_id', 'aisle_id', 'department_id']\n",
    "    cat_features = ','.join(map(lambda x: str(x + len(features)), range(len(categories))))\n",
    "    features.extend(categories)\n",
    "\n",
    "    print('not included', set(order_train.columns.tolist()) - set(features))\n",
    "\n",
    "    data = order_train[features]\n",
    "    labels = order_train[['reordered']].values.astype(np.float32).flatten()\n",
    "\n",
    "    data_val = order_test[features]\n",
    "\n",
    "    assert data.shape[0] == 8474661\n",
    "\n",
    "    lgb_train = lgb.Dataset(data, labels, categorical_feature=cat_features)\n",
    "\n",
    "    # specify your configurations as a dict\n",
    "    params = {\n",
    "        'task': 'train',\n",
    "        'boosting_type': 'gbdt',\n",
    "        'objective': 'binary',\n",
    "        'metric': {'binary_logloss', 'auc'},\n",
    "        'num_leaves': 256,\n",
    "        'min_sum_hessian_in_leaf': 20,\n",
    "        'max_depth': 12,\n",
    "        'learning_rate': 0.05,\n",
    "        'feature_fraction': 0.6,\n",
    "        # 'bagging_fraction': 0.9,\n",
    "        # 'bagging_freq': 3,\n",
    "        'verbose': 1\n",
    "    }\n",
    "\n",
    "    print('Start training...')\n",
    "    # train\n",
    "    gbm = lgb.train(params,\n",
    "                    lgb_train,\n",
    "                    num_boost_round=380)\n",
    "\n",
    "    prediction = gbm.predict(data_val)\n",
    "    # prediction = model.predict(data_val)\n",
    "    orders = order_test.order_id.values\n",
    "    products = order_test.product_id.values\n",
    "\n",
    "    result = pd.DataFrame({'product_id': products, 'order_id': orders, 'prediction': prediction})\n",
    "    result.to_pickle('data/prediction_lgbm.pkl')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# python3 f1_optimal.py\n",
    "\n",
    "%run utils.py\n",
    "\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "from joblib import Parallel, delayed\n",
    "import multiprocessing\n",
    "\n",
    "from utils import fast_search\n",
    "\n",
    "none_product = 50000\n",
    "\n",
    "def applyParallel(dfGrouped, func):\n",
    "    # Apply `func` to every group of a pandas GroupBy on all CPU cores and\n",
    "    # concatenate the per-group result frames into one DataFrame.\n",
    "    retLst = Parallel(n_jobs=multiprocessing.cpu_count())(delayed(func)(group) for name, group in dfGrouped)\n",
    "    return pd.concat(retLst)\n",
    "\n",
    "def create_products(df):\n",
    "    # For one order's candidate rows: rank products by predicted probability,\n",
    "    # let fast_search pick the expected-F1-optimal cutoff (top-80 cap), and\n",
    "    # serialize the chosen products into a single submission row.\n",
    "    products = df.product_id.values\n",
    "    prob = df.prediction.values\n",
    "\n",
    "    sort_index = np.argsort(prob)[::-1]\n",
    "\n",
    "    values = fast_search(prob[sort_index][0:80], dtype=np.float64)\n",
    "\n",
    "    index = np.argmax(values)\n",
    "\n",
    "    print('iteration', df.shape[0], 'optimal value', index)\n",
    "\n",
    "    # NOTE(review): cutoff uses [0:index], i.e. `index` products -- confirm\n",
    "    # fast_search's convention (an off-by-one here silently drops one product).\n",
    "    best = ' '.join(map(lambda x: str(x) if x != none_product else 'None', products[sort_index][0:index]))\n",
    "    # fix: take an explicit copy of the first row so the assignment below\n",
    "    # writes to a new frame instead of a slice view (avoids\n",
    "    # SettingWithCopyWarning and potential lost writes).\n",
    "    df = df[0:1].copy()\n",
    "    df.loc[:, 'products'] = best\n",
    "    return df\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    data = pd.read_pickle('data/prediction_rnn.pkl')\n",
    "    data['not_a_product'] = 1. - data.prediction\n",
    "\n",
    "    gp = data.groupby('order_id')['not_a_product'].apply(lambda x: np.multiply.reduce(x.values)).reset_index()\n",
    "    gp.rename(columns={'not_a_product': 'prediction'}, inplace=True)\n",
    "    gp['product_id'] = none_product\n",
    "\n",
    "    data = pd.concat([data, gp], axis=0)\n",
    "    data.product_id = data.product_id.astype(np.uint32)\n",
    "\n",
    "    data = data.loc[data.prediction > 0.01, ['order_id', 'prediction', 'product_id']]\n",
    "\n",
    "    data = applyParallel(data.groupby(data.order_id), create_products).reset_index()\n",
    "\n",
    "    data[['order_id', 'products']].to_csv('data/sub.csv', index=False)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
