{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "from src import config, utils\n",
    "import logging\n",
    "from sklearn.model_selection import KFold\n",
    "import pickle\n",
    "import numpy as np"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:root:Will drop ['a_feature', 'UserInfo_270'] len=2\n"
     ]
    }
   ],
   "source": [
    "# Load the configured split, attach hand-made features, then apply the\n",
    "# column drop/select rules from config.\n",
    "assert config.dataset == 'val'\n",
    "train = pd.read_csv(config.pj_root + 'data/' + config.dataset + '.csv', index_col='no')\n",
    "\n",
    "# 'flag' (the label) must be the first column.\n",
    "assert train.columns[0] == 'flag'\n",
    "a_feature = pd.read_csv(config.pj_root + 'data/a_feature.csv', index_col='no')\n",
    "train = train.join(a_feature)\n",
    "\n",
    "if len(config.drop_columns) != 0:\n",
    "    logging.warning('Will drop %s len=%d' % (str(config.drop_columns), len(config.drop_columns)))\n",
    "    train = train.drop(config.drop_columns, axis=1)\n",
    "\n",
    "if len(config.select_columns) != 0:\n",
    "    # NOTE(review): this mutates config.select_columns in place, so re-running\n",
    "    # the cell inserts 'flag' again — not idempotent; confirm intended.\n",
    "    config.select_columns.insert(0, 'flag')  # use flag in training\n",
    "    logging.warning('Will select %s len=%d' % (str(config.select_columns), len(config.select_columns)))\n",
    "    train = train[train.columns[train.columns.isin(config.select_columns)]]\n",
    "else:\n",
    "    # Record the full feature list (every column after 'flag') back into config.\n",
    "    config.select_columns = list(train.columns)[1:]\n",
    "\n",
    "\n",
    "\n",
    "if config.use_basic_process:\n",
    "    # utils.basic_process returns the transformed frame plus a per-column\n",
    "    # transform map, persisted here for later scoring runs.\n",
    "    train, col_func_map = utils.basic_process(train, has_flag=True)\n",
    "    utils.dump_to_data(col_func_map, 'col_func_map.pkl')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Split into feature matrix X (all columns after 'flag') and 1-D label\n",
    "# vector Y (column 0, the 'flag' column).\n",
    "values = train.values\n",
    "X = values[:, 1:]\n",
    "Y = values[:, 0:1].reshape((-1,))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:root:Use 4 Folds...\n",
      "INFO:root:{'n_estimators': 200, 'max_features': 'sqrt', 'max_depth': 7, 'min_samples_leaf': 5, 'criterion': 'entropy'}\n",
      "INFO:root:Fold 1/4 Score: 0.583418 \n",
      "INFO:root:Fold 2/4 Score: 0.506486 \n",
      "INFO:root:Fold 3/4 Score: 0.586266 \n",
      "INFO:root:Fold 4/4 Score: 0.587276 \n"
     ]
    }
   ],
   "source": [
    "# K-fold cross-validation of the configured model class on (X, Y).\n",
    "logging.info('Use %d Folds...' % config.kfold_k)\n",
    "logging.info(config.model_para)\n",
    "kf = KFold(n_splits=config.kfold_k)\n",
    "all_score = 0\n",
    "for i, (train_index, test_index) in enumerate(kf.split(X)):\n",
    "    X_train, X_test = X[train_index], X[test_index]\n",
    "    y_train, y_test = Y[train_index], Y[test_index]\n",
    "    # Fresh model instance per fold so folds share no fitted state.\n",
    "    model = config.model(**config.model_para)\n",
    "    model.fit(X_train, y_train)\n",
    "    y_pred = model.predict_proba(X_test)\n",
    "    # Score against the positive-class probability column.\n",
    "    score = utils.report(y_test, y_pred[:, 1])\n",
    "    all_score += score\n",
    "    # NOTE(review): all_score is accumulated but never averaged or logged.\n",
    "    logging.info('Fold %d/%d Score: %f ' % (i + 1, config.kfold_k, score))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# NOTE(review): both list entries reference the *same* model object, so the\n",
    "# averaging below combines identical predictions — presumably scratch code;\n",
    "# confirm whether two distinct models were intended.\n",
    "models = [model, model]\n",
    "# Accumulator for per-model normalized rank scores, one slot per test sample.\n",
    "normalized_score = np.zeros((X_test.shape[0], ))\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# Average each model's normalized rank positions over the test samples.\n",
    "for model in models:\n",
    "    model.fit(X_train, y_train)\n",
    "    y_pred = model.predict_proba(X_test)[:, 1]\n",
    "    # BUG FIX: a single argsort yields the *sorted order* of indices, not each\n",
    "    # sample's rank, so scores were assigned to the wrong samples (the same\n",
    "    # computation scored a near-random 0.4917 later in this notebook).\n",
    "    # argsort of argsort gives every sample its rank position.\n",
    "    pred_rank = np.argsort(np.argsort(y_pred))\n",
    "    normalized_score += pred_rank / len(pred_rank)\n",
    "\n",
    "normalized_score /= len(models)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 78,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class rank_ensemble(object):\n",
    "    \"\"\"Weighted rank-averaging ensemble.\n",
    "\n",
    "    Each base model's positive-class probabilities are converted to\n",
    "    normalized rank positions in [0, 1); the weighted ranks are summed\n",
    "    across models.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, models, weight=None):\n",
    "        self.models = models\n",
    "        if weight is None:\n",
    "            # BUG FIX: original wrote `weight[:] = 1 / len(self.models)` which\n",
    "            # raises TypeError because `weight` is None here; fill self.weight.\n",
    "            self.weight = np.full((len(self.models), ), 1 / len(self.models))\n",
    "        else:\n",
    "            # Normalize the given weights so they sum to 1.\n",
    "            self.weight = np.array(weight)\n",
    "            self.weight = self.weight / self.weight.sum()\n",
    "\n",
    "    def fit(self, X_train, y_train):\n",
    "        # Fit every base model on the same training data.\n",
    "        for model in self.models:\n",
    "            model.fit(X_train, y_train)\n",
    "\n",
    "    def predict_rank(self, X_test):\n",
    "        normalized_score = np.zeros((X_test.shape[0], ))\n",
    "        for model_index, model in enumerate(self.models):\n",
    "            y_pred = model.predict_proba(X_test)[:, 1]\n",
    "            # argsort of argsort maps each sample to its rank position;\n",
    "            # equivalent to the original enumerate-over-argsort loop.\n",
    "            ranks = np.argsort(np.argsort(y_pred))\n",
    "            normalized_score += ranks / len(ranks) * self.weight[model_index]\n",
    "        # NOTE(review): weights already sum to 1, so this extra division only\n",
    "        # rescales scores monotonically; kept to match the original's output.\n",
    "        return normalized_score / len(self.models)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 82,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import xgboost\n",
    "from sklearn.ensemble import RandomForestClassifier\n",
    "\n",
    "# Base learners for the rank ensemble: a pairwise-ranking XGBoost model and\n",
    "# a random forest. Same hyper-parameters as before, passed directly as\n",
    "# keyword arguments instead of through a redundant **dict(...) wrapper.\n",
    "m1 = xgboost.XGBClassifier(\n",
    "    max_depth=5,\n",
    "    n_estimators=20,\n",
    "    base_score=0.5,\n",
    "    learning_rate=0.06,\n",
    "    objective='rank:pairwise',\n",
    "    min_child_weight=100,\n",
    "    subsample=0.75,\n",
    "    # silent=False\n",
    ")\n",
    "\n",
    "m2 = RandomForestClassifier(n_estimators=50, min_samples_leaf=5,\n",
    "                            max_depth=7, criterion='entropy',\n",
    "                            max_features='sqrt')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 87,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:root:Use 4 Folds...\n",
      "INFO:root:{'n_estimators': 200, 'max_features': 'sqrt', 'max_depth': 7, 'min_samples_leaf': 5, 'criterion': 'entropy'}\n",
      "INFO:root:Fold 1/4 Score: 0.595388 \n",
      "INFO:root:Fold 2/4 Score: 0.514936 \n",
      "INFO:root:Fold 3/4 Score: 0.576093 \n",
      "INFO:root:Fold 4/4 Score: 0.583224 \n"
     ]
    }
   ],
   "source": [
    "# Cross-validate the weighted rank ensemble (xgboost weighted 8, RF 2).\n",
    "logging.info('Use %d Folds...' % config.kfold_k)\n",
    "# NOTE(review): this logs config.model_para, but the cell evaluates the\n",
    "# rank_ensemble built from m1/m2 — the logged parameters are misleading.\n",
    "logging.info(config.model_para)\n",
    "kf = KFold(n_splits=config.kfold_k)\n",
    "all_score = 0\n",
    "for i, (train_index, test_index) in enumerate(kf.split(X)):\n",
    "    X_train, X_test = X[train_index], X[test_index]\n",
    "    y_train, y_test = Y[train_index], Y[test_index]\n",
    "    model = rank_ensemble([m1, m2], [8, 2])\n",
    "    model.fit(X_train, y_train)\n",
    "    # predict_rank returns normalized rank scores, already 1-D.\n",
    "    y_pred = model.predict_rank(X_test)\n",
    "    score = utils.report(y_test, y_pred)\n",
    "    all_score += score\n",
    "    logging.info('Fold %d/%d Score: %f ' % (i + 1, config.kfold_k, score))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 51,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:root:Use 4 Folds...\n",
      "INFO:root:{'n_estimators': 200, 'max_features': 'sqrt', 'max_depth': 7, 'min_samples_leaf': 5, 'criterion': 'entropy'}\n",
      "INFO:root:Fold 1/4 Score: 0.586491 \n",
      "INFO:root:Fold 2/4 Score: 0.507821 \n",
      "INFO:root:Fold 3/4 Score: 0.578541 \n",
      "INFO:root:Fold 4/4 Score: 0.590374 \n"
     ]
    }
   ],
   "source": [
    "# Cross-validate the xgboost model (m1) alone as a baseline.\n",
    "logging.info('Use %d Folds...' % config.kfold_k)\n",
    "# NOTE(review): config.model_para logged here does not describe m1.\n",
    "logging.info(config.model_para)\n",
    "kf = KFold(n_splits=config.kfold_k)\n",
    "all_score = 0\n",
    "for i, (train_index, test_index) in enumerate(kf.split(X)):\n",
    "    X_train, X_test = X[train_index], X[test_index]\n",
    "    y_train, y_test = Y[train_index], Y[test_index]\n",
    "    # NOTE(review): the same m1 object is re-fit every fold; refitting resets\n",
    "    # its state, so folds stay independent.\n",
    "    model = m1\n",
    "    model.fit(X_train, y_train)\n",
    "    y_pred = model.predict_proba(X_test)\n",
    "    score = utils.report(y_test, y_pred[:, 1])\n",
    "    all_score += score\n",
    "    logging.info('Fold %d/%d Score: %f ' % (i + 1, config.kfold_k, score))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 53,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.59037446639889057"
      ]
     },
     "execution_count": 53,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Score of the last fold's raw positive-class probabilities.\n",
    "utils.report(y_test, y_pred[:, 1])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 54,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# BUG FIX: a single argsort yields sorted *indices*, not per-sample ranks,\n",
    "# which permuted the scores across samples and produced the near-random\n",
    "# 0.4917 in the next cell's output; argsort twice gives each sample its rank.\n",
    "pred_rank = np.argsort(np.argsort(y_pred[:, 1]))\n",
    "normalized_score = pred_rank / len(pred_rank)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 57,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.49168753039499991"
      ]
     },
     "execution_count": 57,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# NOTE(review): scored ~0.4917 (near random) because normalized_score was\n",
    "# built from a single argsort, which permutes samples instead of ranking them.\n",
    "utils.report(y_test, normalized_score)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 61,
   "metadata": {
    "collapsed": false,
    "scrolled": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 62,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.59159927232118736"
      ]
     },
     "execution_count": 62,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# NOTE(review): the cell that recomputed normalized_score for this result\n",
    "# was cleared before saving, so this 0.5916 is not reproducible on a fresh\n",
    "# Restart & Run All.\n",
    "utils.report(y_test, normalized_score)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>rank0</th>\n",
       "      <th>rank1</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.547080</td>\n",
       "      <td>0.757</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.299401</td>\n",
       "      <td>0.607</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.522694</td>\n",
       "      <td>0.046</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.547080</td>\n",
       "      <td>0.979</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.714497</td>\n",
       "      <td>0.515</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>0.654840</td>\n",
       "      <td>0.401</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>0.185504</td>\n",
       "      <td>0.407</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>0.651493</td>\n",
       "      <td>0.163</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>0.312353</td>\n",
       "      <td>0.404</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>0.547080</td>\n",
       "      <td>0.703</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>0.491668</td>\n",
       "      <td>0.939</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>0.492092</td>\n",
       "      <td>0.805</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>0.335079</td>\n",
       "      <td>0.146</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>0.142092</td>\n",
       "      <td>0.250</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>0.547080</td>\n",
       "      <td>0.983</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>0.734392</td>\n",
       "      <td>0.832</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>0.388166</td>\n",
       "      <td>0.403</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>0.547080</td>\n",
       "      <td>0.544</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>0.336294</td>\n",
       "      <td>0.118</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>0.491668</td>\n",
       "      <td>0.333</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>0.528339</td>\n",
       "      <td>0.512</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>0.547080</td>\n",
       "      <td>0.584</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>0.557054</td>\n",
       "      <td>0.185</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>0.541983</td>\n",
       "      <td>0.169</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>0.528339</td>\n",
       "      <td>0.686</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>0.410677</td>\n",
       "      <td>0.315</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>0.547080</td>\n",
       "      <td>0.362</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>0.211733</td>\n",
       "      <td>0.085</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>0.458053</td>\n",
       "      <td>0.318</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>0.261992</td>\n",
       "      <td>0.505</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>970</th>\n",
       "      <td>0.557054</td>\n",
       "      <td>0.568</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>971</th>\n",
       "      <td>0.115717</td>\n",
       "      <td>0.326</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>972</th>\n",
       "      <td>0.491668</td>\n",
       "      <td>0.168</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>973</th>\n",
       "      <td>0.676438</td>\n",
       "      <td>0.004</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>974</th>\n",
       "      <td>0.547080</td>\n",
       "      <td>0.119</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>975</th>\n",
       "      <td>0.547080</td>\n",
       "      <td>0.863</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>976</th>\n",
       "      <td>0.462889</td>\n",
       "      <td>0.384</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>977</th>\n",
       "      <td>0.325105</td>\n",
       "      <td>0.349</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>978</th>\n",
       "      <td>0.299831</td>\n",
       "      <td>0.015</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>979</th>\n",
       "      <td>0.000607</td>\n",
       "      <td>0.684</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>980</th>\n",
       "      <td>0.184722</td>\n",
       "      <td>0.502</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>981</th>\n",
       "      <td>0.671299</td>\n",
       "      <td>0.431</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>982</th>\n",
       "      <td>0.351344</td>\n",
       "      <td>0.649</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>983</th>\n",
       "      <td>0.057574</td>\n",
       "      <td>0.243</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>984</th>\n",
       "      <td>0.145452</td>\n",
       "      <td>0.712</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>985</th>\n",
       "      <td>0.573120</td>\n",
       "      <td>0.192</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>986</th>\n",
       "      <td>0.292211</td>\n",
       "      <td>0.365</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>987</th>\n",
       "      <td>0.371501</td>\n",
       "      <td>0.313</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>988</th>\n",
       "      <td>0.530427</td>\n",
       "      <td>0.771</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>989</th>\n",
       "      <td>0.547080</td>\n",
       "      <td>0.082</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>990</th>\n",
       "      <td>0.353046</td>\n",
       "      <td>0.759</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>991</th>\n",
       "      <td>0.298479</td>\n",
       "      <td>0.102</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>992</th>\n",
       "      <td>0.528339</td>\n",
       "      <td>0.592</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>993</th>\n",
       "      <td>0.547080</td>\n",
       "      <td>0.343</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>994</th>\n",
       "      <td>0.662776</td>\n",
       "      <td>0.516</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>995</th>\n",
       "      <td>0.491668</td>\n",
       "      <td>0.802</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>996</th>\n",
       "      <td>0.547080</td>\n",
       "      <td>0.931</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>997</th>\n",
       "      <td>0.330391</td>\n",
       "      <td>0.083</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>998</th>\n",
       "      <td>0.251660</td>\n",
       "      <td>0.853</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>999</th>\n",
       "      <td>0.448824</td>\n",
       "      <td>0.825</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>1000 rows × 2 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "        rank0  rank1\n",
       "0    0.547080  0.757\n",
       "1    0.299401  0.607\n",
       "2    0.522694  0.046\n",
       "3    0.547080  0.979\n",
       "4    0.714497  0.515\n",
       "5    0.654840  0.401\n",
       "6    0.185504  0.407\n",
       "7    0.651493  0.163\n",
       "8    0.312353  0.404\n",
       "9    0.547080  0.703\n",
       "10   0.491668  0.939\n",
       "11   0.492092  0.805\n",
       "12   0.335079  0.146\n",
       "13   0.142092  0.250\n",
       "14   0.547080  0.983\n",
       "15   0.734392  0.832\n",
       "16   0.388166  0.403\n",
       "17   0.547080  0.544\n",
       "18   0.336294  0.118\n",
       "19   0.491668  0.333\n",
       "20   0.528339  0.512\n",
       "21   0.547080  0.584\n",
       "22   0.557054  0.185\n",
       "23   0.541983  0.169\n",
       "24   0.528339  0.686\n",
       "25   0.410677  0.315\n",
       "26   0.547080  0.362\n",
       "27   0.211733  0.085\n",
       "28   0.458053  0.318\n",
       "29   0.261992  0.505\n",
       "..        ...    ...\n",
       "970  0.557054  0.568\n",
       "971  0.115717  0.326\n",
       "972  0.491668  0.168\n",
       "973  0.676438  0.004\n",
       "974  0.547080  0.119\n",
       "975  0.547080  0.863\n",
       "976  0.462889  0.384\n",
       "977  0.325105  0.349\n",
       "978  0.299831  0.015\n",
       "979  0.000607  0.684\n",
       "980  0.184722  0.502\n",
       "981  0.671299  0.431\n",
       "982  0.351344  0.649\n",
       "983  0.057574  0.243\n",
       "984  0.145452  0.712\n",
       "985  0.573120  0.192\n",
       "986  0.292211  0.365\n",
       "987  0.371501  0.313\n",
       "988  0.530427  0.771\n",
       "989  0.547080  0.082\n",
       "990  0.353046  0.759\n",
       "991  0.298479  0.102\n",
       "992  0.528339  0.592\n",
       "993  0.547080  0.343\n",
       "994  0.662776  0.516\n",
       "995  0.491668  0.802\n",
       "996  0.547080  0.931\n",
       "997  0.330391  0.083\n",
       "998  0.251660  0.853\n",
       "999  0.448824  0.825\n",
       "\n",
       "[1000 rows x 2 columns]"
      ]
     },
     "execution_count": 59,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Side-by-side view of probability-based (rank0) and rank-based (rank1)\n",
    "# scores for the last fold's test samples.\n",
    "pd.DataFrame({'rank0': y_pred[:, 1], 'rank1':normalized_score })"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "anaconda-cloud": {},
  "kernelspec": {
   "display_name": "Python [conda root]",
   "language": "python",
   "name": "conda-root-py"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
