{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "Sa6t_iMfQN3N",
    "outputId": "f7e31ad2-7446-4ce3-c3e8-b48e0bb5a0b8"
   },
   "source": [
    "# 基于lightGBM的贷款预测\n",
    "* 目标：根据用户信息，判断是否贷款。\n",
    "* 结果：能够有效预测用户是否贷款，F1得分接近0.95。\n",
    "* 方法：\n",
    "1. lightGBM分类模型。\n",
    "2. hyperopt贝叶斯网络自动寻优。\n",
    "3. Featuretools特征工程。\n",
    "4. Stack模型集成。  \n",
    "* 感想：\n",
    "* 本项目是极客大学机器学习训练营的作业，整体难度较低，主要是熟悉各类方法与参数。\n",
    "* 自动参数寻优,自动特征工程，模型集成都很耗费运算资源，运算很慢，在估计工期的时候要考虑。"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 一.数据预处理"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Cloning into 'cardashians'...\n",
      "remote: Enumerating objects: 64, done.\u001b[K\n",
      "remote: Total 64 (delta 0), reused 0 (delta 0), pack-reused 64\u001b[K\n",
      "Receiving objects: 100% (64/64), 15.99 MiB | 2.79 MiB/s, done.\n",
      "Resolving deltas: 100% (31/31), done.\n"
     ]
    }
   ],
   "source": [
    "#前人已经整理好数据\n",
    "!git clone https://github.com/tolarteh/cardashians.git"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 安装必要的包\n",
    "#!pip install lightgbm xgboost catboost category-encoders sklearn pandas"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "id": "gZoA3hv5RwBB"
   },
   "outputs": [],
   "source": [
    "#读取数据\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "\n",
    "test_final = pd.read_csv('./test_final.csv', engine='python')\n",
    "train_final = pd.read_csv('./train_final.csv', engine='python')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "continuous_annual_inc                     float64\n",
       "continuous_annual_inc_joint               float64\n",
       "continuous_delinq_2yrs                    float64\n",
       "continuous_dti                            float64\n",
       "continuous_dti_joint                      float64\n",
       "continuous_fico_range_high                float64\n",
       "continuous_fico_range_low                 float64\n",
       "continuous_funded_amnt                    float64\n",
       "continuous_funded_amnt_inv                float64\n",
       "continuous_inq_last_6mths                 float64\n",
       "continuous_installment                    float64\n",
       "continuous_int_rate                       float64\n",
       "continuous_last_fico_range_high           float64\n",
       "continuous_last_fico_range_low            float64\n",
       "continuous_loan_amnt                      float64\n",
       "loan_status                                 int64\n",
       "continuous_mths_since_last_delinq         float64\n",
       "continuous_mths_since_last_major_derog    float64\n",
       "continuous_mths_since_last_record         float64\n",
       "continuous_open_acc                       float64\n",
       "continuous_pub_rec                        float64\n",
       "discrete_addr_state_1_one_hot               int64\n",
       "discrete_addr_state_2_one_hot               int64\n",
       "discrete_addr_state_3_one_hot               int64\n",
       "discrete_addr_state_4_one_hot               int64\n",
       "discrete_addr_state_5_one_hot               int64\n",
       "discrete_addr_state_6_one_hot               int64\n",
       "discrete_addr_state_7_one_hot               int64\n",
       "discrete_addr_state_8_one_hot               int64\n",
       "discrete_addr_state_9_one_hot               int64\n",
       "discrete_addr_state_10_one_hot              int64\n",
       "discrete_addr_state_11_one_hot              int64\n",
       "discrete_addr_state_12_one_hot              int64\n",
       "discrete_addr_state_13_one_hot              int64\n",
       "discrete_addr_state_14_one_hot              int64\n",
       "discrete_addr_state_15_one_hot              int64\n",
       "discrete_addr_state_16_one_hot              int64\n",
       "discrete_addr_state_17_one_hot              int64\n",
       "discrete_addr_state_18_one_hot              int64\n",
       "discrete_addr_state_19_one_hot              int64\n",
       "discrete_addr_state_20_one_hot              int64\n",
       "discrete_addr_state_21_one_hot              int64\n",
       "discrete_addr_state_22_one_hot              int64\n",
       "discrete_addr_state_23_one_hot              int64\n",
       "discrete_addr_state_24_one_hot              int64\n",
       "discrete_addr_state_25_one_hot              int64\n",
       "discrete_addr_state_26_one_hot              int64\n",
       "discrete_addr_state_27_one_hot              int64\n",
       "discrete_addr_state_28_one_hot              int64\n",
       "discrete_addr_state_29_one_hot              int64\n",
       "discrete_addr_state_30_one_hot              int64\n",
       "discrete_addr_state_31_one_hot              int64\n",
       "discrete_addr_state_32_one_hot              int64\n",
       "discrete_addr_state_33_one_hot              int64\n",
       "discrete_addr_state_34_one_hot              int64\n",
       "discrete_addr_state_35_one_hot              int64\n",
       "discrete_addr_state_36_one_hot              int64\n",
       "discrete_addr_state_37_one_hot              int64\n",
       "discrete_addr_state_38_one_hot              int64\n",
       "discrete_addr_state_39_one_hot              int64\n",
       "discrete_addr_state_40_one_hot              int64\n",
       "discrete_addr_state_41_one_hot              int64\n",
       "discrete_addr_state_42_one_hot              int64\n",
       "discrete_addr_state_43_one_hot              int64\n",
       "discrete_addr_state_44_one_hot              int64\n",
       "discrete_addr_state_45_one_hot              int64\n",
       "discrete_addr_state_46_one_hot              int64\n",
       "discrete_addr_state_47_one_hot              int64\n",
       "discrete_addr_state_48_one_hot              int64\n",
       "discrete_addr_state_49_one_hot              int64\n",
       "discrete_application_type_1_one_hot         int64\n",
       "discrete_application_type_2_one_hot         int64\n",
       "discrete_emp_length_1_one_hot               int64\n",
       "discrete_emp_length_2_one_hot               int64\n",
       "discrete_emp_length_3_one_hot               int64\n",
       "discrete_emp_length_4_one_hot               int64\n",
       "discrete_emp_length_5_one_hot               int64\n",
       "discrete_emp_length_6_one_hot               int64\n",
       "discrete_emp_length_7_one_hot               int64\n",
       "discrete_emp_length_8_one_hot               int64\n",
       "discrete_emp_length_9_one_hot               int64\n",
       "discrete_emp_length_10_one_hot              int64\n",
       "discrete_emp_length_11_one_hot              int64\n",
       "discrete_emp_length_12_one_hot              int64\n",
       "discrete_grade_1_one_hot                    int64\n",
       "discrete_grade_2_one_hot                    int64\n",
       "discrete_grade_3_one_hot                    int64\n",
       "discrete_grade_4_one_hot                    int64\n",
       "discrete_grade_5_one_hot                    int64\n",
       "discrete_grade_6_one_hot                    int64\n",
       "discrete_grade_7_one_hot                    int64\n",
       "discrete_home_ownership_1_one_hot           int64\n",
       "discrete_home_ownership_2_one_hot           int64\n",
       "discrete_home_ownership_3_one_hot           int64\n",
       "discrete_home_ownership_4_one_hot           int64\n",
       "discrete_policy_code_1_one_hot              int64\n",
       "discrete_purpose_1_one_hot                  int64\n",
       "discrete_purpose_2_one_hot                  int64\n",
       "discrete_purpose_3_one_hot                  int64\n",
       "discrete_purpose_4_one_hot                  int64\n",
       "discrete_purpose_5_one_hot                  int64\n",
       "discrete_purpose_6_one_hot                  int64\n",
       "discrete_purpose_7_one_hot                  int64\n",
       "discrete_purpose_8_one_hot                  int64\n",
       "discrete_purpose_9_one_hot                  int64\n",
       "discrete_purpose_10_one_hot                 int64\n",
       "discrete_purpose_11_one_hot                 int64\n",
       "discrete_purpose_12_one_hot                 int64\n",
       "discrete_pymnt_plan_1_one_hot               int64\n",
       "discrete_sub_grade_1_one_hot                int64\n",
       "discrete_sub_grade_2_one_hot                int64\n",
       "discrete_sub_grade_3_one_hot                int64\n",
       "discrete_sub_grade_4_one_hot                int64\n",
       "discrete_sub_grade_5_one_hot                int64\n",
       "discrete_sub_grade_6_one_hot                int64\n",
       "discrete_sub_grade_7_one_hot                int64\n",
       "discrete_sub_grade_8_one_hot                int64\n",
       "discrete_sub_grade_9_one_hot                int64\n",
       "discrete_sub_grade_10_one_hot               int64\n",
       "discrete_sub_grade_11_one_hot               int64\n",
       "discrete_sub_grade_12_one_hot               int64\n",
       "discrete_sub_grade_13_one_hot               int64\n",
       "discrete_sub_grade_14_one_hot               int64\n",
       "discrete_sub_grade_15_one_hot               int64\n",
       "discrete_sub_grade_16_one_hot               int64\n",
       "discrete_sub_grade_17_one_hot               int64\n",
       "discrete_sub_grade_18_one_hot               int64\n",
       "discrete_sub_grade_19_one_hot               int64\n",
       "discrete_sub_grade_20_one_hot               int64\n",
       "discrete_sub_grade_21_one_hot               int64\n",
       "discrete_sub_grade_22_one_hot               int64\n",
       "discrete_sub_grade_23_one_hot               int64\n",
       "discrete_sub_grade_24_one_hot               int64\n",
       "discrete_sub_grade_25_one_hot               int64\n",
       "discrete_sub_grade_26_one_hot               int64\n",
       "discrete_sub_grade_27_one_hot               int64\n",
       "discrete_sub_grade_28_one_hot               int64\n",
       "discrete_sub_grade_29_one_hot               int64\n",
       "discrete_sub_grade_30_one_hot               int64\n",
       "discrete_sub_grade_31_one_hot               int64\n",
       "discrete_sub_grade_32_one_hot               int64\n",
       "discrete_sub_grade_33_one_hot               int64\n",
       "discrete_sub_grade_34_one_hot               int64\n",
       "discrete_sub_grade_35_one_hot               int64\n",
       "discrete_term_1_one_hot                     int64\n",
       "discrete_term_2_one_hot                     int64\n",
       "dtype: object"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#查看变量属性\n",
    "#print(test_final.columns.to_list())\n",
    "#print(train_final.columns.to_list())\n",
    "#print(train_final.info())\n",
    "#显示所有列\n",
    "pd.set_option('display.max_columns', None)\n",
    "train_final.dtypes"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>continuous_annual_inc</th>\n",
       "      <th>continuous_annual_inc_joint</th>\n",
       "      <th>continuous_delinq_2yrs</th>\n",
       "      <th>continuous_dti</th>\n",
       "      <th>continuous_dti_joint</th>\n",
       "      <th>continuous_fico_range_high</th>\n",
       "      <th>continuous_fico_range_low</th>\n",
       "      <th>continuous_funded_amnt</th>\n",
       "      <th>continuous_funded_amnt_inv</th>\n",
       "      <th>continuous_inq_last_6mths</th>\n",
       "      <th>continuous_installment</th>\n",
       "      <th>continuous_int_rate</th>\n",
       "      <th>continuous_last_fico_range_high</th>\n",
       "      <th>continuous_last_fico_range_low</th>\n",
       "      <th>continuous_loan_amnt</th>\n",
       "      <th>loan_status</th>\n",
       "      <th>continuous_mths_since_last_delinq</th>\n",
       "      <th>continuous_mths_since_last_major_derog</th>\n",
       "      <th>continuous_mths_since_last_record</th>\n",
       "      <th>continuous_open_acc</th>\n",
       "      <th>continuous_pub_rec</th>\n",
       "      <th>discrete_addr_state_1_one_hot</th>\n",
       "      <th>discrete_addr_state_2_one_hot</th>\n",
       "      <th>discrete_addr_state_3_one_hot</th>\n",
       "      <th>discrete_addr_state_4_one_hot</th>\n",
       "      <th>discrete_addr_state_5_one_hot</th>\n",
       "      <th>discrete_addr_state_6_one_hot</th>\n",
       "      <th>discrete_addr_state_7_one_hot</th>\n",
       "      <th>discrete_addr_state_8_one_hot</th>\n",
       "      <th>discrete_addr_state_9_one_hot</th>\n",
       "      <th>discrete_addr_state_10_one_hot</th>\n",
       "      <th>discrete_addr_state_11_one_hot</th>\n",
       "      <th>discrete_addr_state_12_one_hot</th>\n",
       "      <th>discrete_addr_state_13_one_hot</th>\n",
       "      <th>discrete_addr_state_14_one_hot</th>\n",
       "      <th>discrete_addr_state_15_one_hot</th>\n",
       "      <th>discrete_addr_state_16_one_hot</th>\n",
       "      <th>discrete_addr_state_17_one_hot</th>\n",
       "      <th>discrete_addr_state_18_one_hot</th>\n",
       "      <th>discrete_addr_state_19_one_hot</th>\n",
       "      <th>discrete_addr_state_20_one_hot</th>\n",
       "      <th>discrete_addr_state_21_one_hot</th>\n",
       "      <th>discrete_addr_state_22_one_hot</th>\n",
       "      <th>discrete_addr_state_23_one_hot</th>\n",
       "      <th>discrete_addr_state_24_one_hot</th>\n",
       "      <th>discrete_addr_state_25_one_hot</th>\n",
       "      <th>discrete_addr_state_26_one_hot</th>\n",
       "      <th>discrete_addr_state_27_one_hot</th>\n",
       "      <th>discrete_addr_state_28_one_hot</th>\n",
       "      <th>discrete_addr_state_29_one_hot</th>\n",
       "      <th>discrete_addr_state_30_one_hot</th>\n",
       "      <th>discrete_addr_state_31_one_hot</th>\n",
       "      <th>discrete_addr_state_32_one_hot</th>\n",
       "      <th>discrete_addr_state_33_one_hot</th>\n",
       "      <th>discrete_addr_state_34_one_hot</th>\n",
       "      <th>discrete_addr_state_35_one_hot</th>\n",
       "      <th>discrete_addr_state_36_one_hot</th>\n",
       "      <th>discrete_addr_state_37_one_hot</th>\n",
       "      <th>discrete_addr_state_38_one_hot</th>\n",
       "      <th>discrete_addr_state_39_one_hot</th>\n",
       "      <th>discrete_addr_state_40_one_hot</th>\n",
       "      <th>discrete_addr_state_41_one_hot</th>\n",
       "      <th>discrete_addr_state_42_one_hot</th>\n",
       "      <th>discrete_addr_state_43_one_hot</th>\n",
       "      <th>discrete_addr_state_44_one_hot</th>\n",
       "      <th>discrete_addr_state_45_one_hot</th>\n",
       "      <th>discrete_addr_state_46_one_hot</th>\n",
       "      <th>discrete_addr_state_47_one_hot</th>\n",
       "      <th>discrete_addr_state_48_one_hot</th>\n",
       "      <th>discrete_addr_state_49_one_hot</th>\n",
       "      <th>discrete_application_type_1_one_hot</th>\n",
       "      <th>discrete_application_type_2_one_hot</th>\n",
       "      <th>discrete_emp_length_1_one_hot</th>\n",
       "      <th>discrete_emp_length_2_one_hot</th>\n",
       "      <th>discrete_emp_length_3_one_hot</th>\n",
       "      <th>discrete_emp_length_4_one_hot</th>\n",
       "      <th>discrete_emp_length_5_one_hot</th>\n",
       "      <th>discrete_emp_length_6_one_hot</th>\n",
       "      <th>discrete_emp_length_7_one_hot</th>\n",
       "      <th>discrete_emp_length_8_one_hot</th>\n",
       "      <th>discrete_emp_length_9_one_hot</th>\n",
       "      <th>discrete_emp_length_10_one_hot</th>\n",
       "      <th>discrete_emp_length_11_one_hot</th>\n",
       "      <th>discrete_emp_length_12_one_hot</th>\n",
       "      <th>discrete_grade_1_one_hot</th>\n",
       "      <th>discrete_grade_2_one_hot</th>\n",
       "      <th>discrete_grade_3_one_hot</th>\n",
       "      <th>discrete_grade_4_one_hot</th>\n",
       "      <th>discrete_grade_5_one_hot</th>\n",
       "      <th>discrete_grade_6_one_hot</th>\n",
       "      <th>discrete_grade_7_one_hot</th>\n",
       "      <th>discrete_home_ownership_1_one_hot</th>\n",
       "      <th>discrete_home_ownership_2_one_hot</th>\n",
       "      <th>discrete_home_ownership_3_one_hot</th>\n",
       "      <th>discrete_home_ownership_4_one_hot</th>\n",
       "      <th>discrete_policy_code_1_one_hot</th>\n",
       "      <th>discrete_purpose_1_one_hot</th>\n",
       "      <th>discrete_purpose_2_one_hot</th>\n",
       "      <th>discrete_purpose_3_one_hot</th>\n",
       "      <th>discrete_purpose_4_one_hot</th>\n",
       "      <th>discrete_purpose_5_one_hot</th>\n",
       "      <th>discrete_purpose_6_one_hot</th>\n",
       "      <th>discrete_purpose_7_one_hot</th>\n",
       "      <th>discrete_purpose_8_one_hot</th>\n",
       "      <th>discrete_purpose_9_one_hot</th>\n",
       "      <th>discrete_purpose_10_one_hot</th>\n",
       "      <th>discrete_purpose_11_one_hot</th>\n",
       "      <th>discrete_purpose_12_one_hot</th>\n",
       "      <th>discrete_pymnt_plan_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_3_one_hot</th>\n",
       "      <th>discrete_sub_grade_4_one_hot</th>\n",
       "      <th>discrete_sub_grade_5_one_hot</th>\n",
       "      <th>discrete_sub_grade_6_one_hot</th>\n",
       "      <th>discrete_sub_grade_7_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot</th>\n",
       "      <th>discrete_sub_grade_9_one_hot</th>\n",
       "      <th>discrete_sub_grade_10_one_hot</th>\n",
       "      <th>discrete_sub_grade_11_one_hot</th>\n",
       "      <th>discrete_sub_grade_12_one_hot</th>\n",
       "      <th>discrete_sub_grade_13_one_hot</th>\n",
       "      <th>discrete_sub_grade_14_one_hot</th>\n",
       "      <th>discrete_sub_grade_15_one_hot</th>\n",
       "      <th>discrete_sub_grade_16_one_hot</th>\n",
       "      <th>discrete_sub_grade_17_one_hot</th>\n",
       "      <th>discrete_sub_grade_18_one_hot</th>\n",
       "      <th>discrete_sub_grade_19_one_hot</th>\n",
       "      <th>discrete_sub_grade_20_one_hot</th>\n",
       "      <th>discrete_sub_grade_21_one_hot</th>\n",
       "      <th>discrete_sub_grade_22_one_hot</th>\n",
       "      <th>discrete_sub_grade_23_one_hot</th>\n",
       "      <th>discrete_sub_grade_24_one_hot</th>\n",
       "      <th>discrete_sub_grade_25_one_hot</th>\n",
       "      <th>discrete_sub_grade_26_one_hot</th>\n",
       "      <th>discrete_sub_grade_27_one_hot</th>\n",
       "      <th>discrete_sub_grade_28_one_hot</th>\n",
       "      <th>discrete_sub_grade_29_one_hot</th>\n",
       "      <th>discrete_sub_grade_30_one_hot</th>\n",
       "      <th>discrete_sub_grade_31_one_hot</th>\n",
       "      <th>discrete_sub_grade_32_one_hot</th>\n",
       "      <th>discrete_sub_grade_33_one_hot</th>\n",
       "      <th>discrete_sub_grade_34_one_hot</th>\n",
       "      <th>discrete_sub_grade_35_one_hot</th>\n",
       "      <th>discrete_term_1_one_hot</th>\n",
       "      <th>discrete_term_2_one_hot</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>count</th>\n",
       "      <td>5.000000e+04</td>\n",
       "      <td>220.000000</td>\n",
       "      <td>50000.00000</td>\n",
       "      <td>49999.000000</td>\n",
       "      <td>220.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.00000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.00000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>26083.000000</td>\n",
       "      <td>15052.000000</td>\n",
       "      <td>9495.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.00000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.00000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.00000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.00000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.00000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.0</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.0</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.00000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.00000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "      <td>50000.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>mean</th>\n",
       "      <td>7.835886e+04</td>\n",
       "      <td>109248.802545</td>\n",
       "      <td>0.34738</td>\n",
       "      <td>19.080239</td>\n",
       "      <td>18.831864</td>\n",
       "      <td>698.404460</td>\n",
       "      <td>694.404300</td>\n",
       "      <td>14332.53650</td>\n",
       "      <td>14325.533000</td>\n",
       "      <td>0.620820</td>\n",
       "      <td>428.892109</td>\n",
       "      <td>11.986125</td>\n",
       "      <td>675.644860</td>\n",
       "      <td>658.428400</td>\n",
       "      <td>14332.53650</td>\n",
       "      <td>0.795760</td>\n",
       "      <td>34.197485</td>\n",
       "      <td>44.214589</td>\n",
       "      <td>65.500790</td>\n",
       "      <td>11.944960</td>\n",
       "      <td>0.252820</td>\n",
       "      <td>0.033880</td>\n",
       "      <td>0.002000</td>\n",
       "      <td>0.038880</td>\n",
       "      <td>0.033380</td>\n",
       "      <td>0.018000</td>\n",
       "      <td>0.012260</td>\n",
       "      <td>0.004400</td>\n",
       "      <td>0.02756</td>\n",
       "      <td>0.137380</td>\n",
       "      <td>0.028700</td>\n",
       "      <td>0.025140</td>\n",
       "      <td>0.016960</td>\n",
       "      <td>0.023600</td>\n",
       "      <td>0.08170</td>\n",
       "      <td>0.083120</td>\n",
       "      <td>0.008160</td>\n",
       "      <td>0.005800</td>\n",
       "      <td>0.012500</td>\n",
       "      <td>0.019380</td>\n",
       "      <td>0.033520</td>\n",
       "      <td>0.011400</td>\n",
       "      <td>0.071380</td>\n",
       "      <td>0.020820</td>\n",
       "      <td>0.026920</td>\n",
       "      <td>0.015980</td>\n",
       "      <td>0.002400</td>\n",
       "      <td>0.022720</td>\n",
       "      <td>0.012980</td>\n",
       "      <td>0.004700</td>\n",
       "      <td>0.001820</td>\n",
       "      <td>0.037180</td>\n",
       "      <td>0.002920</td>\n",
       "      <td>0.017400</td>\n",
       "      <td>0.004960</td>\n",
       "      <td>0.004740</td>\n",
       "      <td>0.011040</td>\n",
       "      <td>0.014720</td>\n",
       "      <td>0.00742</td>\n",
       "      <td>0.013760</td>\n",
       "      <td>0.003480</td>\n",
       "      <td>0.002600</td>\n",
       "      <td>0.001840</td>\n",
       "      <td>0.008940</td>\n",
       "      <td>0.010180</td>\n",
       "      <td>0.005720</td>\n",
       "      <td>0.006480</td>\n",
       "      <td>0.002780</td>\n",
       "      <td>0.004400</td>\n",
       "      <td>0.002000</td>\n",
       "      <td>0.995600</td>\n",
       "      <td>0.004400</td>\n",
       "      <td>0.331520</td>\n",
       "      <td>0.081180</td>\n",
       "      <td>0.057220</td>\n",
       "      <td>0.039380</td>\n",
       "      <td>0.03538</td>\n",
       "      <td>0.049620</td>\n",
       "      <td>0.087340</td>\n",
       "      <td>0.061060</td>\n",
       "      <td>0.038000</td>\n",
       "      <td>0.089660</td>\n",
       "      <td>0.066180</td>\n",
       "      <td>0.063460</td>\n",
       "      <td>0.278480</td>\n",
       "      <td>0.315720</td>\n",
       "      <td>0.018980</td>\n",
       "      <td>0.191520</td>\n",
       "      <td>0.062880</td>\n",
       "      <td>0.12806</td>\n",
       "      <td>0.004360</td>\n",
       "      <td>0.486440</td>\n",
       "      <td>0.397960</td>\n",
       "      <td>0.115580</td>\n",
       "      <td>0.000020</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.562380</td>\n",
       "      <td>0.009600</td>\n",
       "      <td>0.061500</td>\n",
       "      <td>0.021300</td>\n",
       "      <td>0.249700</td>\n",
       "      <td>0.057880</td>\n",
       "      <td>0.003760</td>\n",
       "      <td>0.005900</td>\n",
       "      <td>0.008860</td>\n",
       "      <td>0.012560</td>\n",
       "      <td>0.006040</td>\n",
       "      <td>0.000520</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.056600</td>\n",
       "      <td>0.065920</td>\n",
       "      <td>0.067100</td>\n",
       "      <td>0.005860</td>\n",
       "      <td>0.055120</td>\n",
       "      <td>0.061300</td>\n",
       "      <td>0.056820</td>\n",
       "      <td>0.030020</td>\n",
       "      <td>0.064140</td>\n",
       "      <td>0.059300</td>\n",
       "      <td>0.015560</td>\n",
       "      <td>0.036840</td>\n",
       "      <td>0.012960</td>\n",
       "      <td>0.041540</td>\n",
       "      <td>0.044880</td>\n",
       "      <td>0.02210</td>\n",
       "      <td>0.003780</td>\n",
       "      <td>0.036220</td>\n",
       "      <td>0.066360</td>\n",
       "      <td>0.023940</td>\n",
       "      <td>0.017880</td>\n",
       "      <td>0.05160</td>\n",
       "      <td>0.004620</td>\n",
       "      <td>0.009440</td>\n",
       "      <td>0.027920</td>\n",
       "      <td>0.016740</td>\n",
       "      <td>0.002320</td>\n",
       "      <td>0.008180</td>\n",
       "      <td>0.028180</td>\n",
       "      <td>0.001420</td>\n",
       "      <td>0.001260</td>\n",
       "      <td>0.000880</td>\n",
       "      <td>0.000520</td>\n",
       "      <td>0.002400</td>\n",
       "      <td>0.000280</td>\n",
       "      <td>0.773740</td>\n",
       "      <td>0.226260</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>std</th>\n",
       "      <td>9.783053e+04</td>\n",
       "      <td>52319.230212</td>\n",
       "      <td>0.91874</td>\n",
       "      <td>9.802002</td>\n",
       "      <td>7.586033</td>\n",
       "      <td>31.180915</td>\n",
       "      <td>31.180139</td>\n",
       "      <td>8617.58487</td>\n",
       "      <td>8612.853833</td>\n",
       "      <td>0.906035</td>\n",
       "      <td>254.569184</td>\n",
       "      <td>4.182402</td>\n",
       "      <td>81.571458</td>\n",
       "      <td>133.005552</td>\n",
       "      <td>8617.58487</td>\n",
       "      <td>0.403149</td>\n",
       "      <td>21.828204</td>\n",
       "      <td>21.352291</td>\n",
       "      <td>23.659021</td>\n",
       "      <td>5.676976</td>\n",
       "      <td>0.665455</td>\n",
       "      <td>0.180922</td>\n",
       "      <td>0.044677</td>\n",
       "      <td>0.193311</td>\n",
       "      <td>0.179629</td>\n",
       "      <td>0.132952</td>\n",
       "      <td>0.110045</td>\n",
       "      <td>0.066187</td>\n",
       "      <td>0.16371</td>\n",
       "      <td>0.344252</td>\n",
       "      <td>0.166964</td>\n",
       "      <td>0.156552</td>\n",
       "      <td>0.129123</td>\n",
       "      <td>0.151801</td>\n",
       "      <td>0.27391</td>\n",
       "      <td>0.276066</td>\n",
       "      <td>0.089964</td>\n",
       "      <td>0.075937</td>\n",
       "      <td>0.111104</td>\n",
       "      <td>0.137858</td>\n",
       "      <td>0.179992</td>\n",
       "      <td>0.106162</td>\n",
       "      <td>0.257461</td>\n",
       "      <td>0.142783</td>\n",
       "      <td>0.161851</td>\n",
       "      <td>0.125399</td>\n",
       "      <td>0.048931</td>\n",
       "      <td>0.149011</td>\n",
       "      <td>0.113189</td>\n",
       "      <td>0.068396</td>\n",
       "      <td>0.042623</td>\n",
       "      <td>0.189205</td>\n",
       "      <td>0.053959</td>\n",
       "      <td>0.130758</td>\n",
       "      <td>0.070253</td>\n",
       "      <td>0.068685</td>\n",
       "      <td>0.104491</td>\n",
       "      <td>0.120431</td>\n",
       "      <td>0.08582</td>\n",
       "      <td>0.116494</td>\n",
       "      <td>0.058889</td>\n",
       "      <td>0.050924</td>\n",
       "      <td>0.042856</td>\n",
       "      <td>0.094129</td>\n",
       "      <td>0.100382</td>\n",
       "      <td>0.075415</td>\n",
       "      <td>0.080238</td>\n",
       "      <td>0.052653</td>\n",
       "      <td>0.066187</td>\n",
       "      <td>0.044677</td>\n",
       "      <td>0.066187</td>\n",
       "      <td>0.066187</td>\n",
       "      <td>0.470764</td>\n",
       "      <td>0.273114</td>\n",
       "      <td>0.232265</td>\n",
       "      <td>0.194499</td>\n",
       "      <td>0.18474</td>\n",
       "      <td>0.217161</td>\n",
       "      <td>0.282335</td>\n",
       "      <td>0.239443</td>\n",
       "      <td>0.191198</td>\n",
       "      <td>0.285697</td>\n",
       "      <td>0.248599</td>\n",
       "      <td>0.243791</td>\n",
       "      <td>0.448255</td>\n",
       "      <td>0.464807</td>\n",
       "      <td>0.136456</td>\n",
       "      <td>0.393501</td>\n",
       "      <td>0.242749</td>\n",
       "      <td>0.33416</td>\n",
       "      <td>0.065887</td>\n",
       "      <td>0.499821</td>\n",
       "      <td>0.489482</td>\n",
       "      <td>0.319724</td>\n",
       "      <td>0.004472</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.496098</td>\n",
       "      <td>0.097509</td>\n",
       "      <td>0.240248</td>\n",
       "      <td>0.144384</td>\n",
       "      <td>0.432844</td>\n",
       "      <td>0.233519</td>\n",
       "      <td>0.061204</td>\n",
       "      <td>0.076585</td>\n",
       "      <td>0.093711</td>\n",
       "      <td>0.111366</td>\n",
       "      <td>0.077483</td>\n",
       "      <td>0.022798</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.231079</td>\n",
       "      <td>0.248145</td>\n",
       "      <td>0.250198</td>\n",
       "      <td>0.076327</td>\n",
       "      <td>0.228217</td>\n",
       "      <td>0.239882</td>\n",
       "      <td>0.231501</td>\n",
       "      <td>0.170644</td>\n",
       "      <td>0.245005</td>\n",
       "      <td>0.236188</td>\n",
       "      <td>0.123767</td>\n",
       "      <td>0.188371</td>\n",
       "      <td>0.113103</td>\n",
       "      <td>0.199538</td>\n",
       "      <td>0.207043</td>\n",
       "      <td>0.14701</td>\n",
       "      <td>0.061366</td>\n",
       "      <td>0.186839</td>\n",
       "      <td>0.248913</td>\n",
       "      <td>0.152864</td>\n",
       "      <td>0.132517</td>\n",
       "      <td>0.22122</td>\n",
       "      <td>0.067814</td>\n",
       "      <td>0.096701</td>\n",
       "      <td>0.164745</td>\n",
       "      <td>0.128297</td>\n",
       "      <td>0.048111</td>\n",
       "      <td>0.090074</td>\n",
       "      <td>0.165488</td>\n",
       "      <td>0.037656</td>\n",
       "      <td>0.035474</td>\n",
       "      <td>0.029652</td>\n",
       "      <td>0.022798</td>\n",
       "      <td>0.048931</td>\n",
       "      <td>0.016731</td>\n",
       "      <td>0.418414</td>\n",
       "      <td>0.418414</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>min</th>\n",
       "      <td>0.000000e+00</td>\n",
       "      <td>28000.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>3.000000</td>\n",
       "      <td>664.000000</td>\n",
       "      <td>660.000000</td>\n",
       "      <td>1000.00000</td>\n",
       "      <td>950.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>14.770000</td>\n",
       "      <td>5.320000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1000.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25%</th>\n",
       "      <td>4.700000e+04</td>\n",
       "      <td>76000.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>12.410000</td>\n",
       "      <td>13.805000</td>\n",
       "      <td>674.000000</td>\n",
       "      <td>670.000000</td>\n",
       "      <td>7775.00000</td>\n",
       "      <td>7750.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>244.242500</td>\n",
       "      <td>9.170000</td>\n",
       "      <td>624.000000</td>\n",
       "      <td>620.000000</td>\n",
       "      <td>7775.00000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>16.000000</td>\n",
       "      <td>27.000000</td>\n",
       "      <td>51.000000</td>\n",
       "      <td>8.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>50%</th>\n",
       "      <td>6.500000e+04</td>\n",
       "      <td>99000.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>18.520000</td>\n",
       "      <td>17.940000</td>\n",
       "      <td>689.000000</td>\n",
       "      <td>685.000000</td>\n",
       "      <td>12000.00000</td>\n",
       "      <td>12000.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>369.520000</td>\n",
       "      <td>11.490000</td>\n",
       "      <td>689.000000</td>\n",
       "      <td>685.000000</td>\n",
       "      <td>12000.00000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>31.000000</td>\n",
       "      <td>44.000000</td>\n",
       "      <td>67.000000</td>\n",
       "      <td>11.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>75%</th>\n",
       "      <td>9.400000e+04</td>\n",
       "      <td>132700.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>25.320000</td>\n",
       "      <td>23.477500</td>\n",
       "      <td>714.000000</td>\n",
       "      <td>710.000000</td>\n",
       "      <td>20000.00000</td>\n",
       "      <td>20000.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>572.850000</td>\n",
       "      <td>14.330000</td>\n",
       "      <td>734.000000</td>\n",
       "      <td>730.000000</td>\n",
       "      <td>20000.00000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>50.000000</td>\n",
       "      <td>63.000000</td>\n",
       "      <td>81.000000</td>\n",
       "      <td>15.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.00000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>max</th>\n",
       "      <td>9.000000e+06</td>\n",
       "      <td>500000.000000</td>\n",
       "      <td>15.00000</td>\n",
       "      <td>999.000000</td>\n",
       "      <td>43.860000</td>\n",
       "      <td>850.000000</td>\n",
       "      <td>845.000000</td>\n",
       "      <td>35000.00000</td>\n",
       "      <td>35000.000000</td>\n",
       "      <td>5.000000</td>\n",
       "      <td>1354.660000</td>\n",
       "      <td>28.990000</td>\n",
       "      <td>850.000000</td>\n",
       "      <td>845.000000</td>\n",
       "      <td>35000.00000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>152.000000</td>\n",
       "      <td>152.000000</td>\n",
       "      <td>120.000000</td>\n",
       "      <td>67.000000</td>\n",
       "      <td>23.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.00000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.00000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.00000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.00000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.00000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.00000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.00000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "       continuous_annual_inc  continuous_annual_inc_joint  \\\n",
       "count           5.000000e+04                   220.000000   \n",
       "mean            7.835886e+04                109248.802545   \n",
       "std             9.783053e+04                 52319.230212   \n",
       "min             0.000000e+00                 28000.000000   \n",
       "25%             4.700000e+04                 76000.000000   \n",
       "50%             6.500000e+04                 99000.000000   \n",
       "75%             9.400000e+04                132700.000000   \n",
       "max             9.000000e+06                500000.000000   \n",
       "\n",
       "       continuous_delinq_2yrs  continuous_dti  continuous_dti_joint  \\\n",
       "count             50000.00000    49999.000000            220.000000   \n",
       "mean                  0.34738       19.080239             18.831864   \n",
       "std                   0.91874        9.802002              7.586033   \n",
       "min                   0.00000        0.000000              3.000000   \n",
       "25%                   0.00000       12.410000             13.805000   \n",
       "50%                   0.00000       18.520000             17.940000   \n",
       "75%                   0.00000       25.320000             23.477500   \n",
       "max                  15.00000      999.000000             43.860000   \n",
       "\n",
       "       continuous_fico_range_high  continuous_fico_range_low  \\\n",
       "count                50000.000000               50000.000000   \n",
       "mean                   698.404460                 694.404300   \n",
       "std                     31.180915                  31.180139   \n",
       "min                    664.000000                 660.000000   \n",
       "25%                    674.000000                 670.000000   \n",
       "50%                    689.000000                 685.000000   \n",
       "75%                    714.000000                 710.000000   \n",
       "max                    850.000000                 845.000000   \n",
       "\n",
       "       continuous_funded_amnt  continuous_funded_amnt_inv  \\\n",
       "count             50000.00000                50000.000000   \n",
       "mean              14332.53650                14325.533000   \n",
       "std                8617.58487                 8612.853833   \n",
       "min                1000.00000                  950.000000   \n",
       "25%                7775.00000                 7750.000000   \n",
       "50%               12000.00000                12000.000000   \n",
       "75%               20000.00000                20000.000000   \n",
       "max               35000.00000                35000.000000   \n",
       "\n",
       "       continuous_inq_last_6mths  continuous_installment  continuous_int_rate  \\\n",
       "count               50000.000000            50000.000000         50000.000000   \n",
       "mean                    0.620820              428.892109            11.986125   \n",
       "std                     0.906035              254.569184             4.182402   \n",
       "min                     0.000000               14.770000             5.320000   \n",
       "25%                     0.000000              244.242500             9.170000   \n",
       "50%                     0.000000              369.520000            11.490000   \n",
       "75%                     1.000000              572.850000            14.330000   \n",
       "max                     5.000000             1354.660000            28.990000   \n",
       "\n",
       "       continuous_last_fico_range_high  continuous_last_fico_range_low  \\\n",
       "count                     50000.000000                    50000.000000   \n",
       "mean                        675.644860                      658.428400   \n",
       "std                          81.571458                      133.005552   \n",
       "min                           0.000000                        0.000000   \n",
       "25%                         624.000000                      620.000000   \n",
       "50%                         689.000000                      685.000000   \n",
       "75%                         734.000000                      730.000000   \n",
       "max                         850.000000                      845.000000   \n",
       "\n",
       "       continuous_loan_amnt   loan_status  continuous_mths_since_last_delinq  \\\n",
       "count           50000.00000  50000.000000                       26083.000000   \n",
       "mean            14332.53650      0.795760                          34.197485   \n",
       "std              8617.58487      0.403149                          21.828204   \n",
       "min              1000.00000      0.000000                           0.000000   \n",
       "25%              7775.00000      1.000000                          16.000000   \n",
       "50%             12000.00000      1.000000                          31.000000   \n",
       "75%             20000.00000      1.000000                          50.000000   \n",
       "max             35000.00000      1.000000                         152.000000   \n",
       "\n",
       "       continuous_mths_since_last_major_derog  \\\n",
       "count                            15052.000000   \n",
       "mean                                44.214589   \n",
       "std                                 21.352291   \n",
       "min                                  0.000000   \n",
       "25%                                 27.000000   \n",
       "50%                                 44.000000   \n",
       "75%                                 63.000000   \n",
       "max                                152.000000   \n",
       "\n",
       "       continuous_mths_since_last_record  continuous_open_acc  \\\n",
       "count                        9495.000000         50000.000000   \n",
       "mean                           65.500790            11.944960   \n",
       "std                            23.659021             5.676976   \n",
       "min                             0.000000             1.000000   \n",
       "25%                            51.000000             8.000000   \n",
       "50%                            67.000000            11.000000   \n",
       "75%                            81.000000            15.000000   \n",
       "max                           120.000000            67.000000   \n",
       "\n",
       "       continuous_pub_rec  discrete_addr_state_1_one_hot  \\\n",
       "count        50000.000000                   50000.000000   \n",
       "mean             0.252820                       0.033880   \n",
       "std              0.665455                       0.180922   \n",
       "min              0.000000                       0.000000   \n",
       "25%              0.000000                       0.000000   \n",
       "50%              0.000000                       0.000000   \n",
       "75%              0.000000                       0.000000   \n",
       "max             23.000000                       1.000000   \n",
       "\n",
       "       discrete_addr_state_2_one_hot  discrete_addr_state_3_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.002000                       0.038880   \n",
       "std                         0.044677                       0.193311   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_addr_state_4_one_hot  discrete_addr_state_5_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.033380                       0.018000   \n",
       "std                         0.179629                       0.132952   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_addr_state_6_one_hot  discrete_addr_state_7_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.012260                       0.004400   \n",
       "std                         0.110045                       0.066187   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_addr_state_8_one_hot  discrete_addr_state_9_one_hot  \\\n",
       "count                    50000.00000                   50000.000000   \n",
       "mean                         0.02756                       0.137380   \n",
       "std                          0.16371                       0.344252   \n",
       "min                          0.00000                       0.000000   \n",
       "25%                          0.00000                       0.000000   \n",
       "50%                          0.00000                       0.000000   \n",
       "75%                          0.00000                       0.000000   \n",
       "max                          1.00000                       1.000000   \n",
       "\n",
       "       discrete_addr_state_10_one_hot  discrete_addr_state_11_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.028700                        0.025140   \n",
       "std                          0.166964                        0.156552   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_12_one_hot  discrete_addr_state_13_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.016960                        0.023600   \n",
       "std                          0.129123                        0.151801   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_14_one_hot  discrete_addr_state_15_one_hot  \\\n",
       "count                     50000.00000                    50000.000000   \n",
       "mean                          0.08170                        0.083120   \n",
       "std                           0.27391                        0.276066   \n",
       "min                           0.00000                        0.000000   \n",
       "25%                           0.00000                        0.000000   \n",
       "50%                           0.00000                        0.000000   \n",
       "75%                           0.00000                        0.000000   \n",
       "max                           1.00000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_16_one_hot  discrete_addr_state_17_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.008160                        0.005800   \n",
       "std                          0.089964                        0.075937   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_18_one_hot  discrete_addr_state_19_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.012500                        0.019380   \n",
       "std                          0.111104                        0.137858   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_20_one_hot  discrete_addr_state_21_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.033520                        0.011400   \n",
       "std                          0.179992                        0.106162   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_22_one_hot  discrete_addr_state_23_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.071380                        0.020820   \n",
       "std                          0.257461                        0.142783   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_24_one_hot  discrete_addr_state_25_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.026920                        0.015980   \n",
       "std                          0.161851                        0.125399   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_26_one_hot  discrete_addr_state_27_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.002400                        0.022720   \n",
       "std                          0.048931                        0.149011   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_28_one_hot  discrete_addr_state_29_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.012980                        0.004700   \n",
       "std                          0.113189                        0.068396   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_30_one_hot  discrete_addr_state_31_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.001820                        0.037180   \n",
       "std                          0.042623                        0.189205   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_32_one_hot  discrete_addr_state_33_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.002920                        0.017400   \n",
       "std                          0.053959                        0.130758   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_34_one_hot  discrete_addr_state_35_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.004960                        0.004740   \n",
       "std                          0.070253                        0.068685   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_36_one_hot  discrete_addr_state_37_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.011040                        0.014720   \n",
       "std                          0.104491                        0.120431   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_38_one_hot  discrete_addr_state_39_one_hot  \\\n",
       "count                     50000.00000                    50000.000000   \n",
       "mean                          0.00742                        0.013760   \n",
       "std                           0.08582                        0.116494   \n",
       "min                           0.00000                        0.000000   \n",
       "25%                           0.00000                        0.000000   \n",
       "50%                           0.00000                        0.000000   \n",
       "75%                           0.00000                        0.000000   \n",
       "max                           1.00000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_40_one_hot  discrete_addr_state_41_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.003480                        0.002600   \n",
       "std                          0.058889                        0.050924   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_42_one_hot  discrete_addr_state_43_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.001840                        0.008940   \n",
       "std                          0.042856                        0.094129   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_44_one_hot  discrete_addr_state_45_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.010180                        0.005720   \n",
       "std                          0.100382                        0.075415   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_46_one_hot  discrete_addr_state_47_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.006480                        0.002780   \n",
       "std                          0.080238                        0.052653   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_addr_state_48_one_hot  discrete_addr_state_49_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.004400                        0.002000   \n",
       "std                          0.066187                        0.044677   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_application_type_1_one_hot  \\\n",
       "count                         50000.000000   \n",
       "mean                              0.995600   \n",
       "std                               0.066187   \n",
       "min                               0.000000   \n",
       "25%                               1.000000   \n",
       "50%                               1.000000   \n",
       "75%                               1.000000   \n",
       "max                               1.000000   \n",
       "\n",
       "       discrete_application_type_2_one_hot  discrete_emp_length_1_one_hot  \\\n",
       "count                         50000.000000                   50000.000000   \n",
       "mean                              0.004400                       0.331520   \n",
       "std                               0.066187                       0.470764   \n",
       "min                               0.000000                       0.000000   \n",
       "25%                               0.000000                       0.000000   \n",
       "50%                               0.000000                       0.000000   \n",
       "75%                               0.000000                       1.000000   \n",
       "max                               1.000000                       1.000000   \n",
       "\n",
       "       discrete_emp_length_2_one_hot  discrete_emp_length_3_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.081180                       0.057220   \n",
       "std                         0.273114                       0.232265   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_emp_length_4_one_hot  discrete_emp_length_5_one_hot  \\\n",
       "count                   50000.000000                    50000.00000   \n",
       "mean                        0.039380                        0.03538   \n",
       "std                         0.194499                        0.18474   \n",
       "min                         0.000000                        0.00000   \n",
       "25%                         0.000000                        0.00000   \n",
       "50%                         0.000000                        0.00000   \n",
       "75%                         0.000000                        0.00000   \n",
       "max                         1.000000                        1.00000   \n",
       "\n",
       "       discrete_emp_length_6_one_hot  discrete_emp_length_7_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.049620                       0.087340   \n",
       "std                         0.217161                       0.282335   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_emp_length_8_one_hot  discrete_emp_length_9_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.061060                       0.038000   \n",
       "std                         0.239443                       0.191198   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_emp_length_10_one_hot  discrete_emp_length_11_one_hot  \\\n",
       "count                    50000.000000                    50000.000000   \n",
       "mean                         0.089660                        0.066180   \n",
       "std                          0.285697                        0.248599   \n",
       "min                          0.000000                        0.000000   \n",
       "25%                          0.000000                        0.000000   \n",
       "50%                          0.000000                        0.000000   \n",
       "75%                          0.000000                        0.000000   \n",
       "max                          1.000000                        1.000000   \n",
       "\n",
       "       discrete_emp_length_12_one_hot  discrete_grade_1_one_hot  \\\n",
       "count                    50000.000000              50000.000000   \n",
       "mean                         0.063460                  0.278480   \n",
       "std                          0.243791                  0.448255   \n",
       "min                          0.000000                  0.000000   \n",
       "25%                          0.000000                  0.000000   \n",
       "50%                          0.000000                  0.000000   \n",
       "75%                          0.000000                  1.000000   \n",
       "max                          1.000000                  1.000000   \n",
       "\n",
       "       discrete_grade_2_one_hot  discrete_grade_3_one_hot  \\\n",
       "count              50000.000000              50000.000000   \n",
       "mean                   0.315720                  0.018980   \n",
       "std                    0.464807                  0.136456   \n",
       "min                    0.000000                  0.000000   \n",
       "25%                    0.000000                  0.000000   \n",
       "50%                    0.000000                  0.000000   \n",
       "75%                    1.000000                  0.000000   \n",
       "max                    1.000000                  1.000000   \n",
       "\n",
       "       discrete_grade_4_one_hot  discrete_grade_5_one_hot  \\\n",
       "count              50000.000000              50000.000000   \n",
       "mean                   0.191520                  0.062880   \n",
       "std                    0.393501                  0.242749   \n",
       "min                    0.000000                  0.000000   \n",
       "25%                    0.000000                  0.000000   \n",
       "50%                    0.000000                  0.000000   \n",
       "75%                    0.000000                  0.000000   \n",
       "max                    1.000000                  1.000000   \n",
       "\n",
       "       discrete_grade_6_one_hot  discrete_grade_7_one_hot  \\\n",
       "count               50000.00000              50000.000000   \n",
       "mean                    0.12806                  0.004360   \n",
       "std                     0.33416                  0.065887   \n",
       "min                     0.00000                  0.000000   \n",
       "25%                     0.00000                  0.000000   \n",
       "50%                     0.00000                  0.000000   \n",
       "75%                     0.00000                  0.000000   \n",
       "max                     1.00000                  1.000000   \n",
       "\n",
       "       discrete_home_ownership_1_one_hot  discrete_home_ownership_2_one_hot  \\\n",
       "count                       50000.000000                       50000.000000   \n",
       "mean                            0.486440                           0.397960   \n",
       "std                             0.499821                           0.489482   \n",
       "min                             0.000000                           0.000000   \n",
       "25%                             0.000000                           0.000000   \n",
       "50%                             0.000000                           0.000000   \n",
       "75%                             1.000000                           1.000000   \n",
       "max                             1.000000                           1.000000   \n",
       "\n",
       "       discrete_home_ownership_3_one_hot  discrete_home_ownership_4_one_hot  \\\n",
       "count                       50000.000000                       50000.000000   \n",
       "mean                            0.115580                           0.000020   \n",
       "std                             0.319724                           0.004472   \n",
       "min                             0.000000                           0.000000   \n",
       "25%                             0.000000                           0.000000   \n",
       "50%                             0.000000                           0.000000   \n",
       "75%                             0.000000                           0.000000   \n",
       "max                             1.000000                           1.000000   \n",
       "\n",
       "       discrete_policy_code_1_one_hot  discrete_purpose_1_one_hot  \\\n",
       "count                         50000.0                50000.000000   \n",
       "mean                              1.0                    0.562380   \n",
       "std                               0.0                    0.496098   \n",
       "min                               1.0                    0.000000   \n",
       "25%                               1.0                    0.000000   \n",
       "50%                               1.0                    1.000000   \n",
       "75%                               1.0                    1.000000   \n",
       "max                               1.0                    1.000000   \n",
       "\n",
       "       discrete_purpose_2_one_hot  discrete_purpose_3_one_hot  \\\n",
       "count                50000.000000                50000.000000   \n",
       "mean                     0.009600                    0.061500   \n",
       "std                      0.097509                    0.240248   \n",
       "min                      0.000000                    0.000000   \n",
       "25%                      0.000000                    0.000000   \n",
       "50%                      0.000000                    0.000000   \n",
       "75%                      0.000000                    0.000000   \n",
       "max                      1.000000                    1.000000   \n",
       "\n",
       "       discrete_purpose_4_one_hot  discrete_purpose_5_one_hot  \\\n",
       "count                50000.000000                50000.000000   \n",
       "mean                     0.021300                    0.249700   \n",
       "std                      0.144384                    0.432844   \n",
       "min                      0.000000                    0.000000   \n",
       "25%                      0.000000                    0.000000   \n",
       "50%                      0.000000                    0.000000   \n",
       "75%                      0.000000                    0.000000   \n",
       "max                      1.000000                    1.000000   \n",
       "\n",
       "       discrete_purpose_6_one_hot  discrete_purpose_7_one_hot  \\\n",
       "count                50000.000000                50000.000000   \n",
       "mean                     0.057880                    0.003760   \n",
       "std                      0.233519                    0.061204   \n",
       "min                      0.000000                    0.000000   \n",
       "25%                      0.000000                    0.000000   \n",
       "50%                      0.000000                    0.000000   \n",
       "75%                      0.000000                    0.000000   \n",
       "max                      1.000000                    1.000000   \n",
       "\n",
       "       discrete_purpose_8_one_hot  discrete_purpose_9_one_hot  \\\n",
       "count                50000.000000                50000.000000   \n",
       "mean                     0.005900                    0.008860   \n",
       "std                      0.076585                    0.093711   \n",
       "min                      0.000000                    0.000000   \n",
       "25%                      0.000000                    0.000000   \n",
       "50%                      0.000000                    0.000000   \n",
       "75%                      0.000000                    0.000000   \n",
       "max                      1.000000                    1.000000   \n",
       "\n",
       "       discrete_purpose_10_one_hot  discrete_purpose_11_one_hot  \\\n",
       "count                 50000.000000                 50000.000000   \n",
       "mean                      0.012560                     0.006040   \n",
       "std                       0.111366                     0.077483   \n",
       "min                       0.000000                     0.000000   \n",
       "25%                       0.000000                     0.000000   \n",
       "50%                       0.000000                     0.000000   \n",
       "75%                       0.000000                     0.000000   \n",
       "max                       1.000000                     1.000000   \n",
       "\n",
       "       discrete_purpose_12_one_hot  discrete_pymnt_plan_1_one_hot  \\\n",
       "count                 50000.000000                        50000.0   \n",
       "mean                      0.000520                            1.0   \n",
       "std                       0.022798                            0.0   \n",
       "min                       0.000000                            1.0   \n",
       "25%                       0.000000                            1.0   \n",
       "50%                       0.000000                            1.0   \n",
       "75%                       0.000000                            1.0   \n",
       "max                       1.000000                            1.0   \n",
       "\n",
       "       discrete_sub_grade_1_one_hot  discrete_sub_grade_2_one_hot  \\\n",
       "count                  50000.000000                  50000.000000   \n",
       "mean                       0.056600                      0.065920   \n",
       "std                        0.231079                      0.248145   \n",
       "min                        0.000000                      0.000000   \n",
       "25%                        0.000000                      0.000000   \n",
       "50%                        0.000000                      0.000000   \n",
       "75%                        0.000000                      0.000000   \n",
       "max                        1.000000                      1.000000   \n",
       "\n",
       "       discrete_sub_grade_3_one_hot  discrete_sub_grade_4_one_hot  \\\n",
       "count                  50000.000000                  50000.000000   \n",
       "mean                       0.067100                      0.005860   \n",
       "std                        0.250198                      0.076327   \n",
       "min                        0.000000                      0.000000   \n",
       "25%                        0.000000                      0.000000   \n",
       "50%                        0.000000                      0.000000   \n",
       "75%                        0.000000                      0.000000   \n",
       "max                        1.000000                      1.000000   \n",
       "\n",
       "       discrete_sub_grade_5_one_hot  discrete_sub_grade_6_one_hot  \\\n",
       "count                  50000.000000                  50000.000000   \n",
       "mean                       0.055120                      0.061300   \n",
       "std                        0.228217                      0.239882   \n",
       "min                        0.000000                      0.000000   \n",
       "25%                        0.000000                      0.000000   \n",
       "50%                        0.000000                      0.000000   \n",
       "75%                        0.000000                      0.000000   \n",
       "max                        1.000000                      1.000000   \n",
       "\n",
       "       discrete_sub_grade_7_one_hot  discrete_sub_grade_8_one_hot  \\\n",
       "count                  50000.000000                  50000.000000   \n",
       "mean                       0.056820                      0.030020   \n",
       "std                        0.231501                      0.170644   \n",
       "min                        0.000000                      0.000000   \n",
       "25%                        0.000000                      0.000000   \n",
       "50%                        0.000000                      0.000000   \n",
       "75%                        0.000000                      0.000000   \n",
       "max                        1.000000                      1.000000   \n",
       "\n",
       "       discrete_sub_grade_9_one_hot  discrete_sub_grade_10_one_hot  \\\n",
       "count                  50000.000000                   50000.000000   \n",
       "mean                       0.064140                       0.059300   \n",
       "std                        0.245005                       0.236188   \n",
       "min                        0.000000                       0.000000   \n",
       "25%                        0.000000                       0.000000   \n",
       "50%                        0.000000                       0.000000   \n",
       "75%                        0.000000                       0.000000   \n",
       "max                        1.000000                       1.000000   \n",
       "\n",
       "       discrete_sub_grade_11_one_hot  discrete_sub_grade_12_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.015560                       0.036840   \n",
       "std                         0.123767                       0.188371   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_sub_grade_13_one_hot  discrete_sub_grade_14_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.012960                       0.041540   \n",
       "std                         0.113103                       0.199538   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_sub_grade_15_one_hot  discrete_sub_grade_16_one_hot  \\\n",
       "count                   50000.000000                    50000.00000   \n",
       "mean                        0.044880                        0.02210   \n",
       "std                         0.207043                        0.14701   \n",
       "min                         0.000000                        0.00000   \n",
       "25%                         0.000000                        0.00000   \n",
       "50%                         0.000000                        0.00000   \n",
       "75%                         0.000000                        0.00000   \n",
       "max                         1.000000                        1.00000   \n",
       "\n",
       "       discrete_sub_grade_17_one_hot  discrete_sub_grade_18_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.003780                       0.036220   \n",
       "std                         0.061366                       0.186839   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_sub_grade_19_one_hot  discrete_sub_grade_20_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.066360                       0.023940   \n",
       "std                         0.248913                       0.152864   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_sub_grade_21_one_hot  discrete_sub_grade_22_one_hot  \\\n",
       "count                   50000.000000                    50000.00000   \n",
       "mean                        0.017880                        0.05160   \n",
       "std                         0.132517                        0.22122   \n",
       "min                         0.000000                        0.00000   \n",
       "25%                         0.000000                        0.00000   \n",
       "50%                         0.000000                        0.00000   \n",
       "75%                         0.000000                        0.00000   \n",
       "max                         1.000000                        1.00000   \n",
       "\n",
       "       discrete_sub_grade_23_one_hot  discrete_sub_grade_24_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.004620                       0.009440   \n",
       "std                         0.067814                       0.096701   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_sub_grade_25_one_hot  discrete_sub_grade_26_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.027920                       0.016740   \n",
       "std                         0.164745                       0.128297   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_sub_grade_27_one_hot  discrete_sub_grade_28_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.002320                       0.008180   \n",
       "std                         0.048111                       0.090074   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_sub_grade_29_one_hot  discrete_sub_grade_30_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.028180                       0.001420   \n",
       "std                         0.165488                       0.037656   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_sub_grade_31_one_hot  discrete_sub_grade_32_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.001260                       0.000880   \n",
       "std                         0.035474                       0.029652   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_sub_grade_33_one_hot  discrete_sub_grade_34_one_hot  \\\n",
       "count                   50000.000000                   50000.000000   \n",
       "mean                        0.000520                       0.002400   \n",
       "std                         0.022798                       0.048931   \n",
       "min                         0.000000                       0.000000   \n",
       "25%                         0.000000                       0.000000   \n",
       "50%                         0.000000                       0.000000   \n",
       "75%                         0.000000                       0.000000   \n",
       "max                         1.000000                       1.000000   \n",
       "\n",
       "       discrete_sub_grade_35_one_hot  discrete_term_1_one_hot  \\\n",
       "count                   50000.000000             50000.000000   \n",
       "mean                        0.000280                 0.773740   \n",
       "std                         0.016731                 0.418414   \n",
       "min                         0.000000                 0.000000   \n",
       "25%                         0.000000                 1.000000   \n",
       "50%                         0.000000                 1.000000   \n",
       "75%                         0.000000                 1.000000   \n",
       "max                         1.000000                 1.000000   \n",
       "\n",
       "       discrete_term_2_one_hot  \n",
       "count             50000.000000  \n",
       "mean                  0.226260  \n",
       "std                   0.418414  \n",
       "min                   0.000000  \n",
       "25%                   0.000000  \n",
       "50%                   0.000000  \n",
       "75%                   0.000000  \n",
       "max                   1.000000  "
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Summary statistics (count/mean/std/min/quartiles/max) for every numeric column\n",
    "train_final.describe()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "训练集\n",
      "Counter({1: 39788, 0: 10212})\n",
      "验证集\n",
      "Counter({1: 40226, 0: 9774})\n"
     ]
    }
   ],
   "source": [
    "# Check whether the target variable (loan_status) is balanced.\n",
    "# Result: mildly imbalanced, roughly 4:1 (positive:negative).\n",
    "# Counter tallies how many samples fall into each class.\n",
    "# NOTE(review): test_final is printed under the label '验证集' (validation set);\n",
    "# it is actually the held-out test file — confirm intended terminology.\n",
    "from collections import Counter\n",
    "print('训练集')\n",
    "print(Counter(train_final['loan_status']))\n",
    "print('验证集')\n",
    "print(Counter(test_final['loan_status']))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "MJU3SxO-fGjp"
   },
   "source": [
    "## 二.建立基准的Tree-Based Models  \n",
    "\n",
    "---\n",
    "In this example, we use lightgbm as the tree model of choice."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "metadata": {
    "id": "tWEKOv0peoNj"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "训练集和测试集样本量\n",
      "40000\n",
      "10000\n"
     ]
    }
   ],
   "source": [
    "# Split the provided training data into a training set and a test set.\n",
    "# Target variable: loan_status.\n",
    "# stratify keeps the ~4:1 class ratio identical in both splits;\n",
    "# random_state=100 makes the split reproducible.\n",
    "from sklearn.model_selection import train_test_split\n",
    "X_train, X_test, y_train, y_test = train_test_split(train_final.drop(columns='loan_status'),train_final['loan_status'], test_size=0.2,stratify=train_final['loan_status'],random_state=100)\n",
    "# Re-attach labels so each split is a single DataFrame (features + target)\n",
    "train = pd.concat([X_train,y_train], axis=1)\n",
    "test = pd.concat([X_test,y_test], axis=1)\n",
    "print('训练集和测试集样本量')\n",
    "print(len(train))\n",
    "print(len(test))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "metadata": {
    "id": "XL0FibSQ2kxa"
   },
   "outputs": [],
   "source": [
    "# Wrap the train/test DataFrames into LightGBM Dataset objects:\n",
    "# features = all columns except the target, label = loan_status.\n",
    "import lightgbm as lgb\n",
    "train_dataset = lgb.Dataset(train.drop(columns='loan_status'), train['loan_status'])\n",
    "test_dataset = lgb.Dataset(test.drop(columns='loan_status'), test['loan_status'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "metadata": {
    "id": "fkpnHn_S3BGV"
   },
   "outputs": [],
   "source": [
    "# Baseline LightGBM hyper-parameters:\n",
    "#   num_leaves=31        - LightGBM's default tree complexity\n",
    "#   objective='binary'   - binary classification (loan_status is 0/1)\n",
    "#   metric='binary_error'- fraction misclassified; NOTE(review): this is an\n",
    "#   accuracy-style metric and the classes are ~4:1 imbalanced (see Counter\n",
    "#   output above) — binary_logloss or AUC may be more informative. Confirm.\n",
    "param = {'num_leaves': 31, 'objective': 'binary', 'metric':'binary_error'}\n",
    "# Upper bound on the number of boosting rounds\n",
    "num_round = 2000"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 50,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "PtLIAW9R3IKt",
    "outputId": "e4ac9d69-6e02-43ac-b79f-c8b98e766043"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[LightGBM] [Info] Number of positive: 31830, number of negative: 8170\n",
      "[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.006625 seconds.\n",
      "You can set `force_row_wise=true` to remove the overhead.\n",
      "And if memory is not enough, you can set `force_col_wise=true`.\n",
      "[LightGBM] [Info] Total Bins 2579\n",
      "[LightGBM] [Info] Number of data points in the train set: 40000, number of used features: 141\n",
      "[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.795750 -> initscore=1.359940\n",
      "[LightGBM] [Info] Start training from score 1.359940\n",
      "[1]\ttraining's binary_error: 0.20425\tvalid_1's binary_error: 0.2042\n",
      "[2]\ttraining's binary_error: 0.20425\tvalid_1's binary_error: 0.2042\n",
      "[3]\ttraining's binary_error: 0.20425\tvalid_1's binary_error: 0.2042\n",
      "[4]\ttraining's binary_error: 0.20425\tvalid_1's binary_error: 0.2042\n",
      "[5]\ttraining's binary_error: 0.15065\tvalid_1's binary_error: 0.1461\n",
      "[6]\ttraining's binary_error: 0.108925\tvalid_1's binary_error: 0.1071\n",
      "[7]\ttraining's binary_error: 0.09985\tvalid_1's binary_error: 0.1008\n",
      "[8]\ttraining's binary_error: 0.093875\tvalid_1's binary_error: 0.0952\n",
      "[9]\ttraining's binary_error: 0.08915\tvalid_1's binary_error: 0.0892\n",
      "[10]\ttraining's binary_error: 0.085325\tvalid_1's binary_error: 0.086\n",
      "[11]\ttraining's binary_error: 0.08205\tvalid_1's binary_error: 0.0847\n",
      "[12]\ttraining's binary_error: 0.08045\tvalid_1's binary_error: 0.0823\n",
      "[13]\ttraining's binary_error: 0.07995\tvalid_1's binary_error: 0.0821\n",
      "[14]\ttraining's binary_error: 0.079\tvalid_1's binary_error: 0.0811\n",
      "[15]\ttraining's binary_error: 0.07855\tvalid_1's binary_error: 0.0809\n",
      "[16]\ttraining's binary_error: 0.078175\tvalid_1's binary_error: 0.0802\n",
      "[17]\ttraining's binary_error: 0.077325\tvalid_1's binary_error: 0.08\n",
      "[18]\ttraining's binary_error: 0.07635\tvalid_1's binary_error: 0.0795\n",
      "[19]\ttraining's binary_error: 0.075975\tvalid_1's binary_error: 0.0787\n",
      "[20]\ttraining's binary_error: 0.075875\tvalid_1's binary_error: 0.0784\n",
      "[21]\ttraining's binary_error: 0.075625\tvalid_1's binary_error: 0.0786\n",
      "[22]\ttraining's binary_error: 0.075325\tvalid_1's binary_error: 0.0788\n",
      "[23]\ttraining's binary_error: 0.075\tvalid_1's binary_error: 0.0788\n",
      "[24]\ttraining's binary_error: 0.0747\tvalid_1's binary_error: 0.0787\n",
      "[25]\ttraining's binary_error: 0.0745\tvalid_1's binary_error: 0.0784\n",
      "[26]\ttraining's binary_error: 0.0744\tvalid_1's binary_error: 0.078\n",
      "[27]\ttraining's binary_error: 0.074525\tvalid_1's binary_error: 0.078\n",
      "[28]\ttraining's binary_error: 0.07455\tvalid_1's binary_error: 0.0776\n",
      "[29]\ttraining's binary_error: 0.07445\tvalid_1's binary_error: 0.0781\n",
      "[30]\ttraining's binary_error: 0.07425\tvalid_1's binary_error: 0.0774\n",
      "[31]\ttraining's binary_error: 0.073975\tvalid_1's binary_error: 0.0777\n",
      "[32]\ttraining's binary_error: 0.0738\tvalid_1's binary_error: 0.0773\n",
      "[33]\ttraining's binary_error: 0.07405\tvalid_1's binary_error: 0.0776\n",
      "[34]\ttraining's binary_error: 0.07385\tvalid_1's binary_error: 0.0777\n",
      "[35]\ttraining's binary_error: 0.073775\tvalid_1's binary_error: 0.0779\n",
      "[36]\ttraining's binary_error: 0.073675\tvalid_1's binary_error: 0.0782\n",
      "[37]\ttraining's binary_error: 0.073475\tvalid_1's binary_error: 0.0787\n",
      "[38]\ttraining's binary_error: 0.073375\tvalid_1's binary_error: 0.0787\n",
      "[39]\ttraining's binary_error: 0.073175\tvalid_1's binary_error: 0.0786\n",
      "[40]\ttraining's binary_error: 0.073125\tvalid_1's binary_error: 0.0788\n",
      "[41]\ttraining's binary_error: 0.07295\tvalid_1's binary_error: 0.0792\n",
      "[42]\ttraining's binary_error: 0.072875\tvalid_1's binary_error: 0.0794\n",
      "[43]\ttraining's binary_error: 0.072575\tvalid_1's binary_error: 0.0788\n",
      "[44]\ttraining's binary_error: 0.07255\tvalid_1's binary_error: 0.0788\n",
      "[45]\ttraining's binary_error: 0.072525\tvalid_1's binary_error: 0.0789\n",
      "[46]\ttraining's binary_error: 0.072575\tvalid_1's binary_error: 0.0789\n",
      "[47]\ttraining's binary_error: 0.072275\tvalid_1's binary_error: 0.0785\n",
      "[48]\ttraining's binary_error: 0.07235\tvalid_1's binary_error: 0.0784\n",
      "[49]\ttraining's binary_error: 0.0722\tvalid_1's binary_error: 0.0786\n",
      "[50]\ttraining's binary_error: 0.071975\tvalid_1's binary_error: 0.0786\n",
      "[51]\ttraining's binary_error: 0.071775\tvalid_1's binary_error: 0.0784\n",
      "[52]\ttraining's binary_error: 0.0717\tvalid_1's binary_error: 0.0784\n",
      "[53]\ttraining's binary_error: 0.071725\tvalid_1's binary_error: 0.0784\n",
      "[54]\ttraining's binary_error: 0.0716\tvalid_1's binary_error: 0.0783\n",
      "[55]\ttraining's binary_error: 0.071575\tvalid_1's binary_error: 0.0784\n",
      "[56]\ttraining's binary_error: 0.071575\tvalid_1's binary_error: 0.0784\n",
      "[57]\ttraining's binary_error: 0.071525\tvalid_1's binary_error: 0.0784\n",
      "[58]\ttraining's binary_error: 0.071325\tvalid_1's binary_error: 0.0783\n",
      "[59]\ttraining's binary_error: 0.071225\tvalid_1's binary_error: 0.0782\n",
      "[60]\ttraining's binary_error: 0.071275\tvalid_1's binary_error: 0.0782\n",
      "[61]\ttraining's binary_error: 0.07115\tvalid_1's binary_error: 0.078\n",
      "[62]\ttraining's binary_error: 0.07115\tvalid_1's binary_error: 0.0781\n",
      "[63]\ttraining's binary_error: 0.070975\tvalid_1's binary_error: 0.0782\n",
      "[64]\ttraining's binary_error: 0.0706\tvalid_1's binary_error: 0.0783\n",
      "[65]\ttraining's binary_error: 0.070375\tvalid_1's binary_error: 0.0781\n",
      "[66]\ttraining's binary_error: 0.070125\tvalid_1's binary_error: 0.0786\n",
      "[67]\ttraining's binary_error: 0.069925\tvalid_1's binary_error: 0.0786\n",
      "[68]\ttraining's binary_error: 0.06955\tvalid_1's binary_error: 0.079\n",
      "[69]\ttraining's binary_error: 0.0693\tvalid_1's binary_error: 0.079\n",
      "[70]\ttraining's binary_error: 0.06925\tvalid_1's binary_error: 0.0788\n",
      "[71]\ttraining's binary_error: 0.06915\tvalid_1's binary_error: 0.0781\n",
      "[72]\ttraining's binary_error: 0.068975\tvalid_1's binary_error: 0.078\n",
      "[73]\ttraining's binary_error: 0.068675\tvalid_1's binary_error: 0.078\n",
      "[74]\ttraining's binary_error: 0.068375\tvalid_1's binary_error: 0.0782\n",
      "[75]\ttraining's binary_error: 0.0684\tvalid_1's binary_error: 0.0778\n",
      "[76]\ttraining's binary_error: 0.068275\tvalid_1's binary_error: 0.0778\n",
      "[77]\ttraining's binary_error: 0.068175\tvalid_1's binary_error: 0.0779\n",
      "[78]\ttraining's binary_error: 0.068175\tvalid_1's binary_error: 0.0782\n",
      "[79]\ttraining's binary_error: 0.06795\tvalid_1's binary_error: 0.078\n",
      "[80]\ttraining's binary_error: 0.06765\tvalid_1's binary_error: 0.0781\n",
      "[81]\ttraining's binary_error: 0.0675\tvalid_1's binary_error: 0.0782\n",
      "[82]\ttraining's binary_error: 0.0673\tvalid_1's binary_error: 0.0784\n",
      "[83]\ttraining's binary_error: 0.067225\tvalid_1's binary_error: 0.0783\n",
      "[84]\ttraining's binary_error: 0.067125\tvalid_1's binary_error: 0.0782\n",
      "[85]\ttraining's binary_error: 0.06685\tvalid_1's binary_error: 0.0783\n",
      "[86]\ttraining's binary_error: 0.06675\tvalid_1's binary_error: 0.0783\n",
      "[87]\ttraining's binary_error: 0.06655\tvalid_1's binary_error: 0.0785\n",
      "[88]\ttraining's binary_error: 0.066425\tvalid_1's binary_error: 0.0786\n",
      "[89]\ttraining's binary_error: 0.06625\tvalid_1's binary_error: 0.0784\n",
      "[90]\ttraining's binary_error: 0.0661\tvalid_1's binary_error: 0.0782\n",
      "[91]\ttraining's binary_error: 0.066125\tvalid_1's binary_error: 0.078\n",
      "[92]\ttraining's binary_error: 0.065975\tvalid_1's binary_error: 0.0778\n",
      "[93]\ttraining's binary_error: 0.06585\tvalid_1's binary_error: 0.0781\n",
      "[94]\ttraining's binary_error: 0.065575\tvalid_1's binary_error: 0.078\n",
      "[95]\ttraining's binary_error: 0.0653\tvalid_1's binary_error: 0.0781\n",
      "[96]\ttraining's binary_error: 0.065275\tvalid_1's binary_error: 0.0779\n",
      "[97]\ttraining's binary_error: 0.06495\tvalid_1's binary_error: 0.0779\n",
      "[98]\ttraining's binary_error: 0.064825\tvalid_1's binary_error: 0.0779\n",
      "[99]\ttraining's binary_error: 0.064725\tvalid_1's binary_error: 0.0779\n",
      "[100]\ttraining's binary_error: 0.0647\tvalid_1's binary_error: 0.0779\n",
      "[101]\ttraining's binary_error: 0.0645\tvalid_1's binary_error: 0.0779\n",
      "[102]\ttraining's binary_error: 0.06435\tvalid_1's binary_error: 0.078\n",
      "[103]\ttraining's binary_error: 0.0642\tvalid_1's binary_error: 0.0781\n",
      "[104]\ttraining's binary_error: 0.064025\tvalid_1's binary_error: 0.0781\n",
      "[105]\ttraining's binary_error: 0.064\tvalid_1's binary_error: 0.0782\n",
      "[106]\ttraining's binary_error: 0.0639\tvalid_1's binary_error: 0.0783\n",
      "[107]\ttraining's binary_error: 0.063725\tvalid_1's binary_error: 0.0784\n",
      "[108]\ttraining's binary_error: 0.06355\tvalid_1's binary_error: 0.0784\n",
      "[109]\ttraining's binary_error: 0.063425\tvalid_1's binary_error: 0.0784\n",
      "[110]\ttraining's binary_error: 0.0633\tvalid_1's binary_error: 0.0785\n",
      "[111]\ttraining's binary_error: 0.06295\tvalid_1's binary_error: 0.0784\n",
      "[112]\ttraining's binary_error: 0.0628\tvalid_1's binary_error: 0.0784\n",
      "[113]\ttraining's binary_error: 0.0626\tvalid_1's binary_error: 0.0783\n",
      "[114]\ttraining's binary_error: 0.062375\tvalid_1's binary_error: 0.0784\n",
      "[115]\ttraining's binary_error: 0.062425\tvalid_1's binary_error: 0.0785\n",
      "[116]\ttraining's binary_error: 0.0621\tvalid_1's binary_error: 0.0784\n",
      "[117]\ttraining's binary_error: 0.062075\tvalid_1's binary_error: 0.0782\n",
      "[118]\ttraining's binary_error: 0.0619\tvalid_1's binary_error: 0.0784\n",
      "[119]\ttraining's binary_error: 0.061675\tvalid_1's binary_error: 0.0786\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[120]\ttraining's binary_error: 0.0615\tvalid_1's binary_error: 0.0787\n",
      "[121]\ttraining's binary_error: 0.06125\tvalid_1's binary_error: 0.0785\n",
      "[122]\ttraining's binary_error: 0.061125\tvalid_1's binary_error: 0.0785\n",
      "[123]\ttraining's binary_error: 0.0611\tvalid_1's binary_error: 0.0784\n",
      "[124]\ttraining's binary_error: 0.061\tvalid_1's binary_error: 0.0784\n",
      "[125]\ttraining's binary_error: 0.06095\tvalid_1's binary_error: 0.0785\n",
      "[126]\ttraining's binary_error: 0.06065\tvalid_1's binary_error: 0.0786\n",
      "[127]\ttraining's binary_error: 0.06065\tvalid_1's binary_error: 0.0786\n",
      "[128]\ttraining's binary_error: 0.060475\tvalid_1's binary_error: 0.0787\n",
      "[129]\ttraining's binary_error: 0.06045\tvalid_1's binary_error: 0.0787\n",
      "[130]\ttraining's binary_error: 0.060375\tvalid_1's binary_error: 0.0786\n",
      "[131]\ttraining's binary_error: 0.06025\tvalid_1's binary_error: 0.0786\n",
      "[132]\ttraining's binary_error: 0.060075\tvalid_1's binary_error: 0.0787\n",
      "[133]\ttraining's binary_error: 0.059925\tvalid_1's binary_error: 0.0788\n",
      "[134]\ttraining's binary_error: 0.059775\tvalid_1's binary_error: 0.0788\n",
      "[135]\ttraining's binary_error: 0.0597\tvalid_1's binary_error: 0.0788\n",
      "[136]\ttraining's binary_error: 0.059625\tvalid_1's binary_error: 0.079\n",
      "[137]\ttraining's binary_error: 0.059625\tvalid_1's binary_error: 0.079\n",
      "[138]\ttraining's binary_error: 0.05955\tvalid_1's binary_error: 0.0789\n",
      "[139]\ttraining's binary_error: 0.059375\tvalid_1's binary_error: 0.0788\n",
      "[140]\ttraining's binary_error: 0.059325\tvalid_1's binary_error: 0.079\n",
      "[141]\ttraining's binary_error: 0.0592\tvalid_1's binary_error: 0.0788\n",
      "[142]\ttraining's binary_error: 0.05895\tvalid_1's binary_error: 0.0786\n",
      "[143]\ttraining's binary_error: 0.0587\tvalid_1's binary_error: 0.0786\n",
      "[144]\ttraining's binary_error: 0.0588\tvalid_1's binary_error: 0.0787\n",
      "[145]\ttraining's binary_error: 0.058675\tvalid_1's binary_error: 0.0785\n",
      "[146]\ttraining's binary_error: 0.058575\tvalid_1's binary_error: 0.0782\n",
      "[147]\ttraining's binary_error: 0.058425\tvalid_1's binary_error: 0.0785\n",
      "[148]\ttraining's binary_error: 0.058175\tvalid_1's binary_error: 0.0781\n",
      "[149]\ttraining's binary_error: 0.057875\tvalid_1's binary_error: 0.0783\n",
      "[150]\ttraining's binary_error: 0.05785\tvalid_1's binary_error: 0.0784\n",
      "[151]\ttraining's binary_error: 0.05765\tvalid_1's binary_error: 0.0784\n",
      "[152]\ttraining's binary_error: 0.05755\tvalid_1's binary_error: 0.0784\n",
      "[153]\ttraining's binary_error: 0.05745\tvalid_1's binary_error: 0.0786\n",
      "[154]\ttraining's binary_error: 0.0573\tvalid_1's binary_error: 0.0786\n",
      "[155]\ttraining's binary_error: 0.05725\tvalid_1's binary_error: 0.0787\n",
      "[156]\ttraining's binary_error: 0.057225\tvalid_1's binary_error: 0.0786\n",
      "[157]\ttraining's binary_error: 0.057025\tvalid_1's binary_error: 0.0786\n",
      "[158]\ttraining's binary_error: 0.057025\tvalid_1's binary_error: 0.0786\n",
      "[159]\ttraining's binary_error: 0.056875\tvalid_1's binary_error: 0.0787\n",
      "[160]\ttraining's binary_error: 0.056525\tvalid_1's binary_error: 0.0787\n",
      "[161]\ttraining's binary_error: 0.056425\tvalid_1's binary_error: 0.0788\n",
      "[162]\ttraining's binary_error: 0.056225\tvalid_1's binary_error: 0.0789\n",
      "[163]\ttraining's binary_error: 0.056025\tvalid_1's binary_error: 0.0793\n",
      "[164]\ttraining's binary_error: 0.055925\tvalid_1's binary_error: 0.0796\n",
      "[165]\ttraining's binary_error: 0.05575\tvalid_1's binary_error: 0.0797\n",
      "[166]\ttraining's binary_error: 0.05565\tvalid_1's binary_error: 0.0795\n",
      "[167]\ttraining's binary_error: 0.05535\tvalid_1's binary_error: 0.0796\n",
      "[168]\ttraining's binary_error: 0.055225\tvalid_1's binary_error: 0.0795\n",
      "[169]\ttraining's binary_error: 0.055025\tvalid_1's binary_error: 0.0795\n",
      "[170]\ttraining's binary_error: 0.054925\tvalid_1's binary_error: 0.0795\n",
      "[171]\ttraining's binary_error: 0.054625\tvalid_1's binary_error: 0.0793\n",
      "[172]\ttraining's binary_error: 0.05465\tvalid_1's binary_error: 0.0792\n",
      "[173]\ttraining's binary_error: 0.054525\tvalid_1's binary_error: 0.0791\n",
      "[174]\ttraining's binary_error: 0.0542\tvalid_1's binary_error: 0.0791\n",
      "[175]\ttraining's binary_error: 0.05405\tvalid_1's binary_error: 0.0788\n",
      "[176]\ttraining's binary_error: 0.05395\tvalid_1's binary_error: 0.0786\n",
      "[177]\ttraining's binary_error: 0.05395\tvalid_1's binary_error: 0.0786\n",
      "[178]\ttraining's binary_error: 0.053725\tvalid_1's binary_error: 0.0786\n",
      "[179]\ttraining's binary_error: 0.053675\tvalid_1's binary_error: 0.0786\n",
      "[180]\ttraining's binary_error: 0.0535\tvalid_1's binary_error: 0.0786\n",
      "[181]\ttraining's binary_error: 0.053425\tvalid_1's binary_error: 0.0785\n",
      "[182]\ttraining's binary_error: 0.05325\tvalid_1's binary_error: 0.0785\n",
      "[183]\ttraining's binary_error: 0.0532\tvalid_1's binary_error: 0.0785\n",
      "[184]\ttraining's binary_error: 0.0531\tvalid_1's binary_error: 0.0786\n",
      "[185]\ttraining's binary_error: 0.053025\tvalid_1's binary_error: 0.0787\n",
      "[186]\ttraining's binary_error: 0.053\tvalid_1's binary_error: 0.0789\n",
      "[187]\ttraining's binary_error: 0.05295\tvalid_1's binary_error: 0.0789\n",
      "[188]\ttraining's binary_error: 0.052925\tvalid_1's binary_error: 0.0789\n",
      "[189]\ttraining's binary_error: 0.05285\tvalid_1's binary_error: 0.0789\n",
      "[190]\ttraining's binary_error: 0.05285\tvalid_1's binary_error: 0.0789\n",
      "[191]\ttraining's binary_error: 0.05275\tvalid_1's binary_error: 0.0791\n",
      "[192]\ttraining's binary_error: 0.052575\tvalid_1's binary_error: 0.0793\n",
      "[193]\ttraining's binary_error: 0.052275\tvalid_1's binary_error: 0.0793\n",
      "[194]\ttraining's binary_error: 0.0521\tvalid_1's binary_error: 0.0794\n",
      "[195]\ttraining's binary_error: 0.05195\tvalid_1's binary_error: 0.0793\n",
      "[196]\ttraining's binary_error: 0.05185\tvalid_1's binary_error: 0.0791\n",
      "[197]\ttraining's binary_error: 0.051725\tvalid_1's binary_error: 0.0789\n",
      "[198]\ttraining's binary_error: 0.05165\tvalid_1's binary_error: 0.079\n",
      "[199]\ttraining's binary_error: 0.05155\tvalid_1's binary_error: 0.079\n",
      "[200]\ttraining's binary_error: 0.051375\tvalid_1's binary_error: 0.0789\n",
      "[201]\ttraining's binary_error: 0.051375\tvalid_1's binary_error: 0.0789\n",
      "[202]\ttraining's binary_error: 0.051175\tvalid_1's binary_error: 0.0788\n",
      "[203]\ttraining's binary_error: 0.051\tvalid_1's binary_error: 0.0788\n",
      "[204]\ttraining's binary_error: 0.050675\tvalid_1's binary_error: 0.0788\n",
      "[205]\ttraining's binary_error: 0.0505\tvalid_1's binary_error: 0.0786\n",
      "[206]\ttraining's binary_error: 0.050475\tvalid_1's binary_error: 0.0787\n",
      "[207]\ttraining's binary_error: 0.050225\tvalid_1's binary_error: 0.0788\n",
      "[208]\ttraining's binary_error: 0.050125\tvalid_1's binary_error: 0.0785\n",
      "[209]\ttraining's binary_error: 0.049925\tvalid_1's binary_error: 0.0787\n",
      "[210]\ttraining's binary_error: 0.0499\tvalid_1's binary_error: 0.0787\n",
      "[211]\ttraining's binary_error: 0.0497\tvalid_1's binary_error: 0.0787\n",
      "[212]\ttraining's binary_error: 0.04965\tvalid_1's binary_error: 0.0786\n",
      "[213]\ttraining's binary_error: 0.049525\tvalid_1's binary_error: 0.0786\n",
      "[214]\ttraining's binary_error: 0.0494\tvalid_1's binary_error: 0.0787\n",
      "[215]\ttraining's binary_error: 0.0493\tvalid_1's binary_error: 0.079\n",
      "[216]\ttraining's binary_error: 0.049225\tvalid_1's binary_error: 0.079\n",
      "[217]\ttraining's binary_error: 0.049075\tvalid_1's binary_error: 0.0788\n",
      "[218]\ttraining's binary_error: 0.049025\tvalid_1's binary_error: 0.0788\n",
      "[219]\ttraining's binary_error: 0.0488\tvalid_1's binary_error: 0.0792\n",
      "[220]\ttraining's binary_error: 0.048675\tvalid_1's binary_error: 0.0792\n",
      "[221]\ttraining's binary_error: 0.04855\tvalid_1's binary_error: 0.0792\n",
      "[222]\ttraining's binary_error: 0.048375\tvalid_1's binary_error: 0.0792\n",
      "[223]\ttraining's binary_error: 0.04815\tvalid_1's binary_error: 0.0792\n",
      "[224]\ttraining's binary_error: 0.04805\tvalid_1's binary_error: 0.0794\n",
      "[225]\ttraining's binary_error: 0.047925\tvalid_1's binary_error: 0.0796\n",
      "[226]\ttraining's binary_error: 0.047875\tvalid_1's binary_error: 0.0796\n",
      "[227]\ttraining's binary_error: 0.047775\tvalid_1's binary_error: 0.0795\n",
      "[228]\ttraining's binary_error: 0.047825\tvalid_1's binary_error: 0.0796\n",
      "[229]\ttraining's binary_error: 0.0477\tvalid_1's binary_error: 0.0796\n",
      "[230]\ttraining's binary_error: 0.047575\tvalid_1's binary_error: 0.0798\n",
      "[231]\ttraining's binary_error: 0.047425\tvalid_1's binary_error: 0.0799\n",
      "[232]\ttraining's binary_error: 0.04725\tvalid_1's binary_error: 0.08\n",
      "[233]\ttraining's binary_error: 0.04715\tvalid_1's binary_error: 0.0801\n",
      "[234]\ttraining's binary_error: 0.04715\tvalid_1's binary_error: 0.0801\n",
      "[235]\ttraining's binary_error: 0.047\tvalid_1's binary_error: 0.0802\n",
      "[236]\ttraining's binary_error: 0.047\tvalid_1's binary_error: 0.0803\n",
      "[237]\ttraining's binary_error: 0.0467\tvalid_1's binary_error: 0.0805\n",
      "[238]\ttraining's binary_error: 0.046625\tvalid_1's binary_error: 0.0805\n",
      "[239]\ttraining's binary_error: 0.046575\tvalid_1's binary_error: 0.0806\n",
      "[240]\ttraining's binary_error: 0.04635\tvalid_1's binary_error: 0.0803\n",
      "[241]\ttraining's binary_error: 0.046025\tvalid_1's binary_error: 0.0806\n",
      "[242]\ttraining's binary_error: 0.0461\tvalid_1's binary_error: 0.0805\n",
      "[243]\ttraining's binary_error: 0.0461\tvalid_1's binary_error: 0.0805\n",
      "[244]\ttraining's binary_error: 0.046025\tvalid_1's binary_error: 0.0805\n",
      "[245]\ttraining's binary_error: 0.045975\tvalid_1's binary_error: 0.0807\n",
      "[246]\ttraining's binary_error: 0.0459\tvalid_1's binary_error: 0.0807\n",
      "[247]\ttraining's binary_error: 0.045825\tvalid_1's binary_error: 0.0807\n",
      "[248]\ttraining's binary_error: 0.04565\tvalid_1's binary_error: 0.0808\n",
      "[249]\ttraining's binary_error: 0.045475\tvalid_1's binary_error: 0.0807\n",
      "[250]\ttraining's binary_error: 0.0453\tvalid_1's binary_error: 0.0807\n",
      "[251]\ttraining's binary_error: 0.04525\tvalid_1's binary_error: 0.0808\n",
      "[252]\ttraining's binary_error: 0.04515\tvalid_1's binary_error: 0.0805\n",
      "[253]\ttraining's binary_error: 0.044925\tvalid_1's binary_error: 0.0805\n",
      "[254]\ttraining's binary_error: 0.04495\tvalid_1's binary_error: 0.0806\n",
      "[255]\ttraining's binary_error: 0.04475\tvalid_1's binary_error: 0.0805\n",
      "[256]\ttraining's binary_error: 0.044675\tvalid_1's binary_error: 0.0806\n",
      "[257]\ttraining's binary_error: 0.044625\tvalid_1's binary_error: 0.0804\n",
      "[258]\ttraining's binary_error: 0.0445\tvalid_1's binary_error: 0.0805\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[259]\ttraining's binary_error: 0.044475\tvalid_1's binary_error: 0.0805\n",
      "[260]\ttraining's binary_error: 0.044475\tvalid_1's binary_error: 0.0804\n",
      "[261]\ttraining's binary_error: 0.04435\tvalid_1's binary_error: 0.0806\n",
      "[262]\ttraining's binary_error: 0.0443\tvalid_1's binary_error: 0.0806\n",
      "[263]\ttraining's binary_error: 0.0441\tvalid_1's binary_error: 0.0808\n",
      "[264]\ttraining's binary_error: 0.044075\tvalid_1's binary_error: 0.0807\n",
      "[265]\ttraining's binary_error: 0.043925\tvalid_1's binary_error: 0.0805\n",
      "[266]\ttraining's binary_error: 0.0439\tvalid_1's binary_error: 0.0805\n",
      "[267]\ttraining's binary_error: 0.043725\tvalid_1's binary_error: 0.0803\n",
      "[268]\ttraining's binary_error: 0.043575\tvalid_1's binary_error: 0.0803\n",
      "[269]\ttraining's binary_error: 0.0435\tvalid_1's binary_error: 0.0802\n",
      "[270]\ttraining's binary_error: 0.04345\tvalid_1's binary_error: 0.0802\n",
      "[271]\ttraining's binary_error: 0.043375\tvalid_1's binary_error: 0.0803\n",
      "[272]\ttraining's binary_error: 0.043225\tvalid_1's binary_error: 0.0803\n",
      "[273]\ttraining's binary_error: 0.0432\tvalid_1's binary_error: 0.0801\n",
      "[274]\ttraining's binary_error: 0.04315\tvalid_1's binary_error: 0.08\n",
      "[275]\ttraining's binary_error: 0.04295\tvalid_1's binary_error: 0.0799\n",
      "[276]\ttraining's binary_error: 0.042925\tvalid_1's binary_error: 0.0799\n",
      "[277]\ttraining's binary_error: 0.0426\tvalid_1's binary_error: 0.08\n",
      "[278]\ttraining's binary_error: 0.0426\tvalid_1's binary_error: 0.08\n",
      "[279]\ttraining's binary_error: 0.0425\tvalid_1's binary_error: 0.08\n",
      "[280]\ttraining's binary_error: 0.042175\tvalid_1's binary_error: 0.0801\n",
      "[281]\ttraining's binary_error: 0.042025\tvalid_1's binary_error: 0.0801\n",
      "[282]\ttraining's binary_error: 0.041875\tvalid_1's binary_error: 0.08\n",
      "[283]\ttraining's binary_error: 0.041875\tvalid_1's binary_error: 0.0802\n",
      "[284]\ttraining's binary_error: 0.041575\tvalid_1's binary_error: 0.0802\n",
      "[285]\ttraining's binary_error: 0.0416\tvalid_1's binary_error: 0.0802\n",
      "[286]\ttraining's binary_error: 0.041575\tvalid_1's binary_error: 0.0802\n",
      "[287]\ttraining's binary_error: 0.0415\tvalid_1's binary_error: 0.0802\n",
      "[288]\ttraining's binary_error: 0.0412\tvalid_1's binary_error: 0.0806\n",
      "[289]\ttraining's binary_error: 0.040975\tvalid_1's binary_error: 0.0805\n",
      "[290]\ttraining's binary_error: 0.040925\tvalid_1's binary_error: 0.0805\n",
      "[291]\ttraining's binary_error: 0.040875\tvalid_1's binary_error: 0.0805\n",
      "[292]\ttraining's binary_error: 0.040825\tvalid_1's binary_error: 0.0806\n",
      "[293]\ttraining's binary_error: 0.04075\tvalid_1's binary_error: 0.0807\n",
      "[294]\ttraining's binary_error: 0.04055\tvalid_1's binary_error: 0.0807\n",
      "[295]\ttraining's binary_error: 0.040425\tvalid_1's binary_error: 0.081\n",
      "[296]\ttraining's binary_error: 0.040125\tvalid_1's binary_error: 0.0811\n",
      "[297]\ttraining's binary_error: 0.040025\tvalid_1's binary_error: 0.0811\n",
      "[298]\ttraining's binary_error: 0.039975\tvalid_1's binary_error: 0.0811\n",
      "[299]\ttraining's binary_error: 0.039825\tvalid_1's binary_error: 0.0809\n",
      "[300]\ttraining's binary_error: 0.0396\tvalid_1's binary_error: 0.0807\n",
      "[301]\ttraining's binary_error: 0.039575\tvalid_1's binary_error: 0.0807\n",
      "[302]\ttraining's binary_error: 0.039575\tvalid_1's binary_error: 0.0808\n",
      "[303]\ttraining's binary_error: 0.039475\tvalid_1's binary_error: 0.0808\n",
      "[304]\ttraining's binary_error: 0.039475\tvalid_1's binary_error: 0.0809\n",
      "[305]\ttraining's binary_error: 0.039425\tvalid_1's binary_error: 0.0806\n",
      "[306]\ttraining's binary_error: 0.039275\tvalid_1's binary_error: 0.081\n",
      "[307]\ttraining's binary_error: 0.039225\tvalid_1's binary_error: 0.0811\n",
      "[308]\ttraining's binary_error: 0.03915\tvalid_1's binary_error: 0.0811\n",
      "[309]\ttraining's binary_error: 0.039025\tvalid_1's binary_error: 0.0811\n",
      "[310]\ttraining's binary_error: 0.0389\tvalid_1's binary_error: 0.0813\n",
      "[311]\ttraining's binary_error: 0.03875\tvalid_1's binary_error: 0.081\n",
      "[312]\ttraining's binary_error: 0.038675\tvalid_1's binary_error: 0.0811\n",
      "[313]\ttraining's binary_error: 0.0384\tvalid_1's binary_error: 0.0811\n",
      "[314]\ttraining's binary_error: 0.0382\tvalid_1's binary_error: 0.081\n",
      "[315]\ttraining's binary_error: 0.038075\tvalid_1's binary_error: 0.0811\n",
      "[316]\ttraining's binary_error: 0.03795\tvalid_1's binary_error: 0.081\n",
      "[317]\ttraining's binary_error: 0.03775\tvalid_1's binary_error: 0.0809\n",
      "[318]\ttraining's binary_error: 0.037625\tvalid_1's binary_error: 0.0808\n",
      "[319]\ttraining's binary_error: 0.03755\tvalid_1's binary_error: 0.0807\n",
      "[320]\ttraining's binary_error: 0.037475\tvalid_1's binary_error: 0.0808\n",
      "[321]\ttraining's binary_error: 0.03735\tvalid_1's binary_error: 0.0808\n",
      "[322]\ttraining's binary_error: 0.037325\tvalid_1's binary_error: 0.0808\n",
      "[323]\ttraining's binary_error: 0.0372\tvalid_1's binary_error: 0.0808\n",
      "[324]\ttraining's binary_error: 0.03715\tvalid_1's binary_error: 0.0809\n",
      "[325]\ttraining's binary_error: 0.037\tvalid_1's binary_error: 0.081\n",
      "[326]\ttraining's binary_error: 0.0368\tvalid_1's binary_error: 0.081\n",
      "[327]\ttraining's binary_error: 0.036575\tvalid_1's binary_error: 0.081\n",
      "[328]\ttraining's binary_error: 0.036575\tvalid_1's binary_error: 0.081\n",
      "[329]\ttraining's binary_error: 0.03645\tvalid_1's binary_error: 0.0808\n",
      "[330]\ttraining's binary_error: 0.03635\tvalid_1's binary_error: 0.0808\n",
      "[331]\ttraining's binary_error: 0.036275\tvalid_1's binary_error: 0.0806\n",
      "[332]\ttraining's binary_error: 0.036075\tvalid_1's binary_error: 0.0806\n",
      "[333]\ttraining's binary_error: 0.035925\tvalid_1's binary_error: 0.0806\n",
      "[334]\ttraining's binary_error: 0.03585\tvalid_1's binary_error: 0.0807\n",
      "[335]\ttraining's binary_error: 0.035625\tvalid_1's binary_error: 0.0808\n",
      "[336]\ttraining's binary_error: 0.035325\tvalid_1's binary_error: 0.0811\n",
      "[337]\ttraining's binary_error: 0.035225\tvalid_1's binary_error: 0.0812\n",
      "[338]\ttraining's binary_error: 0.03525\tvalid_1's binary_error: 0.0812\n",
      "[339]\ttraining's binary_error: 0.03515\tvalid_1's binary_error: 0.0814\n",
      "[340]\ttraining's binary_error: 0.03505\tvalid_1's binary_error: 0.0812\n",
      "[341]\ttraining's binary_error: 0.034925\tvalid_1's binary_error: 0.0815\n",
      "[342]\ttraining's binary_error: 0.0349\tvalid_1's binary_error: 0.0817\n",
      "[343]\ttraining's binary_error: 0.0348\tvalid_1's binary_error: 0.0819\n",
      "[344]\ttraining's binary_error: 0.03475\tvalid_1's binary_error: 0.0819\n",
      "[345]\ttraining's binary_error: 0.0347\tvalid_1's binary_error: 0.0819\n",
      "[346]\ttraining's binary_error: 0.0346\tvalid_1's binary_error: 0.0819\n",
      "[347]\ttraining's binary_error: 0.0345\tvalid_1's binary_error: 0.0818\n",
      "[348]\ttraining's binary_error: 0.034375\tvalid_1's binary_error: 0.0818\n",
      "[349]\ttraining's binary_error: 0.034075\tvalid_1's binary_error: 0.082\n",
      "[350]\ttraining's binary_error: 0.034\tvalid_1's binary_error: 0.0819\n",
      "[351]\ttraining's binary_error: 0.0338\tvalid_1's binary_error: 0.082\n",
      "[352]\ttraining's binary_error: 0.0337\tvalid_1's binary_error: 0.082\n",
      "[353]\ttraining's binary_error: 0.0337\tvalid_1's binary_error: 0.082\n",
      "[354]\ttraining's binary_error: 0.033575\tvalid_1's binary_error: 0.0819\n",
      "[355]\ttraining's binary_error: 0.033525\tvalid_1's binary_error: 0.0819\n",
      "[356]\ttraining's binary_error: 0.033375\tvalid_1's binary_error: 0.0817\n",
      "[357]\ttraining's binary_error: 0.033175\tvalid_1's binary_error: 0.0816\n",
      "[358]\ttraining's binary_error: 0.0331\tvalid_1's binary_error: 0.0815\n",
      "[359]\ttraining's binary_error: 0.033075\tvalid_1's binary_error: 0.0814\n",
      "[360]\ttraining's binary_error: 0.032875\tvalid_1's binary_error: 0.0815\n",
      "[361]\ttraining's binary_error: 0.0327\tvalid_1's binary_error: 0.0813\n",
      "[362]\ttraining's binary_error: 0.0326\tvalid_1's binary_error: 0.0815\n",
      "[363]\ttraining's binary_error: 0.0326\tvalid_1's binary_error: 0.0814\n",
      "[364]\ttraining's binary_error: 0.03255\tvalid_1's binary_error: 0.0819\n",
      "[365]\ttraining's binary_error: 0.0323\tvalid_1's binary_error: 0.0817\n",
      "[366]\ttraining's binary_error: 0.03235\tvalid_1's binary_error: 0.0818\n",
      "[367]\ttraining's binary_error: 0.032275\tvalid_1's binary_error: 0.0819\n",
      "[368]\ttraining's binary_error: 0.032175\tvalid_1's binary_error: 0.082\n",
      "[369]\ttraining's binary_error: 0.0321\tvalid_1's binary_error: 0.0821\n",
      "[370]\ttraining's binary_error: 0.031975\tvalid_1's binary_error: 0.0819\n",
      "[371]\ttraining's binary_error: 0.031925\tvalid_1's binary_error: 0.0817\n",
      "[372]\ttraining's binary_error: 0.031725\tvalid_1's binary_error: 0.0816\n",
      "[373]\ttraining's binary_error: 0.03165\tvalid_1's binary_error: 0.0815\n",
      "[374]\ttraining's binary_error: 0.03155\tvalid_1's binary_error: 0.0814\n",
      "[375]\ttraining's binary_error: 0.031425\tvalid_1's binary_error: 0.0816\n",
      "[376]\ttraining's binary_error: 0.0313\tvalid_1's binary_error: 0.0816\n",
      "[377]\ttraining's binary_error: 0.0313\tvalid_1's binary_error: 0.0815\n",
      "[378]\ttraining's binary_error: 0.03125\tvalid_1's binary_error: 0.0814\n",
      "[379]\ttraining's binary_error: 0.03115\tvalid_1's binary_error: 0.0814\n",
      "[380]\ttraining's binary_error: 0.030975\tvalid_1's binary_error: 0.0816\n",
      "[381]\ttraining's binary_error: 0.030775\tvalid_1's binary_error: 0.0814\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[382]\ttraining's binary_error: 0.0307\tvalid_1's binary_error: 0.0815\n",
      "[383]\ttraining's binary_error: 0.030575\tvalid_1's binary_error: 0.0815\n",
      "[384]\ttraining's binary_error: 0.03045\tvalid_1's binary_error: 0.0814\n",
      "[385]\ttraining's binary_error: 0.03035\tvalid_1's binary_error: 0.0811\n",
      "[386]\ttraining's binary_error: 0.03025\tvalid_1's binary_error: 0.0811\n",
      "[387]\ttraining's binary_error: 0.03005\tvalid_1's binary_error: 0.081\n",
      "[388]\ttraining's binary_error: 0.029975\tvalid_1's binary_error: 0.0811\n",
      "[389]\ttraining's binary_error: 0.0299\tvalid_1's binary_error: 0.0813\n",
      "[390]\ttraining's binary_error: 0.02985\tvalid_1's binary_error: 0.0813\n",
      "[391]\ttraining's binary_error: 0.029675\tvalid_1's binary_error: 0.0814\n",
      "[392]\ttraining's binary_error: 0.029525\tvalid_1's binary_error: 0.0813\n",
      "[393]\ttraining's binary_error: 0.029425\tvalid_1's binary_error: 0.0814\n",
      "[394]\ttraining's binary_error: 0.029275\tvalid_1's binary_error: 0.0815\n",
      "[395]\ttraining's binary_error: 0.0293\tvalid_1's binary_error: 0.0816\n",
      "[396]\ttraining's binary_error: 0.029225\tvalid_1's binary_error: 0.0813\n",
      "[397]\ttraining's binary_error: 0.02915\tvalid_1's binary_error: 0.0813\n",
      "[398]\ttraining's binary_error: 0.029025\tvalid_1's binary_error: 0.0812\n",
      "[399]\ttraining's binary_error: 0.029\tvalid_1's binary_error: 0.0812\n",
      "[400]\ttraining's binary_error: 0.028925\tvalid_1's binary_error: 0.0812\n",
      "[401]\ttraining's binary_error: 0.0289\tvalid_1's binary_error: 0.0814\n",
      "[402]\ttraining's binary_error: 0.0288\tvalid_1's binary_error: 0.0816\n",
      "[403]\ttraining's binary_error: 0.028725\tvalid_1's binary_error: 0.0817\n",
      "[404]\ttraining's binary_error: 0.028725\tvalid_1's binary_error: 0.0817\n",
      "[405]\ttraining's binary_error: 0.028675\tvalid_1's binary_error: 0.0815\n",
      "[406]\ttraining's binary_error: 0.028475\tvalid_1's binary_error: 0.0817\n",
      "[407]\ttraining's binary_error: 0.028375\tvalid_1's binary_error: 0.0816\n",
      "[408]\ttraining's binary_error: 0.02825\tvalid_1's binary_error: 0.0815\n",
      "[409]\ttraining's binary_error: 0.02815\tvalid_1's binary_error: 0.0815\n",
      "[410]\ttraining's binary_error: 0.028075\tvalid_1's binary_error: 0.0816\n",
      "[411]\ttraining's binary_error: 0.02775\tvalid_1's binary_error: 0.0817\n",
      "[412]\ttraining's binary_error: 0.02775\tvalid_1's binary_error: 0.0817\n",
      "[413]\ttraining's binary_error: 0.0277\tvalid_1's binary_error: 0.0817\n",
      "[414]\ttraining's binary_error: 0.027625\tvalid_1's binary_error: 0.0818\n",
      "[415]\ttraining's binary_error: 0.0276\tvalid_1's binary_error: 0.082\n",
      "[416]\ttraining's binary_error: 0.027475\tvalid_1's binary_error: 0.0823\n",
      "[417]\ttraining's binary_error: 0.0274\tvalid_1's binary_error: 0.0823\n",
      "[418]\ttraining's binary_error: 0.027325\tvalid_1's binary_error: 0.0822\n",
      "[419]\ttraining's binary_error: 0.027125\tvalid_1's binary_error: 0.082\n",
      "[420]\ttraining's binary_error: 0.02705\tvalid_1's binary_error: 0.082\n",
      "[421]\ttraining's binary_error: 0.026925\tvalid_1's binary_error: 0.0821\n",
      "[422]\ttraining's binary_error: 0.0268\tvalid_1's binary_error: 0.0822\n",
      "[423]\ttraining's binary_error: 0.02675\tvalid_1's binary_error: 0.0824\n",
      "[424]\ttraining's binary_error: 0.02655\tvalid_1's binary_error: 0.0824\n",
      "[425]\ttraining's binary_error: 0.026475\tvalid_1's binary_error: 0.0824\n",
      "[426]\ttraining's binary_error: 0.0264\tvalid_1's binary_error: 0.0823\n",
      "[427]\ttraining's binary_error: 0.02625\tvalid_1's binary_error: 0.0823\n",
      "[428]\ttraining's binary_error: 0.02605\tvalid_1's binary_error: 0.0821\n",
      "[429]\ttraining's binary_error: 0.02605\tvalid_1's binary_error: 0.0821\n",
      "[430]\ttraining's binary_error: 0.025975\tvalid_1's binary_error: 0.0822\n",
      "[431]\ttraining's binary_error: 0.025875\tvalid_1's binary_error: 0.0823\n",
      "[432]\ttraining's binary_error: 0.0257\tvalid_1's binary_error: 0.0824\n",
      "[433]\ttraining's binary_error: 0.025625\tvalid_1's binary_error: 0.0825\n",
      "[434]\ttraining's binary_error: 0.02545\tvalid_1's binary_error: 0.0825\n",
      "[435]\ttraining's binary_error: 0.025375\tvalid_1's binary_error: 0.0825\n",
      "[436]\ttraining's binary_error: 0.025325\tvalid_1's binary_error: 0.0825\n",
      "[437]\ttraining's binary_error: 0.025275\tvalid_1's binary_error: 0.0826\n",
      "[438]\ttraining's binary_error: 0.025125\tvalid_1's binary_error: 0.0826\n",
      "[439]\ttraining's binary_error: 0.025\tvalid_1's binary_error: 0.0825\n",
      "[440]\ttraining's binary_error: 0.0249\tvalid_1's binary_error: 0.0827\n",
      "[441]\ttraining's binary_error: 0.0247\tvalid_1's binary_error: 0.0825\n",
      "[442]\ttraining's binary_error: 0.024625\tvalid_1's binary_error: 0.0825\n",
      "[443]\ttraining's binary_error: 0.024575\tvalid_1's binary_error: 0.0826\n",
      "[444]\ttraining's binary_error: 0.02455\tvalid_1's binary_error: 0.0825\n",
      "[445]\ttraining's binary_error: 0.02445\tvalid_1's binary_error: 0.0826\n",
      "[446]\ttraining's binary_error: 0.0244\tvalid_1's binary_error: 0.0826\n",
      "[447]\ttraining's binary_error: 0.0243\tvalid_1's binary_error: 0.0825\n",
      "[448]\ttraining's binary_error: 0.0243\tvalid_1's binary_error: 0.0824\n",
      "[449]\ttraining's binary_error: 0.024275\tvalid_1's binary_error: 0.0823\n",
      "[450]\ttraining's binary_error: 0.024175\tvalid_1's binary_error: 0.0822\n",
      "[451]\ttraining's binary_error: 0.024175\tvalid_1's binary_error: 0.082\n",
      "[452]\ttraining's binary_error: 0.02405\tvalid_1's binary_error: 0.0822\n",
      "[453]\ttraining's binary_error: 0.024025\tvalid_1's binary_error: 0.0822\n",
      "[454]\ttraining's binary_error: 0.024025\tvalid_1's binary_error: 0.0822\n",
      "[455]\ttraining's binary_error: 0.0239\tvalid_1's binary_error: 0.0822\n",
      "[456]\ttraining's binary_error: 0.023725\tvalid_1's binary_error: 0.0823\n",
      "[457]\ttraining's binary_error: 0.02365\tvalid_1's binary_error: 0.0823\n",
      "[458]\ttraining's binary_error: 0.023625\tvalid_1's binary_error: 0.0822\n",
      "[459]\ttraining's binary_error: 0.023625\tvalid_1's binary_error: 0.0822\n",
      "[460]\ttraining's binary_error: 0.023475\tvalid_1's binary_error: 0.0821\n",
      "[461]\ttraining's binary_error: 0.023275\tvalid_1's binary_error: 0.0819\n",
      "[462]\ttraining's binary_error: 0.023225\tvalid_1's binary_error: 0.0819\n",
      "[463]\ttraining's binary_error: 0.023075\tvalid_1's binary_error: 0.0818\n",
      "[464]\ttraining's binary_error: 0.02295\tvalid_1's binary_error: 0.0818\n",
      "[465]\ttraining's binary_error: 0.02295\tvalid_1's binary_error: 0.0818\n",
      "[466]\ttraining's binary_error: 0.0229\tvalid_1's binary_error: 0.0818\n",
      "[467]\ttraining's binary_error: 0.02295\tvalid_1's binary_error: 0.0818\n",
      "[468]\ttraining's binary_error: 0.022875\tvalid_1's binary_error: 0.082\n",
      "[469]\ttraining's binary_error: 0.0228\tvalid_1's binary_error: 0.082\n",
      "[470]\ttraining's binary_error: 0.02265\tvalid_1's binary_error: 0.0821\n",
      "[471]\ttraining's binary_error: 0.022575\tvalid_1's binary_error: 0.0822\n",
      "[472]\ttraining's binary_error: 0.022525\tvalid_1's binary_error: 0.0821\n",
      "[473]\ttraining's binary_error: 0.0225\tvalid_1's binary_error: 0.0824\n",
      "[474]\ttraining's binary_error: 0.022475\tvalid_1's binary_error: 0.0824\n",
      "[475]\ttraining's binary_error: 0.02245\tvalid_1's binary_error: 0.0824\n",
      "[476]\ttraining's binary_error: 0.02235\tvalid_1's binary_error: 0.0823\n",
      "[477]\ttraining's binary_error: 0.022275\tvalid_1's binary_error: 0.0823\n",
      "[478]\ttraining's binary_error: 0.02215\tvalid_1's binary_error: 0.0823\n",
      "[479]\ttraining's binary_error: 0.022025\tvalid_1's binary_error: 0.0822\n",
      "[480]\ttraining's binary_error: 0.021975\tvalid_1's binary_error: 0.0825\n",
      "[481]\ttraining's binary_error: 0.02185\tvalid_1's binary_error: 0.0824\n",
      "[482]\ttraining's binary_error: 0.0218\tvalid_1's binary_error: 0.0825\n",
      "[483]\ttraining's binary_error: 0.0218\tvalid_1's binary_error: 0.0824\n",
      "[484]\ttraining's binary_error: 0.021825\tvalid_1's binary_error: 0.0825\n",
      "[485]\ttraining's binary_error: 0.0216\tvalid_1's binary_error: 0.0824\n",
      "[486]\ttraining's binary_error: 0.02155\tvalid_1's binary_error: 0.0823\n",
      "[487]\ttraining's binary_error: 0.021575\tvalid_1's binary_error: 0.0824\n",
      "[488]\ttraining's binary_error: 0.0214\tvalid_1's binary_error: 0.0824\n",
      "[489]\ttraining's binary_error: 0.021375\tvalid_1's binary_error: 0.0823\n",
      "[490]\ttraining's binary_error: 0.021375\tvalid_1's binary_error: 0.0823\n",
      "[491]\ttraining's binary_error: 0.021275\tvalid_1's binary_error: 0.0823\n",
      "[492]\ttraining's binary_error: 0.021275\tvalid_1's binary_error: 0.0823\n",
      "[493]\ttraining's binary_error: 0.0212\tvalid_1's binary_error: 0.0823\n",
      "[494]\ttraining's binary_error: 0.02115\tvalid_1's binary_error: 0.0824\n",
      "[495]\ttraining's binary_error: 0.021025\tvalid_1's binary_error: 0.0823\n",
      "[496]\ttraining's binary_error: 0.020875\tvalid_1's binary_error: 0.0824\n",
      "[497]\ttraining's binary_error: 0.020825\tvalid_1's binary_error: 0.0824\n",
      "[498]\ttraining's binary_error: 0.02075\tvalid_1's binary_error: 0.0824\n",
      "[499]\ttraining's binary_error: 0.02065\tvalid_1's binary_error: 0.0825\n",
      "[500]\ttraining's binary_error: 0.020725\tvalid_1's binary_error: 0.0824\n",
      "[501]\ttraining's binary_error: 0.020575\tvalid_1's binary_error: 0.0826\n",
      "[502]\ttraining's binary_error: 0.020575\tvalid_1's binary_error: 0.0827\n",
      "[503]\ttraining's binary_error: 0.020475\tvalid_1's binary_error: 0.0826\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[504]\ttraining's binary_error: 0.020425\tvalid_1's binary_error: 0.0826\n",
      "[505]\ttraining's binary_error: 0.02035\tvalid_1's binary_error: 0.0826\n",
      "[506]\ttraining's binary_error: 0.02035\tvalid_1's binary_error: 0.0827\n",
      "[507]\ttraining's binary_error: 0.0201\tvalid_1's binary_error: 0.0827\n",
      "[508]\ttraining's binary_error: 0.0201\tvalid_1's binary_error: 0.0828\n",
      "[509]\ttraining's binary_error: 0.02005\tvalid_1's binary_error: 0.0825\n",
      "[510]\ttraining's binary_error: 0.01995\tvalid_1's binary_error: 0.0825\n",
      "[511]\ttraining's binary_error: 0.019875\tvalid_1's binary_error: 0.0823\n",
      "[512]\ttraining's binary_error: 0.0198\tvalid_1's binary_error: 0.0823\n",
      "[513]\ttraining's binary_error: 0.019725\tvalid_1's binary_error: 0.0822\n",
      "[514]\ttraining's binary_error: 0.0197\tvalid_1's binary_error: 0.0824\n",
      "[515]\ttraining's binary_error: 0.019625\tvalid_1's binary_error: 0.0824\n",
      "[516]\ttraining's binary_error: 0.019425\tvalid_1's binary_error: 0.0825\n",
      "[517]\ttraining's binary_error: 0.01935\tvalid_1's binary_error: 0.0825\n",
      "[518]\ttraining's binary_error: 0.019275\tvalid_1's binary_error: 0.0827\n",
      "[519]\ttraining's binary_error: 0.019075\tvalid_1's binary_error: 0.0828\n",
      "[520]\ttraining's binary_error: 0.01905\tvalid_1's binary_error: 0.0827\n",
      "[521]\ttraining's binary_error: 0.018875\tvalid_1's binary_error: 0.0826\n",
      "[522]\ttraining's binary_error: 0.0188\tvalid_1's binary_error: 0.0826\n",
      "[523]\ttraining's binary_error: 0.0187\tvalid_1's binary_error: 0.0826\n",
      "[524]\ttraining's binary_error: 0.018725\tvalid_1's binary_error: 0.0825\n",
      "[525]\ttraining's binary_error: 0.0186\tvalid_1's binary_error: 0.0826\n",
      "[526]\ttraining's binary_error: 0.018575\tvalid_1's binary_error: 0.0828\n",
      "[527]\ttraining's binary_error: 0.0185\tvalid_1's binary_error: 0.0827\n",
      "[528]\ttraining's binary_error: 0.0184\tvalid_1's binary_error: 0.0827\n",
      "[529]\ttraining's binary_error: 0.0184\tvalid_1's binary_error: 0.0829\n",
      "[530]\ttraining's binary_error: 0.01835\tvalid_1's binary_error: 0.0829\n",
      "[531]\ttraining's binary_error: 0.018275\tvalid_1's binary_error: 0.0828\n",
      "[532]\ttraining's binary_error: 0.0181\tvalid_1's binary_error: 0.083\n",
      "[533]\ttraining's binary_error: 0.0181\tvalid_1's binary_error: 0.0831\n",
      "[534]\ttraining's binary_error: 0.01805\tvalid_1's binary_error: 0.083\n",
      "[535]\ttraining's binary_error: 0.01805\tvalid_1's binary_error: 0.0829\n",
      "[536]\ttraining's binary_error: 0.017975\tvalid_1's binary_error: 0.0829\n",
      "[537]\ttraining's binary_error: 0.01795\tvalid_1's binary_error: 0.0829\n",
      "[538]\ttraining's binary_error: 0.01795\tvalid_1's binary_error: 0.0828\n",
      "[539]\ttraining's binary_error: 0.017825\tvalid_1's binary_error: 0.0828\n",
      "[540]\ttraining's binary_error: 0.017725\tvalid_1's binary_error: 0.0828\n",
      "[541]\ttraining's binary_error: 0.017725\tvalid_1's binary_error: 0.0829\n",
      "[542]\ttraining's binary_error: 0.01765\tvalid_1's binary_error: 0.0828\n",
      "[543]\ttraining's binary_error: 0.017625\tvalid_1's binary_error: 0.0828\n",
      "[544]\ttraining's binary_error: 0.0176\tvalid_1's binary_error: 0.0827\n",
      "[545]\ttraining's binary_error: 0.017525\tvalid_1's binary_error: 0.0828\n",
      "[546]\ttraining's binary_error: 0.017425\tvalid_1's binary_error: 0.0827\n",
      "[547]\ttraining's binary_error: 0.017325\tvalid_1's binary_error: 0.0828\n",
      "[548]\ttraining's binary_error: 0.017325\tvalid_1's binary_error: 0.0828\n",
      "[549]\ttraining's binary_error: 0.017225\tvalid_1's binary_error: 0.0828\n",
      "[550]\ttraining's binary_error: 0.017075\tvalid_1's binary_error: 0.0828\n",
      "[551]\ttraining's binary_error: 0.01705\tvalid_1's binary_error: 0.0828\n",
      "[552]\ttraining's binary_error: 0.017025\tvalid_1's binary_error: 0.0827\n",
      "[553]\ttraining's binary_error: 0.01685\tvalid_1's binary_error: 0.0826\n",
      "[554]\ttraining's binary_error: 0.016825\tvalid_1's binary_error: 0.0825\n",
      "[555]\ttraining's binary_error: 0.01675\tvalid_1's binary_error: 0.0825\n",
      "[556]\ttraining's binary_error: 0.016725\tvalid_1's binary_error: 0.0824\n",
      "[557]\ttraining's binary_error: 0.0167\tvalid_1's binary_error: 0.0824\n",
      "[558]\ttraining's binary_error: 0.01665\tvalid_1's binary_error: 0.0824\n",
      "[559]\ttraining's binary_error: 0.0166\tvalid_1's binary_error: 0.0823\n",
      "[560]\ttraining's binary_error: 0.01655\tvalid_1's binary_error: 0.0823\n",
      "[561]\ttraining's binary_error: 0.0165\tvalid_1's binary_error: 0.0823\n",
      "[562]\ttraining's binary_error: 0.01645\tvalid_1's binary_error: 0.0822\n",
      "[563]\ttraining's binary_error: 0.0164\tvalid_1's binary_error: 0.0822\n",
      "[564]\ttraining's binary_error: 0.016325\tvalid_1's binary_error: 0.0823\n",
      "[565]\ttraining's binary_error: 0.016275\tvalid_1's binary_error: 0.0823\n",
      "[566]\ttraining's binary_error: 0.016125\tvalid_1's binary_error: 0.0823\n",
      "[567]\ttraining's binary_error: 0.0161\tvalid_1's binary_error: 0.0823\n",
      "[568]\ttraining's binary_error: 0.015875\tvalid_1's binary_error: 0.0823\n",
      "[569]\ttraining's binary_error: 0.015875\tvalid_1's binary_error: 0.0823\n",
      "[570]\ttraining's binary_error: 0.015825\tvalid_1's binary_error: 0.0822\n",
      "[571]\ttraining's binary_error: 0.015825\tvalid_1's binary_error: 0.0822\n",
      "[572]\ttraining's binary_error: 0.015825\tvalid_1's binary_error: 0.0822\n",
      "[573]\ttraining's binary_error: 0.01565\tvalid_1's binary_error: 0.0824\n",
      "[574]\ttraining's binary_error: 0.0156\tvalid_1's binary_error: 0.0826\n",
      "[575]\ttraining's binary_error: 0.0156\tvalid_1's binary_error: 0.0825\n",
      "[576]\ttraining's binary_error: 0.015575\tvalid_1's binary_error: 0.0825\n",
      "[577]\ttraining's binary_error: 0.015425\tvalid_1's binary_error: 0.0824\n",
      "[578]\ttraining's binary_error: 0.0154\tvalid_1's binary_error: 0.0825\n",
      "[579]\ttraining's binary_error: 0.015425\tvalid_1's binary_error: 0.0821\n",
      "[580]\ttraining's binary_error: 0.015325\tvalid_1's binary_error: 0.0821\n",
      "[581]\ttraining's binary_error: 0.0153\tvalid_1's binary_error: 0.0822\n",
      "[582]\ttraining's binary_error: 0.0153\tvalid_1's binary_error: 0.0824\n",
      "[583]\ttraining's binary_error: 0.015275\tvalid_1's binary_error: 0.0823\n",
      "[584]\ttraining's binary_error: 0.01525\tvalid_1's binary_error: 0.0824\n",
      "[585]\ttraining's binary_error: 0.015125\tvalid_1's binary_error: 0.0824\n",
      "[586]\ttraining's binary_error: 0.015125\tvalid_1's binary_error: 0.0824\n",
      "[587]\ttraining's binary_error: 0.0151\tvalid_1's binary_error: 0.0824\n",
      "[588]\ttraining's binary_error: 0.01505\tvalid_1's binary_error: 0.0826\n",
      "[589]\ttraining's binary_error: 0.015\tvalid_1's binary_error: 0.0826\n",
      "[590]\ttraining's binary_error: 0.0149\tvalid_1's binary_error: 0.0827\n",
      "[591]\ttraining's binary_error: 0.014875\tvalid_1's binary_error: 0.0827\n",
      "[592]\ttraining's binary_error: 0.014775\tvalid_1's binary_error: 0.0829\n",
      "[593]\ttraining's binary_error: 0.0148\tvalid_1's binary_error: 0.0828\n",
      "[594]\ttraining's binary_error: 0.0148\tvalid_1's binary_error: 0.0828\n",
      "[595]\ttraining's binary_error: 0.0148\tvalid_1's binary_error: 0.0829\n",
      "[596]\ttraining's binary_error: 0.01475\tvalid_1's binary_error: 0.0831\n",
      "[597]\ttraining's binary_error: 0.014675\tvalid_1's binary_error: 0.0832\n",
      "[598]\ttraining's binary_error: 0.0146\tvalid_1's binary_error: 0.0832\n",
      "[599]\ttraining's binary_error: 0.01455\tvalid_1's binary_error: 0.0832\n",
      "[600]\ttraining's binary_error: 0.0145\tvalid_1's binary_error: 0.0831\n",
      "[601]\ttraining's binary_error: 0.014525\tvalid_1's binary_error: 0.083\n",
      "[602]\ttraining's binary_error: 0.0145\tvalid_1's binary_error: 0.083\n",
      "[603]\ttraining's binary_error: 0.01435\tvalid_1's binary_error: 0.0827\n",
      "[604]\ttraining's binary_error: 0.014275\tvalid_1's binary_error: 0.0827\n",
      "[605]\ttraining's binary_error: 0.014275\tvalid_1's binary_error: 0.0827\n",
      "[606]\ttraining's binary_error: 0.014125\tvalid_1's binary_error: 0.0829\n",
      "[607]\ttraining's binary_error: 0.014075\tvalid_1's binary_error: 0.0829\n",
      "[608]\ttraining's binary_error: 0.014075\tvalid_1's binary_error: 0.0827\n",
      "[609]\ttraining's binary_error: 0.01405\tvalid_1's binary_error: 0.0825\n",
      "[610]\ttraining's binary_error: 0.013975\tvalid_1's binary_error: 0.0825\n",
      "[611]\ttraining's binary_error: 0.013975\tvalid_1's binary_error: 0.0824\n",
      "[612]\ttraining's binary_error: 0.013925\tvalid_1's binary_error: 0.0822\n",
      "[613]\ttraining's binary_error: 0.013775\tvalid_1's binary_error: 0.0823\n",
      "[614]\ttraining's binary_error: 0.013725\tvalid_1's binary_error: 0.0824\n",
      "[615]\ttraining's binary_error: 0.013675\tvalid_1's binary_error: 0.0823\n",
      "[616]\ttraining's binary_error: 0.01365\tvalid_1's binary_error: 0.0822\n",
      "[617]\ttraining's binary_error: 0.01355\tvalid_1's binary_error: 0.0822\n",
      "[618]\ttraining's binary_error: 0.013425\tvalid_1's binary_error: 0.0823\n",
      "[619]\ttraining's binary_error: 0.0134\tvalid_1's binary_error: 0.0823\n",
      "[620]\ttraining's binary_error: 0.01335\tvalid_1's binary_error: 0.0825\n",
      "[621]\ttraining's binary_error: 0.01335\tvalid_1's binary_error: 0.0825\n",
      "[622]\ttraining's binary_error: 0.01335\tvalid_1's binary_error: 0.0825\n",
      "[623]\ttraining's binary_error: 0.013225\tvalid_1's binary_error: 0.0824\n",
      "[624]\ttraining's binary_error: 0.01315\tvalid_1's binary_error: 0.0824\n",
      "[625]\ttraining's binary_error: 0.01295\tvalid_1's binary_error: 0.0823\n",
      "[626]\ttraining's binary_error: 0.01295\tvalid_1's binary_error: 0.0823\n",
      "[627]\ttraining's binary_error: 0.012925\tvalid_1's binary_error: 0.0823\n",
      "[628]\ttraining's binary_error: 0.012925\tvalid_1's binary_error: 0.0823\n",
      "[629]\ttraining's binary_error: 0.012925\tvalid_1's binary_error: 0.0824\n",
      "[630]\ttraining's binary_error: 0.0129\tvalid_1's binary_error: 0.0824\n",
      "[631]\ttraining's binary_error: 0.01285\tvalid_1's binary_error: 0.0824\n",
      "[632]\ttraining's binary_error: 0.012775\tvalid_1's binary_error: 0.0825\n",
      "[633]\ttraining's binary_error: 0.012725\tvalid_1's binary_error: 0.0825\n",
      "[634]\ttraining's binary_error: 0.012625\tvalid_1's binary_error: 0.0825\n",
      "[635]\ttraining's binary_error: 0.012625\tvalid_1's binary_error: 0.0826\n",
      "[636]\ttraining's binary_error: 0.012625\tvalid_1's binary_error: 0.0826\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[637]\ttraining's binary_error: 0.01255\tvalid_1's binary_error: 0.0827\n",
      "[638]\ttraining's binary_error: 0.0125\tvalid_1's binary_error: 0.0825\n",
      "[639]\ttraining's binary_error: 0.0125\tvalid_1's binary_error: 0.0825\n",
      "[640]\ttraining's binary_error: 0.012475\tvalid_1's binary_error: 0.0825\n",
      "[641]\ttraining's binary_error: 0.012425\tvalid_1's binary_error: 0.0826\n",
      "[642]\ttraining's binary_error: 0.0124\tvalid_1's binary_error: 0.0827\n",
      "[643]\ttraining's binary_error: 0.012375\tvalid_1's binary_error: 0.0826\n",
      "[644]\ttraining's binary_error: 0.012325\tvalid_1's binary_error: 0.0826\n",
      "[645]\ttraining's binary_error: 0.01225\tvalid_1's binary_error: 0.0824\n",
      "[646]\ttraining's binary_error: 0.01225\tvalid_1's binary_error: 0.0824\n",
      "[647]\ttraining's binary_error: 0.01225\tvalid_1's binary_error: 0.0823\n",
      "[648]\ttraining's binary_error: 0.0122\tvalid_1's binary_error: 0.0823\n",
      "[649]\ttraining's binary_error: 0.01205\tvalid_1's binary_error: 0.0822\n",
      "[650]\ttraining's binary_error: 0.0121\tvalid_1's binary_error: 0.0823\n",
      "[651]\ttraining's binary_error: 0.01205\tvalid_1's binary_error: 0.0822\n",
      "[652]\ttraining's binary_error: 0.01195\tvalid_1's binary_error: 0.0821\n",
      "[653]\ttraining's binary_error: 0.011925\tvalid_1's binary_error: 0.0821\n",
      "[654]\ttraining's binary_error: 0.011725\tvalid_1's binary_error: 0.0821\n",
      "[655]\ttraining's binary_error: 0.011675\tvalid_1's binary_error: 0.0821\n",
      "[656]\ttraining's binary_error: 0.0116\tvalid_1's binary_error: 0.082\n",
      "[657]\ttraining's binary_error: 0.01145\tvalid_1's binary_error: 0.082\n",
      "[658]\ttraining's binary_error: 0.011325\tvalid_1's binary_error: 0.0821\n",
      "[659]\ttraining's binary_error: 0.0113\tvalid_1's binary_error: 0.0819\n",
      "[660]\ttraining's binary_error: 0.011325\tvalid_1's binary_error: 0.0817\n",
      "[661]\ttraining's binary_error: 0.0113\tvalid_1's binary_error: 0.0816\n",
      "[662]\ttraining's binary_error: 0.0113\tvalid_1's binary_error: 0.0816\n",
      "[663]\ttraining's binary_error: 0.011275\tvalid_1's binary_error: 0.0815\n",
      "[664]\ttraining's binary_error: 0.0112\tvalid_1's binary_error: 0.0815\n",
      "[665]\ttraining's binary_error: 0.0112\tvalid_1's binary_error: 0.0816\n",
      "[666]\ttraining's binary_error: 0.011175\tvalid_1's binary_error: 0.0816\n",
      "[667]\ttraining's binary_error: 0.011175\tvalid_1's binary_error: 0.0816\n",
      "[668]\ttraining's binary_error: 0.01115\tvalid_1's binary_error: 0.0815\n",
      "[669]\ttraining's binary_error: 0.0111\tvalid_1's binary_error: 0.0815\n",
      "[670]\ttraining's binary_error: 0.011025\tvalid_1's binary_error: 0.0816\n",
      "[671]\ttraining's binary_error: 0.011025\tvalid_1's binary_error: 0.0815\n",
      "[672]\ttraining's binary_error: 0.010975\tvalid_1's binary_error: 0.0817\n",
      "[673]\ttraining's binary_error: 0.010875\tvalid_1's binary_error: 0.0819\n",
      "[674]\ttraining's binary_error: 0.0108\tvalid_1's binary_error: 0.0819\n",
      "[675]\ttraining's binary_error: 0.010825\tvalid_1's binary_error: 0.0816\n",
      "[676]\ttraining's binary_error: 0.010725\tvalid_1's binary_error: 0.0815\n",
      "[677]\ttraining's binary_error: 0.0107\tvalid_1's binary_error: 0.0816\n",
      "[678]\ttraining's binary_error: 0.01065\tvalid_1's binary_error: 0.0817\n",
      "[679]\ttraining's binary_error: 0.010625\tvalid_1's binary_error: 0.0817\n",
      "[680]\ttraining's binary_error: 0.010625\tvalid_1's binary_error: 0.0818\n",
      "[681]\ttraining's binary_error: 0.010525\tvalid_1's binary_error: 0.0818\n",
      "[682]\ttraining's binary_error: 0.0105\tvalid_1's binary_error: 0.0818\n",
      "[683]\ttraining's binary_error: 0.010475\tvalid_1's binary_error: 0.0818\n",
      "[684]\ttraining's binary_error: 0.010475\tvalid_1's binary_error: 0.0817\n",
      "[685]\ttraining's binary_error: 0.01045\tvalid_1's binary_error: 0.0817\n",
      "[686]\ttraining's binary_error: 0.010425\tvalid_1's binary_error: 0.0817\n",
      "[687]\ttraining's binary_error: 0.010425\tvalid_1's binary_error: 0.0817\n",
      "[688]\ttraining's binary_error: 0.01035\tvalid_1's binary_error: 0.0817\n",
      "[689]\ttraining's binary_error: 0.01035\tvalid_1's binary_error: 0.0815\n",
      "[690]\ttraining's binary_error: 0.010325\tvalid_1's binary_error: 0.0815\n",
      "[691]\ttraining's binary_error: 0.010225\tvalid_1's binary_error: 0.0817\n",
      "[692]\ttraining's binary_error: 0.010175\tvalid_1's binary_error: 0.0817\n",
      "[693]\ttraining's binary_error: 0.0101\tvalid_1's binary_error: 0.0817\n",
      "[694]\ttraining's binary_error: 0.010025\tvalid_1's binary_error: 0.0817\n",
      "[695]\ttraining's binary_error: 0.010025\tvalid_1's binary_error: 0.0819\n",
      "[696]\ttraining's binary_error: 0.00995\tvalid_1's binary_error: 0.0821\n",
      "[697]\ttraining's binary_error: 0.00985\tvalid_1's binary_error: 0.0819\n",
      "[698]\ttraining's binary_error: 0.00985\tvalid_1's binary_error: 0.0819\n",
      "[699]\ttraining's binary_error: 0.009825\tvalid_1's binary_error: 0.0819\n",
      "[700]\ttraining's binary_error: 0.0098\tvalid_1's binary_error: 0.0819\n",
      "[701]\ttraining's binary_error: 0.009725\tvalid_1's binary_error: 0.082\n",
      "[702]\ttraining's binary_error: 0.009675\tvalid_1's binary_error: 0.0819\n",
      "[703]\ttraining's binary_error: 0.009675\tvalid_1's binary_error: 0.0819\n",
      "[704]\ttraining's binary_error: 0.0095\tvalid_1's binary_error: 0.0819\n",
      "[705]\ttraining's binary_error: 0.009475\tvalid_1's binary_error: 0.0819\n",
      "[706]\ttraining's binary_error: 0.009475\tvalid_1's binary_error: 0.0819\n",
      "[707]\ttraining's binary_error: 0.009475\tvalid_1's binary_error: 0.0819\n",
      "[708]\ttraining's binary_error: 0.009425\tvalid_1's binary_error: 0.082\n",
      "[709]\ttraining's binary_error: 0.009425\tvalid_1's binary_error: 0.0819\n",
      "[710]\ttraining's binary_error: 0.009375\tvalid_1's binary_error: 0.0818\n",
      "[711]\ttraining's binary_error: 0.009325\tvalid_1's binary_error: 0.082\n",
      "[712]\ttraining's binary_error: 0.009225\tvalid_1's binary_error: 0.082\n",
      "[713]\ttraining's binary_error: 0.0092\tvalid_1's binary_error: 0.082\n",
      "[714]\ttraining's binary_error: 0.0092\tvalid_1's binary_error: 0.0818\n",
      "[715]\ttraining's binary_error: 0.009075\tvalid_1's binary_error: 0.0817\n",
      "[716]\ttraining's binary_error: 0.009075\tvalid_1's binary_error: 0.0815\n",
      "[717]\ttraining's binary_error: 0.009075\tvalid_1's binary_error: 0.0814\n",
      "[718]\ttraining's binary_error: 0.00905\tvalid_1's binary_error: 0.0814\n",
      "[719]\ttraining's binary_error: 0.00905\tvalid_1's binary_error: 0.0815\n",
      "[720]\ttraining's binary_error: 0.00905\tvalid_1's binary_error: 0.0817\n",
      "[721]\ttraining's binary_error: 0.009\tvalid_1's binary_error: 0.0818\n",
      "[722]\ttraining's binary_error: 0.008975\tvalid_1's binary_error: 0.082\n",
      "[723]\ttraining's binary_error: 0.00895\tvalid_1's binary_error: 0.082\n",
      "[724]\ttraining's binary_error: 0.0089\tvalid_1's binary_error: 0.0819\n",
      "[725]\ttraining's binary_error: 0.008875\tvalid_1's binary_error: 0.0817\n",
      "[726]\ttraining's binary_error: 0.00885\tvalid_1's binary_error: 0.0817\n",
      "[727]\ttraining's binary_error: 0.0088\tvalid_1's binary_error: 0.0818\n",
      "[728]\ttraining's binary_error: 0.0088\tvalid_1's binary_error: 0.0819\n",
      "[729]\ttraining's binary_error: 0.008775\tvalid_1's binary_error: 0.0819\n",
      "[730]\ttraining's binary_error: 0.008675\tvalid_1's binary_error: 0.0819\n",
      "[731]\ttraining's binary_error: 0.008525\tvalid_1's binary_error: 0.082\n",
      "[732]\ttraining's binary_error: 0.0085\tvalid_1's binary_error: 0.082\n",
      "[733]\ttraining's binary_error: 0.008525\tvalid_1's binary_error: 0.082\n",
      "[734]\ttraining's binary_error: 0.008525\tvalid_1's binary_error: 0.082\n",
      "[735]\ttraining's binary_error: 0.00845\tvalid_1's binary_error: 0.082\n",
      "[736]\ttraining's binary_error: 0.008325\tvalid_1's binary_error: 0.0818\n",
      "[737]\ttraining's binary_error: 0.0083\tvalid_1's binary_error: 0.0818\n",
      "[738]\ttraining's binary_error: 0.008175\tvalid_1's binary_error: 0.0819\n",
      "[739]\ttraining's binary_error: 0.008125\tvalid_1's binary_error: 0.0819\n",
      "[740]\ttraining's binary_error: 0.008075\tvalid_1's binary_error: 0.0819\n",
      "[741]\ttraining's binary_error: 0.00795\tvalid_1's binary_error: 0.0819\n",
      "[742]\ttraining's binary_error: 0.007925\tvalid_1's binary_error: 0.082\n",
      "[743]\ttraining's binary_error: 0.00795\tvalid_1's binary_error: 0.0818\n",
      "[744]\ttraining's binary_error: 0.0079\tvalid_1's binary_error: 0.0819\n",
      "[745]\ttraining's binary_error: 0.0078\tvalid_1's binary_error: 0.0821\n",
      "[746]\ttraining's binary_error: 0.0078\tvalid_1's binary_error: 0.0819\n",
      "[747]\ttraining's binary_error: 0.0078\tvalid_1's binary_error: 0.0817\n",
      "[748]\ttraining's binary_error: 0.0078\tvalid_1's binary_error: 0.0817\n",
      "[749]\ttraining's binary_error: 0.007775\tvalid_1's binary_error: 0.0817\n",
      "[750]\ttraining's binary_error: 0.00775\tvalid_1's binary_error: 0.0817\n",
      "[751]\ttraining's binary_error: 0.0077\tvalid_1's binary_error: 0.0817\n",
      "[752]\ttraining's binary_error: 0.0077\tvalid_1's binary_error: 0.0818\n",
      "[753]\ttraining's binary_error: 0.0077\tvalid_1's binary_error: 0.0817\n",
      "[754]\ttraining's binary_error: 0.0077\tvalid_1's binary_error: 0.0817\n",
      "[755]\ttraining's binary_error: 0.007675\tvalid_1's binary_error: 0.0817\n",
      "[756]\ttraining's binary_error: 0.007625\tvalid_1's binary_error: 0.0816\n",
      "[757]\ttraining's binary_error: 0.007625\tvalid_1's binary_error: 0.0816\n",
      "[758]\ttraining's binary_error: 0.007625\tvalid_1's binary_error: 0.0817\n",
      "[759]\ttraining's binary_error: 0.007625\tvalid_1's binary_error: 0.0816\n",
      "[760]\ttraining's binary_error: 0.0076\tvalid_1's binary_error: 0.0817\n",
      "[761]\ttraining's binary_error: 0.00755\tvalid_1's binary_error: 0.0816\n",
      "[762]\ttraining's binary_error: 0.00755\tvalid_1's binary_error: 0.0817\n",
      "[763]\ttraining's binary_error: 0.00755\tvalid_1's binary_error: 0.0817\n",
      "[764]\ttraining's binary_error: 0.00755\tvalid_1's binary_error: 0.0817\n",
      "[765]\ttraining's binary_error: 0.007575\tvalid_1's binary_error: 0.0817\n",
      "[766]\ttraining's binary_error: 0.007575\tvalid_1's binary_error: 0.0817\n",
      "[767]\ttraining's binary_error: 0.007575\tvalid_1's binary_error: 0.0818\n",
      "[768]\ttraining's binary_error: 0.007525\tvalid_1's binary_error: 0.0821\n",
      "[769]\ttraining's binary_error: 0.007575\tvalid_1's binary_error: 0.0821\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[770]\ttraining's binary_error: 0.007475\tvalid_1's binary_error: 0.0819\n",
      "[771]\ttraining's binary_error: 0.00745\tvalid_1's binary_error: 0.082\n",
      "[772]\ttraining's binary_error: 0.007425\tvalid_1's binary_error: 0.082\n",
      "[773]\ttraining's binary_error: 0.0073\tvalid_1's binary_error: 0.082\n",
      "[774]\ttraining's binary_error: 0.007225\tvalid_1's binary_error: 0.0819\n",
      "[775]\ttraining's binary_error: 0.007175\tvalid_1's binary_error: 0.0821\n",
      "[776]\ttraining's binary_error: 0.007175\tvalid_1's binary_error: 0.0821\n",
      "[777]\ttraining's binary_error: 0.007175\tvalid_1's binary_error: 0.0821\n",
      "[778]\ttraining's binary_error: 0.007125\tvalid_1's binary_error: 0.0822\n",
      "[779]\ttraining's binary_error: 0.007125\tvalid_1's binary_error: 0.0823\n",
      "[780]\ttraining's binary_error: 0.00715\tvalid_1's binary_error: 0.0823\n",
      "[781]\ttraining's binary_error: 0.007125\tvalid_1's binary_error: 0.0822\n",
      "[782]\ttraining's binary_error: 0.007075\tvalid_1's binary_error: 0.0822\n",
      "[783]\ttraining's binary_error: 0.00705\tvalid_1's binary_error: 0.0823\n",
      "[784]\ttraining's binary_error: 0.00695\tvalid_1's binary_error: 0.0824\n",
      "[785]\ttraining's binary_error: 0.006925\tvalid_1's binary_error: 0.0822\n",
      "[786]\ttraining's binary_error: 0.006925\tvalid_1's binary_error: 0.0822\n",
      "[787]\ttraining's binary_error: 0.006875\tvalid_1's binary_error: 0.0823\n",
      "[788]\ttraining's binary_error: 0.006825\tvalid_1's binary_error: 0.0824\n",
      "[789]\ttraining's binary_error: 0.006775\tvalid_1's binary_error: 0.0825\n",
      "[790]\ttraining's binary_error: 0.00665\tvalid_1's binary_error: 0.0825\n",
      "[791]\ttraining's binary_error: 0.006625\tvalid_1's binary_error: 0.0824\n",
      "[792]\ttraining's binary_error: 0.0066\tvalid_1's binary_error: 0.0823\n",
      "[793]\ttraining's binary_error: 0.006575\tvalid_1's binary_error: 0.0825\n",
      "[794]\ttraining's binary_error: 0.00655\tvalid_1's binary_error: 0.0824\n",
      "[795]\ttraining's binary_error: 0.0065\tvalid_1's binary_error: 0.0824\n",
      "[796]\ttraining's binary_error: 0.006525\tvalid_1's binary_error: 0.0825\n",
      "[797]\ttraining's binary_error: 0.006475\tvalid_1's binary_error: 0.0826\n",
      "[798]\ttraining's binary_error: 0.00645\tvalid_1's binary_error: 0.0827\n",
      "[799]\ttraining's binary_error: 0.00645\tvalid_1's binary_error: 0.0827\n",
      "[800]\ttraining's binary_error: 0.006425\tvalid_1's binary_error: 0.0826\n",
      "[801]\ttraining's binary_error: 0.006325\tvalid_1's binary_error: 0.0823\n",
      "[802]\ttraining's binary_error: 0.00635\tvalid_1's binary_error: 0.0823\n",
      "[803]\ttraining's binary_error: 0.0063\tvalid_1's binary_error: 0.0823\n",
      "[804]\ttraining's binary_error: 0.0063\tvalid_1's binary_error: 0.0823\n",
      "[805]\ttraining's binary_error: 0.00625\tvalid_1's binary_error: 0.0823\n",
      "[806]\ttraining's binary_error: 0.006225\tvalid_1's binary_error: 0.0823\n",
      "[807]\ttraining's binary_error: 0.006225\tvalid_1's binary_error: 0.0823\n",
      "[808]\ttraining's binary_error: 0.006225\tvalid_1's binary_error: 0.0823\n",
      "[809]\ttraining's binary_error: 0.006225\tvalid_1's binary_error: 0.0822\n",
      "[810]\ttraining's binary_error: 0.006175\tvalid_1's binary_error: 0.0823\n",
      "[811]\ttraining's binary_error: 0.006175\tvalid_1's binary_error: 0.0822\n",
      "[812]\ttraining's binary_error: 0.00615\tvalid_1's binary_error: 0.0822\n",
      "[813]\ttraining's binary_error: 0.006125\tvalid_1's binary_error: 0.0822\n",
      "[814]\ttraining's binary_error: 0.006125\tvalid_1's binary_error: 0.0821\n",
      "[815]\ttraining's binary_error: 0.00615\tvalid_1's binary_error: 0.0821\n",
      "[816]\ttraining's binary_error: 0.006125\tvalid_1's binary_error: 0.0822\n",
      "[817]\ttraining's binary_error: 0.0061\tvalid_1's binary_error: 0.0823\n",
      "[818]\ttraining's binary_error: 0.006075\tvalid_1's binary_error: 0.0821\n",
      "[819]\ttraining's binary_error: 0.00605\tvalid_1's binary_error: 0.082\n",
      "[820]\ttraining's binary_error: 0.005975\tvalid_1's binary_error: 0.082\n",
      "[821]\ttraining's binary_error: 0.005875\tvalid_1's binary_error: 0.0822\n",
      "[822]\ttraining's binary_error: 0.0059\tvalid_1's binary_error: 0.0821\n",
      "[823]\ttraining's binary_error: 0.0059\tvalid_1's binary_error: 0.082\n",
      "[824]\ttraining's binary_error: 0.0058\tvalid_1's binary_error: 0.0819\n",
      "[825]\ttraining's binary_error: 0.0058\tvalid_1's binary_error: 0.0819\n",
      "[826]\ttraining's binary_error: 0.0058\tvalid_1's binary_error: 0.0818\n",
      "[827]\ttraining's binary_error: 0.005775\tvalid_1's binary_error: 0.0818\n",
      "[828]\ttraining's binary_error: 0.005725\tvalid_1's binary_error: 0.0817\n",
      "[829]\ttraining's binary_error: 0.005675\tvalid_1's binary_error: 0.0818\n",
      "[830]\ttraining's binary_error: 0.0056\tvalid_1's binary_error: 0.0817\n",
      "[831]\ttraining's binary_error: 0.0056\tvalid_1's binary_error: 0.0818\n",
      "[832]\ttraining's binary_error: 0.0056\tvalid_1's binary_error: 0.082\n",
      "[833]\ttraining's binary_error: 0.0056\tvalid_1's binary_error: 0.0822\n",
      "[834]\ttraining's binary_error: 0.005575\tvalid_1's binary_error: 0.0824\n",
      "[835]\ttraining's binary_error: 0.00555\tvalid_1's binary_error: 0.0825\n",
      "[836]\ttraining's binary_error: 0.0055\tvalid_1's binary_error: 0.0825\n",
      "[837]\ttraining's binary_error: 0.0054\tvalid_1's binary_error: 0.0826\n",
      "[838]\ttraining's binary_error: 0.0054\tvalid_1's binary_error: 0.0823\n",
      "[839]\ttraining's binary_error: 0.00535\tvalid_1's binary_error: 0.0823\n",
      "[840]\ttraining's binary_error: 0.0053\tvalid_1's binary_error: 0.0823\n",
      "[841]\ttraining's binary_error: 0.0053\tvalid_1's binary_error: 0.0823\n",
      "[842]\ttraining's binary_error: 0.0053\tvalid_1's binary_error: 0.0823\n",
      "[843]\ttraining's binary_error: 0.0053\tvalid_1's binary_error: 0.0824\n",
      "[844]\ttraining's binary_error: 0.005275\tvalid_1's binary_error: 0.0826\n",
      "[845]\ttraining's binary_error: 0.005275\tvalid_1's binary_error: 0.0826\n",
      "[846]\ttraining's binary_error: 0.005275\tvalid_1's binary_error: 0.0826\n",
      "[847]\ttraining's binary_error: 0.0052\tvalid_1's binary_error: 0.0826\n",
      "[848]\ttraining's binary_error: 0.005175\tvalid_1's binary_error: 0.0827\n",
      "[849]\ttraining's binary_error: 0.005125\tvalid_1's binary_error: 0.0827\n",
      "[850]\ttraining's binary_error: 0.005125\tvalid_1's binary_error: 0.0827\n",
      "[851]\ttraining's binary_error: 0.005125\tvalid_1's binary_error: 0.0829\n",
      "[852]\ttraining's binary_error: 0.00505\tvalid_1's binary_error: 0.0829\n",
      "[853]\ttraining's binary_error: 0.005\tvalid_1's binary_error: 0.083\n",
      "[854]\ttraining's binary_error: 0.004975\tvalid_1's binary_error: 0.083\n",
      "[855]\ttraining's binary_error: 0.004925\tvalid_1's binary_error: 0.0828\n",
      "[856]\ttraining's binary_error: 0.00495\tvalid_1's binary_error: 0.0829\n",
      "[857]\ttraining's binary_error: 0.00495\tvalid_1's binary_error: 0.083\n",
      "[858]\ttraining's binary_error: 0.00495\tvalid_1's binary_error: 0.083\n",
      "[859]\ttraining's binary_error: 0.004925\tvalid_1's binary_error: 0.083\n",
      "[860]\ttraining's binary_error: 0.004925\tvalid_1's binary_error: 0.083\n",
      "[861]\ttraining's binary_error: 0.004925\tvalid_1's binary_error: 0.0832\n",
      "[862]\ttraining's binary_error: 0.004925\tvalid_1's binary_error: 0.0832\n",
      "[863]\ttraining's binary_error: 0.0049\tvalid_1's binary_error: 0.0832\n",
      "[864]\ttraining's binary_error: 0.0049\tvalid_1's binary_error: 0.0831\n",
      "[865]\ttraining's binary_error: 0.004875\tvalid_1's binary_error: 0.0831\n",
      "[866]\ttraining's binary_error: 0.00485\tvalid_1's binary_error: 0.0832\n",
      "[867]\ttraining's binary_error: 0.004825\tvalid_1's binary_error: 0.0831\n",
      "[868]\ttraining's binary_error: 0.0048\tvalid_1's binary_error: 0.083\n",
      "[869]\ttraining's binary_error: 0.0048\tvalid_1's binary_error: 0.0827\n",
      "[870]\ttraining's binary_error: 0.004775\tvalid_1's binary_error: 0.0827\n",
      "[871]\ttraining's binary_error: 0.00475\tvalid_1's binary_error: 0.0827\n",
      "[872]\ttraining's binary_error: 0.004725\tvalid_1's binary_error: 0.0827\n",
      "[873]\ttraining's binary_error: 0.004725\tvalid_1's binary_error: 0.0827\n",
      "[874]\ttraining's binary_error: 0.004725\tvalid_1's binary_error: 0.0826\n",
      "[875]\ttraining's binary_error: 0.004675\tvalid_1's binary_error: 0.0826\n",
      "[876]\ttraining's binary_error: 0.004675\tvalid_1's binary_error: 0.0826\n",
      "[877]\ttraining's binary_error: 0.004625\tvalid_1's binary_error: 0.0826\n",
      "[878]\ttraining's binary_error: 0.0046\tvalid_1's binary_error: 0.0827\n",
      "[879]\ttraining's binary_error: 0.004625\tvalid_1's binary_error: 0.0827\n",
      "[880]\ttraining's binary_error: 0.004575\tvalid_1's binary_error: 0.0828\n",
      "[881]\ttraining's binary_error: 0.0046\tvalid_1's binary_error: 0.0828\n",
      "[882]\ttraining's binary_error: 0.0045\tvalid_1's binary_error: 0.0825\n",
      "[883]\ttraining's binary_error: 0.0044\tvalid_1's binary_error: 0.0826\n",
      "[884]\ttraining's binary_error: 0.0044\tvalid_1's binary_error: 0.0827\n",
      "[885]\ttraining's binary_error: 0.0044\tvalid_1's binary_error: 0.083\n",
      "[886]\ttraining's binary_error: 0.00435\tvalid_1's binary_error: 0.083\n",
      "[887]\ttraining's binary_error: 0.00435\tvalid_1's binary_error: 0.0832\n",
      "[888]\ttraining's binary_error: 0.004275\tvalid_1's binary_error: 0.0832\n",
      "[889]\ttraining's binary_error: 0.004225\tvalid_1's binary_error: 0.0832\n",
      "[890]\ttraining's binary_error: 0.004225\tvalid_1's binary_error: 0.0832\n",
      "[891]\ttraining's binary_error: 0.004225\tvalid_1's binary_error: 0.0833\n",
      "[892]\ttraining's binary_error: 0.0042\tvalid_1's binary_error: 0.0833\n",
      "[893]\ttraining's binary_error: 0.0042\tvalid_1's binary_error: 0.0832\n",
      "[894]\ttraining's binary_error: 0.0042\tvalid_1's binary_error: 0.0831\n",
      "[895]\ttraining's binary_error: 0.0042\tvalid_1's binary_error: 0.0831\n",
      "[896]\ttraining's binary_error: 0.0042\tvalid_1's binary_error: 0.0833\n",
      "[897]\ttraining's binary_error: 0.00415\tvalid_1's binary_error: 0.0834\n",
      "[898]\ttraining's binary_error: 0.00415\tvalid_1's binary_error: 0.0834\n",
      "[899]\ttraining's binary_error: 0.004125\tvalid_1's binary_error: 0.0834\n",
      "[900]\ttraining's binary_error: 0.004125\tvalid_1's binary_error: 0.0834\n",
      "[901]\ttraining's binary_error: 0.004125\tvalid_1's binary_error: 0.0834\n",
      "[902]\ttraining's binary_error: 0.004125\tvalid_1's binary_error: 0.0834\n",
      "[903]\ttraining's binary_error: 0.004125\tvalid_1's binary_error: 0.0834\n",
      "[904]\ttraining's binary_error: 0.004125\tvalid_1's binary_error: 0.0834\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[905]\ttraining's binary_error: 0.0041\tvalid_1's binary_error: 0.0834\n",
      "[906]\ttraining's binary_error: 0.00405\tvalid_1's binary_error: 0.0834\n",
      "[907]\ttraining's binary_error: 0.004\tvalid_1's binary_error: 0.0834\n",
      "[908]\ttraining's binary_error: 0.004\tvalid_1's binary_error: 0.0835\n",
      "[909]\ttraining's binary_error: 0.004\tvalid_1's binary_error: 0.0834\n",
      "[910]\ttraining's binary_error: 0.003975\tvalid_1's binary_error: 0.0833\n",
      "[911]\ttraining's binary_error: 0.003925\tvalid_1's binary_error: 0.0834\n",
      "[912]\ttraining's binary_error: 0.003925\tvalid_1's binary_error: 0.0834\n",
      "[913]\ttraining's binary_error: 0.0039\tvalid_1's binary_error: 0.0835\n",
      "[914]\ttraining's binary_error: 0.0039\tvalid_1's binary_error: 0.0835\n",
      "[915]\ttraining's binary_error: 0.0039\tvalid_1's binary_error: 0.0835\n",
      "[916]\ttraining's binary_error: 0.0039\tvalid_1's binary_error: 0.0834\n",
      "[917]\ttraining's binary_error: 0.00385\tvalid_1's binary_error: 0.0833\n",
      "[918]\ttraining's binary_error: 0.00385\tvalid_1's binary_error: 0.0833\n",
      "[919]\ttraining's binary_error: 0.0038\tvalid_1's binary_error: 0.0833\n",
      "[920]\ttraining's binary_error: 0.0038\tvalid_1's binary_error: 0.0833\n",
      "[921]\ttraining's binary_error: 0.00375\tvalid_1's binary_error: 0.0835\n",
      "[922]\ttraining's binary_error: 0.00375\tvalid_1's binary_error: 0.0835\n",
      "[923]\ttraining's binary_error: 0.003725\tvalid_1's binary_error: 0.0835\n",
      "[924]\ttraining's binary_error: 0.003725\tvalid_1's binary_error: 0.0834\n",
      "[925]\ttraining's binary_error: 0.0037\tvalid_1's binary_error: 0.0834\n",
      "[926]\ttraining's binary_error: 0.0037\tvalid_1's binary_error: 0.0835\n",
      "[927]\ttraining's binary_error: 0.003675\tvalid_1's binary_error: 0.0835\n",
      "[928]\ttraining's binary_error: 0.003675\tvalid_1's binary_error: 0.0836\n",
      "[929]\ttraining's binary_error: 0.00365\tvalid_1's binary_error: 0.0836\n",
      "[930]\ttraining's binary_error: 0.00365\tvalid_1's binary_error: 0.0837\n",
      "[931]\ttraining's binary_error: 0.003625\tvalid_1's binary_error: 0.0835\n",
      "[932]\ttraining's binary_error: 0.003625\tvalid_1's binary_error: 0.0834\n",
      "[933]\ttraining's binary_error: 0.003625\tvalid_1's binary_error: 0.0834\n",
      "[934]\ttraining's binary_error: 0.003625\tvalid_1's binary_error: 0.0834\n",
      "[935]\ttraining's binary_error: 0.0036\tvalid_1's binary_error: 0.0834\n",
      "[936]\ttraining's binary_error: 0.0036\tvalid_1's binary_error: 0.0834\n",
      "[937]\ttraining's binary_error: 0.0036\tvalid_1's binary_error: 0.0835\n",
      "[938]\ttraining's binary_error: 0.00355\tvalid_1's binary_error: 0.0835\n",
      "[939]\ttraining's binary_error: 0.00355\tvalid_1's binary_error: 0.0835\n",
      "[940]\ttraining's binary_error: 0.00355\tvalid_1's binary_error: 0.0835\n",
      "[941]\ttraining's binary_error: 0.00355\tvalid_1's binary_error: 0.0836\n",
      "[942]\ttraining's binary_error: 0.0035\tvalid_1's binary_error: 0.0835\n",
      "[943]\ttraining's binary_error: 0.0035\tvalid_1's binary_error: 0.0835\n",
      "[944]\ttraining's binary_error: 0.0035\tvalid_1's binary_error: 0.0835\n",
      "[945]\ttraining's binary_error: 0.003525\tvalid_1's binary_error: 0.0835\n",
      "[946]\ttraining's binary_error: 0.003525\tvalid_1's binary_error: 0.0836\n",
      "[947]\ttraining's binary_error: 0.00345\tvalid_1's binary_error: 0.0835\n",
      "[948]\ttraining's binary_error: 0.00345\tvalid_1's binary_error: 0.0835\n",
      "[949]\ttraining's binary_error: 0.00345\tvalid_1's binary_error: 0.0835\n",
      "[950]\ttraining's binary_error: 0.003475\tvalid_1's binary_error: 0.0834\n",
      "[951]\ttraining's binary_error: 0.00345\tvalid_1's binary_error: 0.0834\n",
      "[952]\ttraining's binary_error: 0.003425\tvalid_1's binary_error: 0.0834\n",
      "[953]\ttraining's binary_error: 0.003425\tvalid_1's binary_error: 0.0834\n",
      "[954]\ttraining's binary_error: 0.003425\tvalid_1's binary_error: 0.0835\n",
      "[955]\ttraining's binary_error: 0.0034\tvalid_1's binary_error: 0.0836\n",
      "[956]\ttraining's binary_error: 0.0034\tvalid_1's binary_error: 0.0835\n",
      "[957]\ttraining's binary_error: 0.00335\tvalid_1's binary_error: 0.0835\n",
      "[958]\ttraining's binary_error: 0.00335\tvalid_1's binary_error: 0.0834\n",
      "[959]\ttraining's binary_error: 0.00335\tvalid_1's binary_error: 0.0834\n",
      "[960]\ttraining's binary_error: 0.00335\tvalid_1's binary_error: 0.0834\n",
      "[961]\ttraining's binary_error: 0.003325\tvalid_1's binary_error: 0.0834\n",
      "[962]\ttraining's binary_error: 0.0033\tvalid_1's binary_error: 0.0834\n",
      "[963]\ttraining's binary_error: 0.003225\tvalid_1's binary_error: 0.0835\n",
      "[964]\ttraining's binary_error: 0.0032\tvalid_1's binary_error: 0.0833\n",
      "[965]\ttraining's binary_error: 0.0032\tvalid_1's binary_error: 0.0832\n",
      "[966]\ttraining's binary_error: 0.0032\tvalid_1's binary_error: 0.0831\n",
      "[967]\ttraining's binary_error: 0.0032\tvalid_1's binary_error: 0.0831\n",
      "[968]\ttraining's binary_error: 0.0032\tvalid_1's binary_error: 0.0832\n",
      "[969]\ttraining's binary_error: 0.003175\tvalid_1's binary_error: 0.0832\n",
      "[970]\ttraining's binary_error: 0.003175\tvalid_1's binary_error: 0.0833\n",
      "[971]\ttraining's binary_error: 0.003175\tvalid_1's binary_error: 0.0832\n",
      "[972]\ttraining's binary_error: 0.0032\tvalid_1's binary_error: 0.0831\n",
      "[973]\ttraining's binary_error: 0.003225\tvalid_1's binary_error: 0.0831\n",
      "[974]\ttraining's binary_error: 0.003225\tvalid_1's binary_error: 0.0832\n",
      "[975]\ttraining's binary_error: 0.003225\tvalid_1's binary_error: 0.0831\n",
      "[976]\ttraining's binary_error: 0.003225\tvalid_1's binary_error: 0.0832\n",
      "[977]\ttraining's binary_error: 0.003175\tvalid_1's binary_error: 0.0832\n",
      "[978]\ttraining's binary_error: 0.003175\tvalid_1's binary_error: 0.0831\n",
      "[979]\ttraining's binary_error: 0.00315\tvalid_1's binary_error: 0.0833\n",
      "[980]\ttraining's binary_error: 0.003075\tvalid_1's binary_error: 0.0834\n",
      "[981]\ttraining's binary_error: 0.003075\tvalid_1's binary_error: 0.0834\n",
      "[982]\ttraining's binary_error: 0.00305\tvalid_1's binary_error: 0.0834\n",
      "[983]\ttraining's binary_error: 0.003025\tvalid_1's binary_error: 0.0832\n",
      "[984]\ttraining's binary_error: 0.00295\tvalid_1's binary_error: 0.083\n",
      "[985]\ttraining's binary_error: 0.0029\tvalid_1's binary_error: 0.083\n",
      "[986]\ttraining's binary_error: 0.002875\tvalid_1's binary_error: 0.083\n",
      "[987]\ttraining's binary_error: 0.002875\tvalid_1's binary_error: 0.0829\n",
      "[988]\ttraining's binary_error: 0.00285\tvalid_1's binary_error: 0.0828\n",
      "[989]\ttraining's binary_error: 0.0028\tvalid_1's binary_error: 0.0828\n",
      "[990]\ttraining's binary_error: 0.002825\tvalid_1's binary_error: 0.0827\n",
      "[991]\ttraining's binary_error: 0.002825\tvalid_1's binary_error: 0.0827\n",
      "[992]\ttraining's binary_error: 0.002825\tvalid_1's binary_error: 0.0825\n",
      "[993]\ttraining's binary_error: 0.002825\tvalid_1's binary_error: 0.0826\n",
      "[994]\ttraining's binary_error: 0.0028\tvalid_1's binary_error: 0.0826\n",
      "[995]\ttraining's binary_error: 0.0028\tvalid_1's binary_error: 0.0826\n",
      "[996]\ttraining's binary_error: 0.002775\tvalid_1's binary_error: 0.0829\n",
      "[997]\ttraining's binary_error: 0.002775\tvalid_1's binary_error: 0.083\n",
      "[998]\ttraining's binary_error: 0.002775\tvalid_1's binary_error: 0.083\n",
      "[999]\ttraining's binary_error: 0.002775\tvalid_1's binary_error: 0.083\n",
      "[1000]\ttraining's binary_error: 0.002775\tvalid_1's binary_error: 0.0829\n",
      "[1001]\ttraining's binary_error: 0.00275\tvalid_1's binary_error: 0.083\n",
      "[1002]\ttraining's binary_error: 0.002725\tvalid_1's binary_error: 0.0831\n",
      "[1003]\ttraining's binary_error: 0.00265\tvalid_1's binary_error: 0.0829\n",
      "[1004]\ttraining's binary_error: 0.00265\tvalid_1's binary_error: 0.0829\n",
      "[1005]\ttraining's binary_error: 0.00265\tvalid_1's binary_error: 0.0829\n",
      "[1006]\ttraining's binary_error: 0.002625\tvalid_1's binary_error: 0.0831\n",
      "[1007]\ttraining's binary_error: 0.002625\tvalid_1's binary_error: 0.0831\n",
      "[1008]\ttraining's binary_error: 0.0026\tvalid_1's binary_error: 0.0831\n",
      "[1009]\ttraining's binary_error: 0.0026\tvalid_1's binary_error: 0.0832\n",
      "[1010]\ttraining's binary_error: 0.0026\tvalid_1's binary_error: 0.0831\n",
      "[1011]\ttraining's binary_error: 0.0026\tvalid_1's binary_error: 0.083\n",
      "[1012]\ttraining's binary_error: 0.0026\tvalid_1's binary_error: 0.0829\n",
      "[1013]\ttraining's binary_error: 0.0026\tvalid_1's binary_error: 0.0829\n",
      "[1014]\ttraining's binary_error: 0.0026\tvalid_1's binary_error: 0.0829\n",
      "[1015]\ttraining's binary_error: 0.0026\tvalid_1's binary_error: 0.0831\n",
      "[1016]\ttraining's binary_error: 0.0026\tvalid_1's binary_error: 0.0831\n",
      "[1017]\ttraining's binary_error: 0.0026\tvalid_1's binary_error: 0.0831\n",
      "[1018]\ttraining's binary_error: 0.0026\tvalid_1's binary_error: 0.0832\n",
      "[1019]\ttraining's binary_error: 0.0026\tvalid_1's binary_error: 0.0832\n",
      "[1020]\ttraining's binary_error: 0.0026\tvalid_1's binary_error: 0.0831\n",
      "[1021]\ttraining's binary_error: 0.0026\tvalid_1's binary_error: 0.0831\n",
      "[1022]\ttraining's binary_error: 0.002575\tvalid_1's binary_error: 0.0831\n",
      "[1023]\ttraining's binary_error: 0.00255\tvalid_1's binary_error: 0.083\n",
      "[1024]\ttraining's binary_error: 0.00255\tvalid_1's binary_error: 0.0828\n",
      "[1025]\ttraining's binary_error: 0.002525\tvalid_1's binary_error: 0.0828\n",
      "[1026]\ttraining's binary_error: 0.002525\tvalid_1's binary_error: 0.0827\n",
      "[1027]\ttraining's binary_error: 0.002525\tvalid_1's binary_error: 0.0827\n",
      "[1028]\ttraining's binary_error: 0.0025\tvalid_1's binary_error: 0.0827\n",
      "[1029]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.0828\n",
      "[1030]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.0829\n",
      "[1031]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.0828\n",
      "[1032]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.0829\n",
      "[1033]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.0829\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[1034]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.0829\n",
      "[1035]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.0829\n",
      "[1036]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.0831\n",
      "[1037]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.0832\n",
      "[1038]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.0832\n",
      "[1039]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.0832\n",
      "[1040]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.0831\n",
      "[1041]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.0831\n",
      "[1042]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.083\n",
      "[1043]\ttraining's binary_error: 0.002475\tvalid_1's binary_error: 0.0831\n",
      "[1044]\ttraining's binary_error: 0.00245\tvalid_1's binary_error: 0.083\n",
      "[1045]\ttraining's binary_error: 0.00245\tvalid_1's binary_error: 0.0827\n",
      "[1046]\ttraining's binary_error: 0.00245\tvalid_1's binary_error: 0.0827\n",
      "[1047]\ttraining's binary_error: 0.00245\tvalid_1's binary_error: 0.0828\n",
      "[1048]\ttraining's binary_error: 0.00245\tvalid_1's binary_error: 0.0828\n",
      "[1049]\ttraining's binary_error: 0.0024\tvalid_1's binary_error: 0.0829\n",
      "[1050]\ttraining's binary_error: 0.0024\tvalid_1's binary_error: 0.0828\n",
      "[1051]\ttraining's binary_error: 0.002375\tvalid_1's binary_error: 0.0829\n",
      "[1052]\ttraining's binary_error: 0.0023\tvalid_1's binary_error: 0.0828\n",
      "[1053]\ttraining's binary_error: 0.002325\tvalid_1's binary_error: 0.0828\n",
      "[1054]\ttraining's binary_error: 0.002275\tvalid_1's binary_error: 0.0826\n",
      "[1055]\ttraining's binary_error: 0.00225\tvalid_1's binary_error: 0.0828\n",
      "[1056]\ttraining's binary_error: 0.00225\tvalid_1's binary_error: 0.0829\n",
      "[1057]\ttraining's binary_error: 0.0022\tvalid_1's binary_error: 0.0829\n",
      "[1058]\ttraining's binary_error: 0.002175\tvalid_1's binary_error: 0.0829\n",
      "[1059]\ttraining's binary_error: 0.002175\tvalid_1's binary_error: 0.0828\n",
      "[1060]\ttraining's binary_error: 0.002175\tvalid_1's binary_error: 0.0829\n",
      "[1061]\ttraining's binary_error: 0.00215\tvalid_1's binary_error: 0.0831\n",
      "[1062]\ttraining's binary_error: 0.00215\tvalid_1's binary_error: 0.0831\n",
      "[1063]\ttraining's binary_error: 0.0021\tvalid_1's binary_error: 0.0831\n",
      "[1064]\ttraining's binary_error: 0.00205\tvalid_1's binary_error: 0.0831\n",
      "[1065]\ttraining's binary_error: 0.00205\tvalid_1's binary_error: 0.0831\n",
      "[1066]\ttraining's binary_error: 0.00205\tvalid_1's binary_error: 0.0833\n",
      "[1067]\ttraining's binary_error: 0.002025\tvalid_1's binary_error: 0.0833\n",
      "[1068]\ttraining's binary_error: 0.002025\tvalid_1's binary_error: 0.0832\n",
      "[1069]\ttraining's binary_error: 0.002\tvalid_1's binary_error: 0.0832\n",
      "[1070]\ttraining's binary_error: 0.002\tvalid_1's binary_error: 0.0831\n",
      "[1071]\ttraining's binary_error: 0.002\tvalid_1's binary_error: 0.0834\n",
      "[1072]\ttraining's binary_error: 0.00195\tvalid_1's binary_error: 0.0837\n",
      "[1073]\ttraining's binary_error: 0.00195\tvalid_1's binary_error: 0.0835\n",
      "[1074]\ttraining's binary_error: 0.00195\tvalid_1's binary_error: 0.0835\n",
      "[1075]\ttraining's binary_error: 0.00195\tvalid_1's binary_error: 0.0833\n",
      "[1076]\ttraining's binary_error: 0.00195\tvalid_1's binary_error: 0.0834\n",
      "[1077]\ttraining's binary_error: 0.00195\tvalid_1's binary_error: 0.0835\n",
      "[1078]\ttraining's binary_error: 0.00195\tvalid_1's binary_error: 0.0835\n",
      "[1079]\ttraining's binary_error: 0.00195\tvalid_1's binary_error: 0.0836\n",
      "[1080]\ttraining's binary_error: 0.001925\tvalid_1's binary_error: 0.0835\n",
      "[1081]\ttraining's binary_error: 0.00185\tvalid_1's binary_error: 0.0834\n",
      "[1082]\ttraining's binary_error: 0.00185\tvalid_1's binary_error: 0.0834\n",
      "[1083]\ttraining's binary_error: 0.00185\tvalid_1's binary_error: 0.0835\n",
      "[1084]\ttraining's binary_error: 0.00185\tvalid_1's binary_error: 0.0835\n",
      "[1085]\ttraining's binary_error: 0.001875\tvalid_1's binary_error: 0.0836\n",
      "[1086]\ttraining's binary_error: 0.0019\tvalid_1's binary_error: 0.0836\n",
      "[1087]\ttraining's binary_error: 0.00185\tvalid_1's binary_error: 0.0835\n",
      "[1088]\ttraining's binary_error: 0.001825\tvalid_1's binary_error: 0.0835\n",
      "[1089]\ttraining's binary_error: 0.0018\tvalid_1's binary_error: 0.0834\n",
      "[1090]\ttraining's binary_error: 0.0018\tvalid_1's binary_error: 0.0834\n",
      "[1091]\ttraining's binary_error: 0.0018\tvalid_1's binary_error: 0.0834\n",
      "[1092]\ttraining's binary_error: 0.0018\tvalid_1's binary_error: 0.0833\n",
      "[1093]\ttraining's binary_error: 0.001775\tvalid_1's binary_error: 0.0833\n",
      "[1094]\ttraining's binary_error: 0.00175\tvalid_1's binary_error: 0.0833\n",
      "[1095]\ttraining's binary_error: 0.00175\tvalid_1's binary_error: 0.0833\n",
      "[1096]\ttraining's binary_error: 0.00175\tvalid_1's binary_error: 0.0834\n",
      "[1097]\ttraining's binary_error: 0.00175\tvalid_1's binary_error: 0.0836\n",
      "[1098]\ttraining's binary_error: 0.001725\tvalid_1's binary_error: 0.0836\n",
      "[1099]\ttraining's binary_error: 0.001725\tvalid_1's binary_error: 0.0836\n",
      "[1100]\ttraining's binary_error: 0.001725\tvalid_1's binary_error: 0.0836\n",
      "[1101]\ttraining's binary_error: 0.001725\tvalid_1's binary_error: 0.0836\n",
      "[1102]\ttraining's binary_error: 0.001725\tvalid_1's binary_error: 0.0836\n",
      "[1103]\ttraining's binary_error: 0.0017\tvalid_1's binary_error: 0.0836\n",
      "[1104]\ttraining's binary_error: 0.0017\tvalid_1's binary_error: 0.0837\n",
      "[1105]\ttraining's binary_error: 0.001675\tvalid_1's binary_error: 0.0837\n",
      "[1106]\ttraining's binary_error: 0.001675\tvalid_1's binary_error: 0.0834\n",
      "[1107]\ttraining's binary_error: 0.001675\tvalid_1's binary_error: 0.0834\n",
      "[1108]\ttraining's binary_error: 0.001675\tvalid_1's binary_error: 0.0834\n",
      "[1109]\ttraining's binary_error: 0.001675\tvalid_1's binary_error: 0.0834\n",
      "[1110]\ttraining's binary_error: 0.001675\tvalid_1's binary_error: 0.0833\n",
      "[1111]\ttraining's binary_error: 0.0016\tvalid_1's binary_error: 0.0833\n",
      "[1112]\ttraining's binary_error: 0.0016\tvalid_1's binary_error: 0.0832\n",
      "[1113]\ttraining's binary_error: 0.0016\tvalid_1's binary_error: 0.0834\n",
      "[1114]\ttraining's binary_error: 0.0016\tvalid_1's binary_error: 0.0835\n",
      "[1115]\ttraining's binary_error: 0.0016\tvalid_1's binary_error: 0.0834\n",
      "[1116]\ttraining's binary_error: 0.0016\tvalid_1's binary_error: 0.0834\n",
      "[1117]\ttraining's binary_error: 0.001575\tvalid_1's binary_error: 0.0835\n",
      "[1118]\ttraining's binary_error: 0.001575\tvalid_1's binary_error: 0.0835\n",
      "[1119]\ttraining's binary_error: 0.0016\tvalid_1's binary_error: 0.0834\n",
      "[1120]\ttraining's binary_error: 0.0016\tvalid_1's binary_error: 0.0834\n",
      "[1121]\ttraining's binary_error: 0.001575\tvalid_1's binary_error: 0.0834\n",
      "[1122]\ttraining's binary_error: 0.00155\tvalid_1's binary_error: 0.0833\n",
      "[1123]\ttraining's binary_error: 0.00155\tvalid_1's binary_error: 0.0832\n",
      "[1124]\ttraining's binary_error: 0.00155\tvalid_1's binary_error: 0.0831\n",
      "[1125]\ttraining's binary_error: 0.0015\tvalid_1's binary_error: 0.0828\n",
      "[1126]\ttraining's binary_error: 0.00145\tvalid_1's binary_error: 0.0829\n",
      "[1127]\ttraining's binary_error: 0.00145\tvalid_1's binary_error: 0.0828\n",
      "[1128]\ttraining's binary_error: 0.00145\tvalid_1's binary_error: 0.0829\n",
      "[1129]\ttraining's binary_error: 0.00145\tvalid_1's binary_error: 0.0829\n",
      "[1130]\ttraining's binary_error: 0.001425\tvalid_1's binary_error: 0.0831\n",
      "[1131]\ttraining's binary_error: 0.001425\tvalid_1's binary_error: 0.0832\n",
      "[1132]\ttraining's binary_error: 0.00135\tvalid_1's binary_error: 0.0833\n",
      "[1133]\ttraining's binary_error: 0.001325\tvalid_1's binary_error: 0.0832\n",
      "[1134]\ttraining's binary_error: 0.001325\tvalid_1's binary_error: 0.083\n",
      "[1135]\ttraining's binary_error: 0.0013\tvalid_1's binary_error: 0.0829\n",
      "[1136]\ttraining's binary_error: 0.0013\tvalid_1's binary_error: 0.0829\n",
      "[1137]\ttraining's binary_error: 0.001275\tvalid_1's binary_error: 0.0828\n",
      "[1138]\ttraining's binary_error: 0.00125\tvalid_1's binary_error: 0.0828\n",
      "[1139]\ttraining's binary_error: 0.00125\tvalid_1's binary_error: 0.0829\n",
      "[1140]\ttraining's binary_error: 0.00125\tvalid_1's binary_error: 0.083\n",
      "[1141]\ttraining's binary_error: 0.00125\tvalid_1's binary_error: 0.083\n",
      "[1142]\ttraining's binary_error: 0.001225\tvalid_1's binary_error: 0.083\n",
      "[1143]\ttraining's binary_error: 0.001225\tvalid_1's binary_error: 0.083\n",
      "[1144]\ttraining's binary_error: 0.001225\tvalid_1's binary_error: 0.0829\n",
      "[1145]\ttraining's binary_error: 0.001225\tvalid_1's binary_error: 0.083\n",
      "[1146]\ttraining's binary_error: 0.001225\tvalid_1's binary_error: 0.0832\n",
      "[1147]\ttraining's binary_error: 0.001225\tvalid_1's binary_error: 0.0831\n",
      "[1148]\ttraining's binary_error: 0.001225\tvalid_1's binary_error: 0.0831\n",
      "[1149]\ttraining's binary_error: 0.001225\tvalid_1's binary_error: 0.0831\n",
      "[1150]\ttraining's binary_error: 0.0012\tvalid_1's binary_error: 0.0832\n",
      "[1151]\ttraining's binary_error: 0.0012\tvalid_1's binary_error: 0.0832\n",
      "[1152]\ttraining's binary_error: 0.0012\tvalid_1's binary_error: 0.0834\n",
      "[1153]\ttraining's binary_error: 0.001175\tvalid_1's binary_error: 0.0832\n",
      "[1154]\ttraining's binary_error: 0.001175\tvalid_1's binary_error: 0.0831\n",
      "[1155]\ttraining's binary_error: 0.001175\tvalid_1's binary_error: 0.0833\n",
      "[1156]\ttraining's binary_error: 0.001175\tvalid_1's binary_error: 0.0833\n",
      "[1157]\ttraining's binary_error: 0.00115\tvalid_1's binary_error: 0.0833\n",
      "[1158]\ttraining's binary_error: 0.001125\tvalid_1's binary_error: 0.0833\n",
      "[1159]\ttraining's binary_error: 0.001125\tvalid_1's binary_error: 0.0833\n",
      "[1160]\ttraining's binary_error: 0.001125\tvalid_1's binary_error: 0.0834\n",
      "[1161]\ttraining's binary_error: 0.001125\tvalid_1's binary_error: 0.0836\n",
      "[1162]\ttraining's binary_error: 0.001125\tvalid_1's binary_error: 0.0836\n",
      "[1163]\ttraining's binary_error: 0.001125\tvalid_1's binary_error: 0.0836\n",
      "[1164]\ttraining's binary_error: 0.0011\tvalid_1's binary_error: 0.0836\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[1165]\ttraining's binary_error: 0.0011\tvalid_1's binary_error: 0.0835\n",
      "[1166]\ttraining's binary_error: 0.0011\tvalid_1's binary_error: 0.0834\n",
      "[1167]\ttraining's binary_error: 0.0011\tvalid_1's binary_error: 0.0833\n",
      "[1168]\ttraining's binary_error: 0.0011\tvalid_1's binary_error: 0.0833\n",
      "[1169]\ttraining's binary_error: 0.001075\tvalid_1's binary_error: 0.0834\n",
      "[1170]\ttraining's binary_error: 0.001075\tvalid_1's binary_error: 0.0834\n",
      "[1171]\ttraining's binary_error: 0.001075\tvalid_1's binary_error: 0.0834\n",
      "[1172]\ttraining's binary_error: 0.001075\tvalid_1's binary_error: 0.0833\n",
      "[1173]\ttraining's binary_error: 0.001075\tvalid_1's binary_error: 0.0833\n",
      "[1174]\ttraining's binary_error: 0.001075\tvalid_1's binary_error: 0.0833\n",
      "[1175]\ttraining's binary_error: 0.001075\tvalid_1's binary_error: 0.0834\n",
      "[1176]\ttraining's binary_error: 0.001075\tvalid_1's binary_error: 0.0833\n",
      "[1177]\ttraining's binary_error: 0.0011\tvalid_1's binary_error: 0.0833\n",
      "[1178]\ttraining's binary_error: 0.0011\tvalid_1's binary_error: 0.0833\n",
      "[1179]\ttraining's binary_error: 0.001075\tvalid_1's binary_error: 0.0833\n",
      "[1180]\ttraining's binary_error: 0.00105\tvalid_1's binary_error: 0.0832\n",
      "[1181]\ttraining's binary_error: 0.001025\tvalid_1's binary_error: 0.0831\n",
      "[1182]\ttraining's binary_error: 0.001025\tvalid_1's binary_error: 0.0832\n",
      "[1183]\ttraining's binary_error: 0.001025\tvalid_1's binary_error: 0.0833\n",
      "[1184]\ttraining's binary_error: 0.00105\tvalid_1's binary_error: 0.0833\n",
      "[1185]\ttraining's binary_error: 0.001\tvalid_1's binary_error: 0.0833\n",
      "[1186]\ttraining's binary_error: 0.001\tvalid_1's binary_error: 0.0834\n",
      "[1187]\ttraining's binary_error: 0.001\tvalid_1's binary_error: 0.0834\n",
      "[1188]\ttraining's binary_error: 0.001\tvalid_1's binary_error: 0.0833\n",
      "[1189]\ttraining's binary_error: 0.001\tvalid_1's binary_error: 0.0832\n",
      "[1190]\ttraining's binary_error: 0.001\tvalid_1's binary_error: 0.0832\n",
      "[1191]\ttraining's binary_error: 0.000975\tvalid_1's binary_error: 0.0832\n",
      "[1192]\ttraining's binary_error: 0.000975\tvalid_1's binary_error: 0.0832\n",
      "[1193]\ttraining's binary_error: 0.00095\tvalid_1's binary_error: 0.0832\n",
      "[1194]\ttraining's binary_error: 0.00095\tvalid_1's binary_error: 0.0832\n",
      "[1195]\ttraining's binary_error: 0.000925\tvalid_1's binary_error: 0.0831\n",
      "[1196]\ttraining's binary_error: 0.000925\tvalid_1's binary_error: 0.0831\n",
      "[1197]\ttraining's binary_error: 0.0009\tvalid_1's binary_error: 0.0828\n",
      "[1198]\ttraining's binary_error: 0.0009\tvalid_1's binary_error: 0.0828\n",
      "[1199]\ttraining's binary_error: 0.0009\tvalid_1's binary_error: 0.083\n",
      "[1200]\ttraining's binary_error: 0.0009\tvalid_1's binary_error: 0.0831\n",
      "[1201]\ttraining's binary_error: 0.0009\tvalid_1's binary_error: 0.0831\n",
      "[1202]\ttraining's binary_error: 0.0009\tvalid_1's binary_error: 0.0831\n",
      "[1203]\ttraining's binary_error: 0.000875\tvalid_1's binary_error: 0.0831\n",
      "[1204]\ttraining's binary_error: 0.000875\tvalid_1's binary_error: 0.0832\n",
      "[1205]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0832\n",
      "[1206]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0832\n",
      "[1207]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0832\n",
      "[1208]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0833\n",
      "[1209]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0832\n",
      "[1210]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0831\n",
      "[1211]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0831\n",
      "[1212]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0831\n",
      "[1213]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0831\n",
      "[1214]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0832\n",
      "[1215]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0833\n",
      "[1216]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0834\n",
      "[1217]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0832\n",
      "[1218]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0832\n",
      "[1219]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0834\n",
      "[1220]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0834\n",
      "[1221]\ttraining's binary_error: 0.000875\tvalid_1's binary_error: 0.0832\n",
      "[1222]\ttraining's binary_error: 0.000875\tvalid_1's binary_error: 0.0833\n",
      "[1223]\ttraining's binary_error: 0.000875\tvalid_1's binary_error: 0.0833\n",
      "[1224]\ttraining's binary_error: 0.000875\tvalid_1's binary_error: 0.0833\n",
      "[1225]\ttraining's binary_error: 0.000875\tvalid_1's binary_error: 0.0833\n",
      "[1226]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0833\n",
      "[1227]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0834\n",
      "[1228]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0833\n",
      "[1229]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0834\n",
      "[1230]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0833\n",
      "[1231]\ttraining's binary_error: 0.00085\tvalid_1's binary_error: 0.0833\n",
      "[1232]\ttraining's binary_error: 0.000825\tvalid_1's binary_error: 0.0834\n",
      "[1233]\ttraining's binary_error: 0.000775\tvalid_1's binary_error: 0.0834\n",
      "[1234]\ttraining's binary_error: 0.000775\tvalid_1's binary_error: 0.0835\n",
      "[1235]\ttraining's binary_error: 0.000775\tvalid_1's binary_error: 0.0835\n",
      "[1236]\ttraining's binary_error: 0.000775\tvalid_1's binary_error: 0.0835\n",
      "[1237]\ttraining's binary_error: 0.000775\tvalid_1's binary_error: 0.0835\n",
      "[1238]\ttraining's binary_error: 0.00075\tvalid_1's binary_error: 0.0835\n",
      "[1239]\ttraining's binary_error: 0.00075\tvalid_1's binary_error: 0.0835\n",
      "[1240]\ttraining's binary_error: 0.000725\tvalid_1's binary_error: 0.0835\n",
      "[1241]\ttraining's binary_error: 0.0007\tvalid_1's binary_error: 0.0835\n",
      "[1242]\ttraining's binary_error: 0.000675\tvalid_1's binary_error: 0.0835\n",
      "[1243]\ttraining's binary_error: 0.000675\tvalid_1's binary_error: 0.0835\n",
      "[1244]\ttraining's binary_error: 0.000675\tvalid_1's binary_error: 0.0833\n",
      "[1245]\ttraining's binary_error: 0.000675\tvalid_1's binary_error: 0.0834\n",
      "[1246]\ttraining's binary_error: 0.000675\tvalid_1's binary_error: 0.0837\n",
      "[1247]\ttraining's binary_error: 0.000675\tvalid_1's binary_error: 0.0838\n",
      "[1248]\ttraining's binary_error: 0.000675\tvalid_1's binary_error: 0.0838\n",
      "[1249]\ttraining's binary_error: 0.000675\tvalid_1's binary_error: 0.0835\n",
      "[1250]\ttraining's binary_error: 0.000675\tvalid_1's binary_error: 0.0834\n",
      "[1251]\ttraining's binary_error: 0.000675\tvalid_1's binary_error: 0.0834\n",
      "[1252]\ttraining's binary_error: 0.000675\tvalid_1's binary_error: 0.0834\n",
      "[1253]\ttraining's binary_error: 0.000675\tvalid_1's binary_error: 0.0833\n",
      "[1254]\ttraining's binary_error: 0.000675\tvalid_1's binary_error: 0.0834\n",
      "[1255]\ttraining's binary_error: 0.000675\tvalid_1's binary_error: 0.0833\n",
      "[1256]\ttraining's binary_error: 0.00065\tvalid_1's binary_error: 0.0833\n",
      "[1257]\ttraining's binary_error: 0.00065\tvalid_1's binary_error: 0.0833\n",
      "[1258]\ttraining's binary_error: 0.00065\tvalid_1's binary_error: 0.0832\n",
      "[1259]\ttraining's binary_error: 0.00065\tvalid_1's binary_error: 0.0831\n",
      "[1260]\ttraining's binary_error: 0.00065\tvalid_1's binary_error: 0.0831\n",
      "[1261]\ttraining's binary_error: 0.00065\tvalid_1's binary_error: 0.0831\n",
      "[1262]\ttraining's binary_error: 0.000625\tvalid_1's binary_error: 0.0831\n",
      "[1263]\ttraining's binary_error: 0.000625\tvalid_1's binary_error: 0.0831\n",
      "[1264]\ttraining's binary_error: 0.0006\tvalid_1's binary_error: 0.0829\n",
      "[1265]\ttraining's binary_error: 0.0006\tvalid_1's binary_error: 0.0829\n",
      "[1266]\ttraining's binary_error: 0.0006\tvalid_1's binary_error: 0.0829\n",
      "[1267]\ttraining's binary_error: 0.0006\tvalid_1's binary_error: 0.0828\n",
      "[1268]\ttraining's binary_error: 0.0006\tvalid_1's binary_error: 0.0829\n",
      "[1269]\ttraining's binary_error: 0.0006\tvalid_1's binary_error: 0.083\n",
      "[1270]\ttraining's binary_error: 0.0006\tvalid_1's binary_error: 0.0831\n",
      "[1271]\ttraining's binary_error: 0.0006\tvalid_1's binary_error: 0.0829\n",
      "[1272]\ttraining's binary_error: 0.0006\tvalid_1's binary_error: 0.0828\n",
      "[1273]\ttraining's binary_error: 0.0006\tvalid_1's binary_error: 0.0828\n",
      "[1274]\ttraining's binary_error: 0.0006\tvalid_1's binary_error: 0.0828\n",
      "[1275]\ttraining's binary_error: 0.000575\tvalid_1's binary_error: 0.0829\n",
      "[1276]\ttraining's binary_error: 0.000575\tvalid_1's binary_error: 0.083\n",
      "[1277]\ttraining's binary_error: 0.000575\tvalid_1's binary_error: 0.083\n",
      "[1278]\ttraining's binary_error: 0.000575\tvalid_1's binary_error: 0.083\n",
      "[1279]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.083\n",
      "[1280]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0832\n",
      "[1281]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0832\n",
      "[1282]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0832\n",
      "[1283]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0832\n",
      "[1284]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0833\n",
      "[1285]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0833\n",
      "[1286]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0833\n",
      "[1287]\ttraining's binary_error: 0.000575\tvalid_1's binary_error: 0.0834\n",
      "[1288]\ttraining's binary_error: 0.000575\tvalid_1's binary_error: 0.0835\n",
      "[1289]\ttraining's binary_error: 0.000575\tvalid_1's binary_error: 0.0834\n",
      "[1290]\ttraining's binary_error: 0.000575\tvalid_1's binary_error: 0.0833\n",
      "[1291]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0833\n",
      "[1292]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0833\n",
      "[1293]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0833\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[1294]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0833\n",
      "[1295]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0833\n",
      "[1296]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0833\n",
      "[1297]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0833\n",
      "[1298]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0833\n",
      "[1299]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0834\n",
      "[1300]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0837\n",
      "[1301]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1302]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1303]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1304]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0839\n",
      "[1305]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1306]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1307]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1308]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1309]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1310]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1311]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1312]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1313]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1314]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1315]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.084\n",
      "[1316]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.084\n",
      "[1317]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.084\n",
      "[1318]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0841\n",
      "[1319]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0841\n",
      "[1320]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.084\n",
      "[1321]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0841\n",
      "[1322]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.084\n",
      "[1323]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1324]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0839\n",
      "[1325]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0839\n",
      "[1326]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1327]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1328]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1329]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1330]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1331]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1332]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1333]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1334]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1335]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1336]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1337]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1338]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1339]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1340]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0836\n",
      "[1341]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0836\n",
      "[1342]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0835\n",
      "[1343]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0835\n",
      "[1344]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0835\n",
      "[1345]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0834\n",
      "[1346]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0834\n",
      "[1347]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0835\n",
      "[1348]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0835\n",
      "[1349]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0836\n",
      "[1350]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0836\n",
      "[1351]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0836\n",
      "[1352]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0836\n",
      "[1353]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0836\n",
      "[1354]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0839\n",
      "[1355]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1356]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0839\n",
      "[1357]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1358]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0839\n",
      "[1359]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0839\n",
      "[1360]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0839\n",
      "[1361]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1362]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1363]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1364]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0839\n",
      "[1365]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0839\n",
      "[1366]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0838\n",
      "[1367]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0838\n",
      "[1368]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0837\n",
      "[1369]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0835\n",
      "[1370]\ttraining's binary_error: 0.00055\tvalid_1's binary_error: 0.0836\n",
      "[1371]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0835\n",
      "[1372]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0836\n",
      "[1373]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1374]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1375]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0836\n",
      "[1376]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0836\n",
      "[1377]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0834\n",
      "[1378]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0834\n",
      "[1379]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0834\n",
      "[1380]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0834\n",
      "[1381]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0835\n",
      "[1382]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0835\n",
      "[1383]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0836\n",
      "[1384]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0838\n",
      "[1385]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1386]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1387]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0839\n",
      "[1388]\ttraining's binary_error: 0.000525\tvalid_1's binary_error: 0.0837\n",
      "[1389]\ttraining's binary_error: 0.0005\tvalid_1's binary_error: 0.0836\n",
      "[1390]\ttraining's binary_error: 0.000475\tvalid_1's binary_error: 0.0834\n",
      "[1391]\ttraining's binary_error: 0.000475\tvalid_1's binary_error: 0.0833\n",
      "[1392]\ttraining's binary_error: 0.000475\tvalid_1's binary_error: 0.0832\n",
      "[1393]\ttraining's binary_error: 0.000475\tvalid_1's binary_error: 0.0833\n",
      "[1394]\ttraining's binary_error: 0.000475\tvalid_1's binary_error: 0.0833\n",
      "[1395]\ttraining's binary_error: 0.000475\tvalid_1's binary_error: 0.0834\n",
      "[1396]\ttraining's binary_error: 0.000475\tvalid_1's binary_error: 0.0836\n",
      "[1397]\ttraining's binary_error: 0.00045\tvalid_1's binary_error: 0.0837\n",
      "[1398]\ttraining's binary_error: 0.00045\tvalid_1's binary_error: 0.0837\n",
      "[1399]\ttraining's binary_error: 0.00045\tvalid_1's binary_error: 0.0836\n",
      "[1400]\ttraining's binary_error: 0.00045\tvalid_1's binary_error: 0.0836\n",
      "[1401]\ttraining's binary_error: 0.00045\tvalid_1's binary_error: 0.0835\n",
      "[1402]\ttraining's binary_error: 0.00045\tvalid_1's binary_error: 0.0835\n",
      "[1403]\ttraining's binary_error: 0.000425\tvalid_1's binary_error: 0.0835\n",
      "[1404]\ttraining's binary_error: 0.000425\tvalid_1's binary_error: 0.0836\n",
      "[1405]\ttraining's binary_error: 0.000425\tvalid_1's binary_error: 0.0836\n",
      "[1406]\ttraining's binary_error: 0.000425\tvalid_1's binary_error: 0.0836\n",
      "[1407]\ttraining's binary_error: 0.000425\tvalid_1's binary_error: 0.0836\n",
      "[1408]\ttraining's binary_error: 0.000425\tvalid_1's binary_error: 0.0836\n",
      "[1409]\ttraining's binary_error: 0.000425\tvalid_1's binary_error: 0.0836\n",
      "[1410]\ttraining's binary_error: 0.000425\tvalid_1's binary_error: 0.0836\n",
      "[1411]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0835\n",
      "[1412]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0835\n",
      "[1413]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0835\n",
      "[1414]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0836\n",
      "[1415]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0837\n",
      "[1416]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0837\n",
      "[1417]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0836\n",
      "[1418]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0837\n",
      "[1419]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0837\n",
      "[1420]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0836\n",
      "[1421]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0835\n",
      "[1422]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0834\n",
      "[1423]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0834\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[1424]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0836\n",
      "[1425]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0836\n",
      "[1426]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0837\n",
      "[1427]\ttraining's binary_error: 0.0004\tvalid_1's binary_error: 0.0838\n",
      "[1428]\ttraining's binary_error: 0.000375\tvalid_1's binary_error: 0.0838\n",
      "[1429]\ttraining's binary_error: 0.000375\tvalid_1's binary_error: 0.0839\n",
      "[1430]\ttraining's binary_error: 0.000375\tvalid_1's binary_error: 0.0839\n",
      "[1431]\ttraining's binary_error: 0.000375\tvalid_1's binary_error: 0.0839\n",
      "[1432]\ttraining's binary_error: 0.000375\tvalid_1's binary_error: 0.0839\n",
      "[1433]\ttraining's binary_error: 0.000375\tvalid_1's binary_error: 0.0839\n",
      "[1434]\ttraining's binary_error: 0.000375\tvalid_1's binary_error: 0.0838\n",
      "[1435]\ttraining's binary_error: 0.000375\tvalid_1's binary_error: 0.0838\n",
      "[1436]\ttraining's binary_error: 0.000375\tvalid_1's binary_error: 0.0838\n",
      "[1437]\ttraining's binary_error: 0.000375\tvalid_1's binary_error: 0.0838\n",
      "[1438]\ttraining's binary_error: 0.000375\tvalid_1's binary_error: 0.0837\n",
      "[1439]\ttraining's binary_error: 0.000375\tvalid_1's binary_error: 0.0837\n",
      "[1440]\ttraining's binary_error: 0.000375\tvalid_1's binary_error: 0.0838\n",
      "[1441]\ttraining's binary_error: 0.000375\tvalid_1's binary_error: 0.0838\n",
      "[1442]\ttraining's binary_error: 0.00035\tvalid_1's binary_error: 0.0836\n",
      "[1443]\ttraining's binary_error: 0.00035\tvalid_1's binary_error: 0.0834\n",
      "[1444]\ttraining's binary_error: 0.00035\tvalid_1's binary_error: 0.0835\n",
      "[1445]\ttraining's binary_error: 0.00035\tvalid_1's binary_error: 0.0834\n",
      "[1446]\ttraining's binary_error: 0.00035\tvalid_1's binary_error: 0.0835\n",
      "[1447]\ttraining's binary_error: 0.00035\tvalid_1's binary_error: 0.0836\n",
      "[1448]\ttraining's binary_error: 0.00035\tvalid_1's binary_error: 0.0837\n",
      "[1449]\ttraining's binary_error: 0.00035\tvalid_1's binary_error: 0.0838\n",
      "[1450]\ttraining's binary_error: 0.00035\tvalid_1's binary_error: 0.0837\n",
      "[1451]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0837\n",
      "[1452]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0836\n",
      "[1453]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0836\n",
      "[1454]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0835\n",
      "[1455]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0834\n",
      "[1456]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0835\n",
      "[1457]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0834\n",
      "[1458]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0834\n",
      "[1459]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0833\n",
      "[1460]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0833\n",
      "[1461]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0833\n",
      "[1462]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0833\n",
      "[1463]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0832\n",
      "[1464]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0833\n",
      "[1465]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0833\n",
      "[1466]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0834\n",
      "[1467]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0834\n",
      "[1468]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0834\n",
      "[1469]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0834\n",
      "[1470]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0834\n",
      "[1471]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0832\n",
      "[1472]\ttraining's binary_error: 0.0003\tvalid_1's binary_error: 0.0831\n",
      "[1473]\ttraining's binary_error: 0.0003\tvalid_1's binary_error: 0.0831\n",
      "[1474]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0831\n",
      "[1475]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0832\n",
      "[1476]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0832\n",
      "[1477]\ttraining's binary_error: 0.0003\tvalid_1's binary_error: 0.0832\n",
      "[1478]\ttraining's binary_error: 0.0003\tvalid_1's binary_error: 0.0832\n",
      "[1479]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0832\n",
      "[1480]\ttraining's binary_error: 0.000325\tvalid_1's binary_error: 0.0832\n",
      "[1481]\ttraining's binary_error: 0.000275\tvalid_1's binary_error: 0.0834\n",
      "[1482]\ttraining's binary_error: 0.000275\tvalid_1's binary_error: 0.0834\n",
      "[1483]\ttraining's binary_error: 0.000275\tvalid_1's binary_error: 0.0834\n",
      "[1484]\ttraining's binary_error: 0.000275\tvalid_1's binary_error: 0.0834\n",
      "[1485]\ttraining's binary_error: 0.000275\tvalid_1's binary_error: 0.0835\n",
      "[1486]\ttraining's binary_error: 0.000275\tvalid_1's binary_error: 0.0834\n",
      "[1487]\ttraining's binary_error: 0.0003\tvalid_1's binary_error: 0.0835\n",
      "[1488]\ttraining's binary_error: 0.0003\tvalid_1's binary_error: 0.0834\n",
      "[1489]\ttraining's binary_error: 0.0003\tvalid_1's binary_error: 0.0834\n",
      "[1490]\ttraining's binary_error: 0.0003\tvalid_1's binary_error: 0.0834\n",
      "[1491]\ttraining's binary_error: 0.0003\tvalid_1's binary_error: 0.0834\n",
      "[1492]\ttraining's binary_error: 0.0003\tvalid_1's binary_error: 0.0834\n",
      "[1493]\ttraining's binary_error: 0.0003\tvalid_1's binary_error: 0.0834\n",
      "[1494]\ttraining's binary_error: 0.0003\tvalid_1's binary_error: 0.0834\n",
      "[1495]\ttraining's binary_error: 0.0003\tvalid_1's binary_error: 0.0834\n",
      "[1496]\ttraining's binary_error: 0.0003\tvalid_1's binary_error: 0.0832\n",
      "[1497]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0831\n",
      "[1498]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0832\n",
      "[1499]\ttraining's binary_error: 0.00025\tvalid_1's binary_error: 0.0833\n",
      "[1500]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0833\n",
      "[1501]\ttraining's binary_error: 0.00025\tvalid_1's binary_error: 0.0833\n",
      "[1502]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0835\n",
      "[1503]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0835\n",
      "[1504]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0835\n",
      "[1505]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0835\n",
      "[1506]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0836\n",
      "[1507]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0836\n",
      "[1508]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0836\n",
      "[1509]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0835\n",
      "[1510]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0836\n",
      "[1511]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0836\n",
      "[1512]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0837\n",
      "[1513]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0836\n",
      "[1514]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0836\n",
      "[1515]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0837\n",
      "[1516]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0836\n",
      "[1517]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0836\n",
      "[1518]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0836\n",
      "[1519]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0836\n",
      "[1520]\ttraining's binary_error: 0.0002\tvalid_1's binary_error: 0.0836\n",
      "[1521]\ttraining's binary_error: 0.0002\tvalid_1's binary_error: 0.0835\n",
      "[1522]\ttraining's binary_error: 0.0002\tvalid_1's binary_error: 0.0835\n",
      "[1523]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0836\n",
      "[1524]\ttraining's binary_error: 0.0002\tvalid_1's binary_error: 0.0834\n",
      "[1525]\ttraining's binary_error: 0.0002\tvalid_1's binary_error: 0.0834\n",
      "[1526]\ttraining's binary_error: 0.0002\tvalid_1's binary_error: 0.0834\n",
      "[1527]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0834\n",
      "[1528]\ttraining's binary_error: 0.0002\tvalid_1's binary_error: 0.0834\n",
      "[1529]\ttraining's binary_error: 0.0002\tvalid_1's binary_error: 0.0834\n",
      "[1530]\ttraining's binary_error: 0.0002\tvalid_1's binary_error: 0.0834\n",
      "[1531]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0835\n",
      "[1532]\ttraining's binary_error: 0.000225\tvalid_1's binary_error: 0.0835\n",
      "[1533]\ttraining's binary_error: 0.0002\tvalid_1's binary_error: 0.0835\n",
      "[1534]\ttraining's binary_error: 0.0002\tvalid_1's binary_error: 0.0835\n",
      "[1535]\ttraining's binary_error: 0.0002\tvalid_1's binary_error: 0.0835\n",
      "[1536]\ttraining's binary_error: 0.000175\tvalid_1's binary_error: 0.0835\n",
      "[1537]\ttraining's binary_error: 0.000175\tvalid_1's binary_error: 0.0835\n",
      "[1538]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0835\n",
      "[1539]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0837\n",
      "[1540]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0837\n",
      "[1541]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0837\n",
      "[1542]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0837\n",
      "[1543]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0838\n",
      "[1544]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0838\n",
      "[1545]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0839\n",
      "[1546]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0838\n",
      "[1547]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0837\n",
      "[1548]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0836\n",
      "[1549]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0835\n",
      "[1550]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0836\n",
      "[1551]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0836\n",
      "[1552]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0835\n",
      "[1553]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0834\n",
      "[1554]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0834\n",
      "[1555]\ttraining's binary_error: 0.00015\tvalid_1's binary_error: 0.0834\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[1556]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0836\n",
      "[1557]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0836\n",
      "[1558]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0839\n",
      "[1559]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0839\n",
      "[1560]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0838\n",
      "[1561]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0838\n",
      "[1562]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0838\n",
      "[1563]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0837\n",
      "[1564]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0836\n",
      "[1565]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0836\n",
      "[1566]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0836\n",
      "[1567]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0836\n",
      "[1568]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0836\n",
      "[1569]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0837\n",
      "[1570]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0837\n",
      "[1571]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0838\n",
      "[1572]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0839\n",
      "[1573]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0838\n",
      "[1574]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0838\n",
      "[1575]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0837\n",
      "[1576]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0837\n",
      "[1577]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0836\n",
      "[1578]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0837\n",
      "[1579]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0836\n",
      "[1580]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0836\n",
      "[1581]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0835\n",
      "[1582]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0835\n",
      "[1583]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0835\n",
      "[1584]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0835\n",
      "[1585]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0834\n",
      "[1586]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0832\n",
      "[1587]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0831\n",
      "[1588]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0832\n",
      "[1589]\ttraining's binary_error: 0.000125\tvalid_1's binary_error: 0.0833\n",
      "[1590]\ttraining's binary_error: 0.0001\tvalid_1's binary_error: 0.0833\n",
      "[1591]\ttraining's binary_error: 0.0001\tvalid_1's binary_error: 0.0833\n",
      "[1592]\ttraining's binary_error: 0.0001\tvalid_1's binary_error: 0.0833\n",
      "[1593]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1594]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1595]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1596]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1597]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1598]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1599]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1600]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1601]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1602]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1603]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1604]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1605]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1606]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.083\n",
      "[1607]\ttraining's binary_error: 7.5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1608]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1609]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1610]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1611]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1612]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1613]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1614]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1615]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1616]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1617]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1618]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1619]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0831\n",
      "[1620]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1621]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1622]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1623]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1624]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1625]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1626]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1627]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1628]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1629]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1630]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1631]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1632]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1633]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1634]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1635]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1636]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1637]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1638]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1639]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1640]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1641]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1642]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1643]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1644]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1645]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1646]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1647]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1648]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.084\n",
      "[1649]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1650]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1651]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.084\n",
      "[1652]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0841\n",
      "[1653]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0842\n",
      "[1654]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0841\n",
      "[1655]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0841\n",
      "[1656]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.084\n",
      "[1657]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.084\n",
      "[1658]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.084\n",
      "[1659]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.084\n",
      "[1660]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.084\n",
      "[1661]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1662]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.084\n",
      "[1663]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1664]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1665]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1666]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1667]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1668]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1669]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1670]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1671]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1672]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1673]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1674]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1675]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1676]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1677]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1678]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1679]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1680]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1681]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1682]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1683]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1684]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1685]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[1686]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1687]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1688]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1689]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1690]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1691]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1692]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1693]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1694]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1695]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1696]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1697]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1698]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1699]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1700]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1701]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1702]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1703]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1704]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1705]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1706]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1707]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1708]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1709]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1710]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1711]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1712]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1713]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1714]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1715]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1716]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1717]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1718]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1719]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1720]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1721]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1722]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1723]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1724]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1725]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1726]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1727]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1728]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1729]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1730]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1731]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1732]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1733]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1734]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1735]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1736]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1737]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1738]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1739]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1740]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1741]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1742]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1743]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1744]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1745]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1746]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1747]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1748]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1749]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1750]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1751]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1752]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1753]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1754]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1755]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1756]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1757]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.084\n",
      "[1758]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.084\n",
      "[1759]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.084\n",
      "[1760]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.084\n",
      "[1761]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1762]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1763]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1764]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1765]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1766]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1767]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1768]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1769]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1770]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1771]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1772]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1773]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1774]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1775]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1776]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1777]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1778]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1779]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1780]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1781]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1782]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1783]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0839\n",
      "[1784]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0838\n",
      "[1785]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1786]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1787]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1788]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0837\n",
      "[1789]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1790]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1791]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1792]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1793]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1794]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1795]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1796]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1797]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1798]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1799]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1800]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1801]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1802]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1803]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0836\n",
      "[1804]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1805]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1806]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1807]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1808]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1809]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1810]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1811]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1812]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[1813]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1814]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0831\n",
      "[1815]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0831\n",
      "[1816]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0831\n",
      "[1817]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0831\n",
      "[1818]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0831\n",
      "[1819]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1820]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1821]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1822]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1823]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1824]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1825]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1826]\ttraining's binary_error: 5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1827]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0835\n",
      "[1828]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1829]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1830]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0833\n",
      "[1831]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0834\n",
      "[1832]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1833]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0832\n",
      "[1834]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.083\n",
      "[1835]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.083\n",
      "[1836]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.083\n",
      "[1837]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0829\n",
      "[1838]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0829\n",
      "[1839]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.083\n",
      "[1840]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.083\n",
      "[1841]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0829\n",
      "[1842]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.083\n",
      "[1843]\ttraining's binary_error: 2.5e-05\tvalid_1's binary_error: 0.0831\n",
      "[1844]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0832\n",
      "[1845]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1846]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1847]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1848]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0829\n",
      "[1849]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0829\n",
      "[1850]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0829\n",
      "[1851]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1852]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1853]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0829\n",
      "[1854]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0829\n",
      "[1855]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0829\n",
      "[1856]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0829\n",
      "[1857]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1858]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1859]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1860]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0829\n",
      "[1861]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0828\n",
      "[1862]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0829\n",
      "[1863]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0829\n",
      "[1864]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0829\n",
      "[1865]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1866]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0831\n",
      "[1867]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1868]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1869]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1870]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1871]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1872]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1873]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0829\n",
      "[1874]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0828\n",
      "[1875]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0829\n",
      "[1876]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1877]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0831\n",
      "[1878]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1879]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0831\n",
      "[1880]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0832\n",
      "[1881]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0832\n",
      "[1882]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1883]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1884]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0831\n",
      "[1885]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0831\n",
      "[1886]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0831\n",
      "[1887]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0832\n",
      "[1888]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0832\n",
      "[1889]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0831\n",
      "[1890]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0831\n",
      "[1891]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0832\n",
      "[1892]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1893]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1894]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1895]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1896]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1897]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1898]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0831\n",
      "[1899]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0831\n",
      "[1900]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1901]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1902]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1903]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0831\n",
      "[1904]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.083\n",
      "[1905]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0831\n",
      "[1906]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0831\n",
      "[1907]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0831\n",
      "[1908]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0832\n",
      "[1909]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1910]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1911]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1912]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1913]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1914]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0834\n",
      "[1915]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0834\n",
      "[1916]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1917]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1918]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1919]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1920]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1921]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1922]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0833\n",
      "[1923]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0834\n",
      "[1924]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0834\n",
      "[1925]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0834\n",
      "[1926]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0834\n",
      "[1927]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1928]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1929]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1930]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1931]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1932]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1933]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1934]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1935]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1936]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1937]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1938]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1939]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1940]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1941]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1942]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1943]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1944]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0838\n",
      "[1945]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0838\n",
      "[1946]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0838\n",
      "[1947]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0838\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[1948]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0838\n",
      "[1949]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0838\n",
      "[1950]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0838\n",
      "[1951]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0838\n",
      "[1952]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0838\n",
      "[1953]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1954]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1955]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1956]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1957]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1958]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1959]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1960]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1961]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1962]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0834\n",
      "[1963]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0834\n",
      "[1964]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1965]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1966]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1967]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1968]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0834\n",
      "[1969]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1970]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1971]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0835\n",
      "[1972]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1973]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1974]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1975]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1976]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0838\n",
      "[1977]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0838\n",
      "[1978]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0839\n",
      "[1979]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0839\n",
      "[1980]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0839\n",
      "[1981]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0839\n",
      "[1982]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0839\n",
      "[1983]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0838\n",
      "[1984]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1985]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1986]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1987]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1988]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1989]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1990]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1991]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1992]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0839\n",
      "[1993]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n",
      "[1994]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1995]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0838\n",
      "[1996]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0839\n",
      "[1997]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1998]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[1999]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0836\n",
      "[2000]\ttraining's binary_error: 0\tvalid_1's binary_error: 0.0837\n"
     ]
    }
   ],
   "source": [
    "# Train the booster; valid_sets reports the metric on both the training and validation datasets each round\n",
    "model = lgb.train(param, train_dataset, num_boost_round=num_round, valid_sets=[train_dataset, test_dataset])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 51,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "准确率\n",
      "0.91282\n",
      "f1score\n",
      "0.945782907747609\n",
      "auc\n",
      "0.8624407787630743\n",
      "混淆矩阵\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "array([[ 7621,  2153],\n",
       "       [ 2206, 38020]])"
      ]
     },
     "execution_count": 51,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Hold-out (validation) set results\n",
    "from sklearn.metrics import accuracy_score, roc_auc_score, f1_score, confusion_matrix\n",
    "# Predicted positive-class probabilities at the best boosting iteration\n",
    "y_pred0=model.predict(test_final.drop(columns='loan_status'),num_iteration=model.best_iteration)\n",
    "# Binarize at the conventional 0.5 threshold for the label-based metrics\n",
    "y_pred=[1 if x >=0.5 else 0 for x in y_pred0]\n",
    "print('准确率')\n",
    "print(accuracy_score(test_final['loan_status'],y_pred))\n",
    "print('f1score')\n",
    "print(f1_score(test_final['loan_status'],y_pred))\n",
    "print('auc')\n",
    "# ROC AUC must be computed from the raw scores (y_pred0), not the thresholded\n",
    "# labels, otherwise the ranking-based AUC is underestimated\n",
    "print(roc_auc_score(test_final['loan_status'],y_pred0))\n",
    "print('混淆矩阵')\n",
    "confusion_matrix(test_final['loan_status'],y_pred)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "auwWD3H63XH8"
   },
   "source": [
    "## 三.随机探索阶段\n",
    "本阶段通过网格搜索进行粗粒度探索，用于确定各参数的大致取值范围。\n",
    "参考文献  \n",
    "* [LightGBM 中文文档](https://lightgbm.apachecn.org/#/docs/6)\n",
    "* [LightGBM参数设置](https://zhuanlan.zhihu.com/p/376485485)\n",
    "* [LightGBM 调参方法（具体操作）](https://www.cnblogs.com/bjwu/p/9307344.html)\n",
    "* [LightGBM+gridsearchcv调参](https://zhuanlan.zhihu.com/p/76206257)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 85,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['accuracy',\n",
       " 'adjusted_mutual_info_score',\n",
       " 'adjusted_rand_score',\n",
       " 'average_precision',\n",
       " 'balanced_accuracy',\n",
       " 'completeness_score',\n",
       " 'explained_variance',\n",
       " 'f1',\n",
       " 'f1_macro',\n",
       " 'f1_micro',\n",
       " 'f1_samples',\n",
       " 'f1_weighted',\n",
       " 'fowlkes_mallows_score',\n",
       " 'homogeneity_score',\n",
       " 'jaccard',\n",
       " 'jaccard_macro',\n",
       " 'jaccard_micro',\n",
       " 'jaccard_samples',\n",
       " 'jaccard_weighted',\n",
       " 'max_error',\n",
       " 'mutual_info_score',\n",
       " 'neg_brier_score',\n",
       " 'neg_log_loss',\n",
       " 'neg_mean_absolute_error',\n",
       " 'neg_mean_absolute_percentage_error',\n",
       " 'neg_mean_gamma_deviance',\n",
       " 'neg_mean_poisson_deviance',\n",
       " 'neg_mean_squared_error',\n",
       " 'neg_mean_squared_log_error',\n",
       " 'neg_median_absolute_error',\n",
       " 'neg_root_mean_squared_error',\n",
       " 'normalized_mutual_info_score',\n",
       " 'precision',\n",
       " 'precision_macro',\n",
       " 'precision_micro',\n",
       " 'precision_samples',\n",
       " 'precision_weighted',\n",
       " 'r2',\n",
       " 'rand_score',\n",
       " 'recall',\n",
       " 'recall_macro',\n",
       " 'recall_micro',\n",
       " 'recall_samples',\n",
       " 'recall_weighted',\n",
       " 'roc_auc',\n",
       " 'roc_auc_ovo',\n",
       " 'roc_auc_ovo_weighted',\n",
       " 'roc_auc_ovr',\n",
       " 'roc_auc_ovr_weighted',\n",
       " 'top_k_accuracy',\n",
       " 'v_measure_score']"
      ]
     },
     "execution_count": 85,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# List the scoring strings sklearn accepts for GridSearchCV's `scoring` argument.\n",
    "# NOTE: sklearn.metrics.SCORERS was deprecated and removed in scikit-learn 1.3;\n",
    "# get_scorer_names() is the supported replacement and returns the same names.\n",
    "from sklearn.metrics import get_scorer_names\n",
    "sorted(get_scorer_names())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 121,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2021-07-06 19:44:06\n",
      "roc_auc\n",
      "[LightGBM] [Warning] bagging_fraction is set=0.4, subsample=1.0 will be ignored. Current value: bagging_fraction=0.4\n",
      "[LightGBM] [Warning] feature_fraction is set=0.6, colsample_bytree=1.0 will be ignored. Current value: feature_fraction=0.6\n",
      "{'bagging_fraction': 0.4, 'feature_fraction': 0.6, 'max_depth': 5, 'num_leaves': 50}\n",
      "0.9605377680579815\n",
      "[[ 8342  1870]\n",
      " [ 1974 37814]]\n",
      "2021-07-06 19:49:47\n",
      "f1_macro\n",
      "[LightGBM] [Warning] bagging_fraction is set=0.4, subsample=1.0 will be ignored. Current value: bagging_fraction=0.4\n",
      "[LightGBM] [Warning] feature_fraction is set=1, colsample_bytree=1.0 will be ignored. Current value: feature_fraction=1\n",
      "{'bagging_fraction': 0.4, 'feature_fraction': 1, 'max_depth': 5, 'num_leaves': 50}\n",
      "0.8764860651016686\n",
      "[[ 8363  1849]\n",
      " [ 1992 37796]]\n",
      "2021-07-06 19:55:24\n",
      "f1_micro\n",
      "[LightGBM] [Warning] bagging_fraction is set=0.4, subsample=1.0 will be ignored. Current value: bagging_fraction=0.4\n",
      "[LightGBM] [Warning] feature_fraction is set=0.6, colsample_bytree=1.0 will be ignored. Current value: feature_fraction=0.6\n",
      "{'bagging_fraction': 0.4, 'feature_fraction': 0.6, 'max_depth': 5, 'num_leaves': 50}\n",
      "0.9194000000000001\n",
      "[[ 8342  1870]\n",
      " [ 1974 37814]]\n",
      "2021-07-06 20:01:04\n",
      "accuracy\n",
      "[LightGBM] [Warning] bagging_fraction is set=0.4, subsample=1.0 will be ignored. Current value: bagging_fraction=0.4\n",
      "[LightGBM] [Warning] feature_fraction is set=0.6, colsample_bytree=1.0 will be ignored. Current value: feature_fraction=0.6\n",
      "{'bagging_fraction': 0.4, 'feature_fraction': 0.6, 'max_depth': 5, 'num_leaves': 50}\n",
      "0.9194000000000001\n",
      "[[ 8342  1870]\n",
      " [ 1974 37814]]\n",
      "2021-07-06 20:06:38\n",
      "precision\n",
      "[LightGBM] [Warning] bagging_fraction is set=0.4, subsample=1.0 will be ignored. Current value: bagging_fraction=0.4\n",
      "[LightGBM] [Warning] feature_fraction is set=1, colsample_bytree=1.0 will be ignored. Current value: feature_fraction=1\n",
      "{'bagging_fraction': 0.4, 'feature_fraction': 1, 'max_depth': 5, 'num_leaves': 50}\n",
      "0.9508960532634202\n",
      "[[ 8363  1849]\n",
      " [ 1992 37796]]\n",
      "2021-07-06 20:12:14\n",
      "recall\n",
      "[LightGBM] [Warning] bagging_fraction is set=0.4, subsample=1.0 will be ignored. Current value: bagging_fraction=0.4\n",
      "[LightGBM] [Warning] feature_fraction is set=0.4, colsample_bytree=1.0 will be ignored. Current value: feature_fraction=0.4\n",
      "{'bagging_fraction': 0.4, 'feature_fraction': 0.4, 'max_depth': 5, 'num_leaves': 50}\n",
      "0.950160849802673\n",
      "[[ 8223  1989]\n",
      " [ 1864 37924]]\n"
     ]
    }
   ],
   "source": [
    "# Hyperparameter notes:\n",
    "# max_depth: tree depth; deeper trees risk overfitting\n",
    "# num_leaves: too many leaves also risks overfitting\n",
    "# learning_rate: shrinkage rate\n",
    "# subsample/bagging_fraction: row (data) subsampling\n",
    "# colsample_bytree/feature_fraction: column (feature) subsampling\n",
    "from sklearn.model_selection import GridSearchCV\n",
    "import time\n",
    "\n",
    "model_lgb = lgb.LGBMClassifier(num_leaves=50,boosting_type = 'gbdt',\n",
    "                              learning_rate=0.1, n_estimators=43, max_depth=6,\n",
    "                              metric='binary_logloss', bagging_fraction = 0.8,feature_fraction = 0.8)\n",
    "params_test1={\n",
    "    'max_depth':range(3,8,2),\n",
    "    'num_leaves':range(50, 170, 30),\n",
    "    'bagging_fraction':[0.4,0.6,0.8,1],\n",
    "    'feature_fraction':[0.4,0.6,0.8,1]\n",
    "}\n",
    "# Plain list literal (the previous list([...]) wrapper was redundant)\n",
    "score_list=['roc_auc','f1_macro','f1_micro','accuracy','precision','recall']\n",
    "gsearch_dict={}\n",
    "# Cross-validate the same grid under several scoring metrics and keep each fitted search\n",
    "for score_type in score_list:\n",
    "    print(time.strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime()))\n",
    "    print(score_type)\n",
    "    gsearch=GridSearchCV(estimator=model_lgb, param_grid=params_test1, scoring=score_type, cv=5, verbose=0, n_jobs=4)\n",
    "    gsearch_result=gsearch.fit(train_final.drop(columns='loan_status'),train_final['loan_status'])\n",
    "    # Best parameters and cross-validated score\n",
    "    print(gsearch_result.best_params_)\n",
    "    print(gsearch_result.best_score_)\n",
    "    # Confusion matrix on the TRAINING data — optimistic, sanity check only\n",
    "    y_pred=gsearch_result.predict(train_final.drop(columns='loan_status'))\n",
    "    print(confusion_matrix(train_final['loan_status'],y_pred))\n",
    "    gsearch_dict[score_type]=gsearch_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "#从以上过程可以发现：使用roc_auc指标进行寻优时，混淆矩阵结果相对可以接受，所以后续统一使用roc_auc作为寻优指标；\n",
    "#并选用{'bagging_fraction': 0.4, 'feature_fraction': 0.6, 'max_depth': 5, 'num_leaves': 50}作为基本参数，它基本符合各个指标下的寻优结果"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 四.顺序搜索阶段\n",
    "本阶段按照重要性从高到低依次搜索各个参数。"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 142,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[LightGBM] [Warning] bagging_fraction is set=0.4, subsample=1.0 will be ignored. Current value: bagging_fraction=0.4\n",
      "[LightGBM] [Warning] feature_fraction is set=0.6, colsample_bytree=1.0 will be ignored. Current value: feature_fraction=0.6\n",
      "{'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 25}\n",
      "0.9608356076066592\n",
      "[[ 8381  1831]\n",
      " [ 1944 37844]]\n"
     ]
    }
   ],
   "source": [
    "# First tune learning rate, tree depth, and leaf count together\n",
    "# (bagging_fraction/feature_fraction fixed at the values chosen in the random-exploration stage)\n",
    "lgb1 = lgb.LGBMClassifier(boosting_type = 'gbdt',n_estimators=43,\n",
    "                          metric='binary_logloss', bagging_fraction = 0.4,feature_fraction = 0.6)\n",
    "params_test1={\n",
    "    'learning_rate':np.linspace(0.02,0.1,5),\n",
    "    'max_depth':range(1,11,1),\n",
    "    'num_leaves':range(25,80,5)\n",
    "}\n",
    "gsearch1=GridSearchCV(estimator=lgb1, param_grid=params_test1, scoring='roc_auc', cv=5, verbose=0, n_jobs=4)\n",
    "gsearch_result1=gsearch1.fit(train_final.drop(columns='loan_status'),train_final['loan_status'])\n",
    "# Best parameters and cross-validated score\n",
    "print(gsearch_result1.best_params_)\n",
    "print(gsearch_result1.best_score_)\n",
    "# Confusion matrix on the training data\n",
    "y_pred=gsearch_result1.predict(train_final.drop(columns='loan_status'))\n",
    "print(confusion_matrix(train_final['loan_status'],y_pred))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 143,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.942971  with:   {'learning_rate': 0.02, 'max_depth': 1, 'num_leaves': 25}\n",
      "0.942971  with:   {'learning_rate': 0.02, 'max_depth': 1, 'num_leaves': 30}\n",
      "0.942971  with:   {'learning_rate': 0.02, 'max_depth': 1, 'num_leaves': 35}\n",
      "0.942971  with:   {'learning_rate': 0.02, 'max_depth': 1, 'num_leaves': 40}\n",
      "0.942971  with:   {'learning_rate': 0.02, 'max_depth': 1, 'num_leaves': 45}\n",
      "0.942971  with:   {'learning_rate': 0.02, 'max_depth': 1, 'num_leaves': 50}\n",
      "0.942971  with:   {'learning_rate': 0.02, 'max_depth': 1, 'num_leaves': 55}\n",
      "0.942971  with:   {'learning_rate': 0.02, 'max_depth': 1, 'num_leaves': 60}\n",
      "0.942971  with:   {'learning_rate': 0.02, 'max_depth': 1, 'num_leaves': 65}\n",
      "0.942971  with:   {'learning_rate': 0.02, 'max_depth': 1, 'num_leaves': 70}\n",
      "0.942971  with:   {'learning_rate': 0.02, 'max_depth': 1, 'num_leaves': 75}\n",
      "0.953006  with:   {'learning_rate': 0.02, 'max_depth': 2, 'num_leaves': 25}\n",
      "0.953006  with:   {'learning_rate': 0.02, 'max_depth': 2, 'num_leaves': 30}\n",
      "0.953006  with:   {'learning_rate': 0.02, 'max_depth': 2, 'num_leaves': 35}\n",
      "0.953006  with:   {'learning_rate': 0.02, 'max_depth': 2, 'num_leaves': 40}\n",
      "0.953006  with:   {'learning_rate': 0.02, 'max_depth': 2, 'num_leaves': 45}\n",
      "0.953006  with:   {'learning_rate': 0.02, 'max_depth': 2, 'num_leaves': 50}\n",
      "0.953006  with:   {'learning_rate': 0.02, 'max_depth': 2, 'num_leaves': 55}\n",
      "0.953006  with:   {'learning_rate': 0.02, 'max_depth': 2, 'num_leaves': 60}\n",
      "0.953006  with:   {'learning_rate': 0.02, 'max_depth': 2, 'num_leaves': 65}\n",
      "0.953006  with:   {'learning_rate': 0.02, 'max_depth': 2, 'num_leaves': 70}\n",
      "0.953006  with:   {'learning_rate': 0.02, 'max_depth': 2, 'num_leaves': 75}\n",
      "0.956814  with:   {'learning_rate': 0.02, 'max_depth': 3, 'num_leaves': 25}\n",
      "0.956814  with:   {'learning_rate': 0.02, 'max_depth': 3, 'num_leaves': 30}\n",
      "0.956814  with:   {'learning_rate': 0.02, 'max_depth': 3, 'num_leaves': 35}\n",
      "0.956814  with:   {'learning_rate': 0.02, 'max_depth': 3, 'num_leaves': 40}\n",
      "0.956814  with:   {'learning_rate': 0.02, 'max_depth': 3, 'num_leaves': 45}\n",
      "0.956814  with:   {'learning_rate': 0.02, 'max_depth': 3, 'num_leaves': 50}\n",
      "0.956814  with:   {'learning_rate': 0.02, 'max_depth': 3, 'num_leaves': 55}\n",
      "0.956814  with:   {'learning_rate': 0.02, 'max_depth': 3, 'num_leaves': 60}\n",
      "0.956814  with:   {'learning_rate': 0.02, 'max_depth': 3, 'num_leaves': 65}\n",
      "0.956814  with:   {'learning_rate': 0.02, 'max_depth': 3, 'num_leaves': 70}\n",
      "0.956814  with:   {'learning_rate': 0.02, 'max_depth': 3, 'num_leaves': 75}\n",
      "0.958537  with:   {'learning_rate': 0.02, 'max_depth': 4, 'num_leaves': 25}\n",
      "0.958537  with:   {'learning_rate': 0.02, 'max_depth': 4, 'num_leaves': 30}\n",
      "0.958537  with:   {'learning_rate': 0.02, 'max_depth': 4, 'num_leaves': 35}\n",
      "0.958537  with:   {'learning_rate': 0.02, 'max_depth': 4, 'num_leaves': 40}\n",
      "0.958537  with:   {'learning_rate': 0.02, 'max_depth': 4, 'num_leaves': 45}\n",
      "0.958537  with:   {'learning_rate': 0.02, 'max_depth': 4, 'num_leaves': 50}\n",
      "0.958537  with:   {'learning_rate': 0.02, 'max_depth': 4, 'num_leaves': 55}\n",
      "0.958537  with:   {'learning_rate': 0.02, 'max_depth': 4, 'num_leaves': 60}\n",
      "0.958537  with:   {'learning_rate': 0.02, 'max_depth': 4, 'num_leaves': 65}\n",
      "0.958537  with:   {'learning_rate': 0.02, 'max_depth': 4, 'num_leaves': 70}\n",
      "0.958537  with:   {'learning_rate': 0.02, 'max_depth': 4, 'num_leaves': 75}\n",
      "0.958700  with:   {'learning_rate': 0.02, 'max_depth': 5, 'num_leaves': 25}\n",
      "0.959136  with:   {'learning_rate': 0.02, 'max_depth': 5, 'num_leaves': 30}\n",
      "0.959235  with:   {'learning_rate': 0.02, 'max_depth': 5, 'num_leaves': 35}\n",
      "0.959235  with:   {'learning_rate': 0.02, 'max_depth': 5, 'num_leaves': 40}\n",
      "0.959235  with:   {'learning_rate': 0.02, 'max_depth': 5, 'num_leaves': 45}\n",
      "0.959235  with:   {'learning_rate': 0.02, 'max_depth': 5, 'num_leaves': 50}\n",
      "0.959235  with:   {'learning_rate': 0.02, 'max_depth': 5, 'num_leaves': 55}\n",
      "0.959235  with:   {'learning_rate': 0.02, 'max_depth': 5, 'num_leaves': 60}\n",
      "0.959235  with:   {'learning_rate': 0.02, 'max_depth': 5, 'num_leaves': 65}\n",
      "0.959235  with:   {'learning_rate': 0.02, 'max_depth': 5, 'num_leaves': 70}\n",
      "0.959235  with:   {'learning_rate': 0.02, 'max_depth': 5, 'num_leaves': 75}\n",
      "0.958258  with:   {'learning_rate': 0.02, 'max_depth': 6, 'num_leaves': 25}\n",
      "0.958513  with:   {'learning_rate': 0.02, 'max_depth': 6, 'num_leaves': 30}\n",
      "0.958662  with:   {'learning_rate': 0.02, 'max_depth': 6, 'num_leaves': 35}\n",
      "0.958782  with:   {'learning_rate': 0.02, 'max_depth': 6, 'num_leaves': 40}\n",
      "0.958989  with:   {'learning_rate': 0.02, 'max_depth': 6, 'num_leaves': 45}\n",
      "0.959141  with:   {'learning_rate': 0.02, 'max_depth': 6, 'num_leaves': 50}\n",
      "0.959280  with:   {'learning_rate': 0.02, 'max_depth': 6, 'num_leaves': 55}\n",
      "0.959268  with:   {'learning_rate': 0.02, 'max_depth': 6, 'num_leaves': 60}\n",
      "0.959252  with:   {'learning_rate': 0.02, 'max_depth': 6, 'num_leaves': 65}\n",
      "0.959252  with:   {'learning_rate': 0.02, 'max_depth': 6, 'num_leaves': 70}\n",
      "0.959252  with:   {'learning_rate': 0.02, 'max_depth': 6, 'num_leaves': 75}\n",
      "0.958207  with:   {'learning_rate': 0.02, 'max_depth': 7, 'num_leaves': 25}\n",
      "0.958438  with:   {'learning_rate': 0.02, 'max_depth': 7, 'num_leaves': 30}\n",
      "0.958598  with:   {'learning_rate': 0.02, 'max_depth': 7, 'num_leaves': 35}\n",
      "0.958669  with:   {'learning_rate': 0.02, 'max_depth': 7, 'num_leaves': 40}\n",
      "0.958757  with:   {'learning_rate': 0.02, 'max_depth': 7, 'num_leaves': 45}\n",
      "0.958849  with:   {'learning_rate': 0.02, 'max_depth': 7, 'num_leaves': 50}\n",
      "0.958854  with:   {'learning_rate': 0.02, 'max_depth': 7, 'num_leaves': 55}\n",
      "0.958901  with:   {'learning_rate': 0.02, 'max_depth': 7, 'num_leaves': 60}\n",
      "0.958998  with:   {'learning_rate': 0.02, 'max_depth': 7, 'num_leaves': 65}\n",
      "0.959014  with:   {'learning_rate': 0.02, 'max_depth': 7, 'num_leaves': 70}\n",
      "0.959065  with:   {'learning_rate': 0.02, 'max_depth': 7, 'num_leaves': 75}\n",
      "0.958124  with:   {'learning_rate': 0.02, 'max_depth': 8, 'num_leaves': 25}\n",
      "0.958407  with:   {'learning_rate': 0.02, 'max_depth': 8, 'num_leaves': 30}\n",
      "0.958626  with:   {'learning_rate': 0.02, 'max_depth': 8, 'num_leaves': 35}\n",
      "0.958705  with:   {'learning_rate': 0.02, 'max_depth': 8, 'num_leaves': 40}\n",
      "0.958690  with:   {'learning_rate': 0.02, 'max_depth': 8, 'num_leaves': 45}\n",
      "0.958769  with:   {'learning_rate': 0.02, 'max_depth': 8, 'num_leaves': 50}\n",
      "0.958788  with:   {'learning_rate': 0.02, 'max_depth': 8, 'num_leaves': 55}\n",
      "0.958882  with:   {'learning_rate': 0.02, 'max_depth': 8, 'num_leaves': 60}\n",
      "0.958889  with:   {'learning_rate': 0.02, 'max_depth': 8, 'num_leaves': 65}\n",
      "0.958876  with:   {'learning_rate': 0.02, 'max_depth': 8, 'num_leaves': 70}\n",
      "0.958926  with:   {'learning_rate': 0.02, 'max_depth': 8, 'num_leaves': 75}\n",
      "0.958108  with:   {'learning_rate': 0.02, 'max_depth': 9, 'num_leaves': 25}\n",
      "0.958396  with:   {'learning_rate': 0.02, 'max_depth': 9, 'num_leaves': 30}\n",
      "0.958526  with:   {'learning_rate': 0.02, 'max_depth': 9, 'num_leaves': 35}\n",
      "0.958686  with:   {'learning_rate': 0.02, 'max_depth': 9, 'num_leaves': 40}\n",
      "0.958719  with:   {'learning_rate': 0.02, 'max_depth': 9, 'num_leaves': 45}\n",
      "0.958723  with:   {'learning_rate': 0.02, 'max_depth': 9, 'num_leaves': 50}\n",
      "0.958738  with:   {'learning_rate': 0.02, 'max_depth': 9, 'num_leaves': 55}\n",
      "0.958774  with:   {'learning_rate': 0.02, 'max_depth': 9, 'num_leaves': 60}\n",
      "0.958790  with:   {'learning_rate': 0.02, 'max_depth': 9, 'num_leaves': 65}\n",
      "0.958824  with:   {'learning_rate': 0.02, 'max_depth': 9, 'num_leaves': 70}\n",
      "0.958823  with:   {'learning_rate': 0.02, 'max_depth': 9, 'num_leaves': 75}\n",
      "0.958121  with:   {'learning_rate': 0.02, 'max_depth': 10, 'num_leaves': 25}\n",
      "0.958370  with:   {'learning_rate': 0.02, 'max_depth': 10, 'num_leaves': 30}\n",
      "0.958518  with:   {'learning_rate': 0.02, 'max_depth': 10, 'num_leaves': 35}\n",
      "0.958597  with:   {'learning_rate': 0.02, 'max_depth': 10, 'num_leaves': 40}\n",
      "0.958668  with:   {'learning_rate': 0.02, 'max_depth': 10, 'num_leaves': 45}\n",
      "0.958716  with:   {'learning_rate': 0.02, 'max_depth': 10, 'num_leaves': 50}\n",
      "0.958710  with:   {'learning_rate': 0.02, 'max_depth': 10, 'num_leaves': 55}\n",
      "0.958716  with:   {'learning_rate': 0.02, 'max_depth': 10, 'num_leaves': 60}\n",
      "0.958740  with:   {'learning_rate': 0.02, 'max_depth': 10, 'num_leaves': 65}\n",
      "0.958770  with:   {'learning_rate': 0.02, 'max_depth': 10, 'num_leaves': 70}\n",
      "0.958772  with:   {'learning_rate': 0.02, 'max_depth': 10, 'num_leaves': 75}\n",
      "0.950230  with:   {'learning_rate': 0.04, 'max_depth': 1, 'num_leaves': 25}\n",
      "0.950230  with:   {'learning_rate': 0.04, 'max_depth': 1, 'num_leaves': 30}\n",
      "0.950230  with:   {'learning_rate': 0.04, 'max_depth': 1, 'num_leaves': 35}\n",
      "0.950230  with:   {'learning_rate': 0.04, 'max_depth': 1, 'num_leaves': 40}\n",
      "0.950230  with:   {'learning_rate': 0.04, 'max_depth': 1, 'num_leaves': 45}\n",
      "0.950230  with:   {'learning_rate': 0.04, 'max_depth': 1, 'num_leaves': 50}\n",
      "0.950230  with:   {'learning_rate': 0.04, 'max_depth': 1, 'num_leaves': 55}\n",
      "0.950230  with:   {'learning_rate': 0.04, 'max_depth': 1, 'num_leaves': 60}\n",
      "0.950230  with:   {'learning_rate': 0.04, 'max_depth': 1, 'num_leaves': 65}\n",
      "0.950230  with:   {'learning_rate': 0.04, 'max_depth': 1, 'num_leaves': 70}\n",
      "0.950230  with:   {'learning_rate': 0.04, 'max_depth': 1, 'num_leaves': 75}\n",
      "0.955591  with:   {'learning_rate': 0.04, 'max_depth': 2, 'num_leaves': 25}\n",
      "0.955591  with:   {'learning_rate': 0.04, 'max_depth': 2, 'num_leaves': 30}\n",
      "0.955591  with:   {'learning_rate': 0.04, 'max_depth': 2, 'num_leaves': 35}\n",
      "0.955591  with:   {'learning_rate': 0.04, 'max_depth': 2, 'num_leaves': 40}\n",
      "0.955591  with:   {'learning_rate': 0.04, 'max_depth': 2, 'num_leaves': 45}\n",
      "0.955591  with:   {'learning_rate': 0.04, 'max_depth': 2, 'num_leaves': 50}\n",
      "0.955591  with:   {'learning_rate': 0.04, 'max_depth': 2, 'num_leaves': 55}\n",
      "0.955591  with:   {'learning_rate': 0.04, 'max_depth': 2, 'num_leaves': 60}\n",
      "0.955591  with:   {'learning_rate': 0.04, 'max_depth': 2, 'num_leaves': 65}\n",
      "0.955591  with:   {'learning_rate': 0.04, 'max_depth': 2, 'num_leaves': 70}\n",
      "0.955591  with:   {'learning_rate': 0.04, 'max_depth': 2, 'num_leaves': 75}\n",
      "0.958454  with:   {'learning_rate': 0.04, 'max_depth': 3, 'num_leaves': 25}\n",
      "0.958454  with:   {'learning_rate': 0.04, 'max_depth': 3, 'num_leaves': 30}\n",
      "0.958454  with:   {'learning_rate': 0.04, 'max_depth': 3, 'num_leaves': 35}\n",
      "0.958454  with:   {'learning_rate': 0.04, 'max_depth': 3, 'num_leaves': 40}\n",
      "0.958454  with:   {'learning_rate': 0.04, 'max_depth': 3, 'num_leaves': 45}\n",
      "0.958454  with:   {'learning_rate': 0.04, 'max_depth': 3, 'num_leaves': 50}\n",
      "0.958454  with:   {'learning_rate': 0.04, 'max_depth': 3, 'num_leaves': 55}\n",
      "0.958454  with:   {'learning_rate': 0.04, 'max_depth': 3, 'num_leaves': 60}\n",
      "0.958454  with:   {'learning_rate': 0.04, 'max_depth': 3, 'num_leaves': 65}\n",
      "0.958454  with:   {'learning_rate': 0.04, 'max_depth': 3, 'num_leaves': 70}\n",
      "0.958454  with:   {'learning_rate': 0.04, 'max_depth': 3, 'num_leaves': 75}\n",
      "0.959635  with:   {'learning_rate': 0.04, 'max_depth': 4, 'num_leaves': 25}\n",
      "0.959635  with:   {'learning_rate': 0.04, 'max_depth': 4, 'num_leaves': 30}\n",
      "0.959635  with:   {'learning_rate': 0.04, 'max_depth': 4, 'num_leaves': 35}\n",
      "0.959635  with:   {'learning_rate': 0.04, 'max_depth': 4, 'num_leaves': 40}\n",
      "0.959635  with:   {'learning_rate': 0.04, 'max_depth': 4, 'num_leaves': 45}\n",
      "0.959635  with:   {'learning_rate': 0.04, 'max_depth': 4, 'num_leaves': 50}\n",
      "0.959635  with:   {'learning_rate': 0.04, 'max_depth': 4, 'num_leaves': 55}\n",
      "0.959635  with:   {'learning_rate': 0.04, 'max_depth': 4, 'num_leaves': 60}\n",
      "0.959635  with:   {'learning_rate': 0.04, 'max_depth': 4, 'num_leaves': 65}\n",
      "0.959635  with:   {'learning_rate': 0.04, 'max_depth': 4, 'num_leaves': 70}\n",
      "0.959635  with:   {'learning_rate': 0.04, 'max_depth': 4, 'num_leaves': 75}\n",
      "0.959781  with:   {'learning_rate': 0.04, 'max_depth': 5, 'num_leaves': 25}\n",
      "0.959922  with:   {'learning_rate': 0.04, 'max_depth': 5, 'num_leaves': 30}\n",
      "0.959954  with:   {'learning_rate': 0.04, 'max_depth': 5, 'num_leaves': 35}\n",
      "0.959954  with:   {'learning_rate': 0.04, 'max_depth': 5, 'num_leaves': 40}\n",
      "0.959954  with:   {'learning_rate': 0.04, 'max_depth': 5, 'num_leaves': 45}\n",
      "0.959954  with:   {'learning_rate': 0.04, 'max_depth': 5, 'num_leaves': 50}\n",
      "0.959954  with:   {'learning_rate': 0.04, 'max_depth': 5, 'num_leaves': 55}\n",
      "0.959954  with:   {'learning_rate': 0.04, 'max_depth': 5, 'num_leaves': 60}\n",
      "0.959954  with:   {'learning_rate': 0.04, 'max_depth': 5, 'num_leaves': 65}\n",
      "0.959954  with:   {'learning_rate': 0.04, 'max_depth': 5, 'num_leaves': 70}\n",
      "0.959954  with:   {'learning_rate': 0.04, 'max_depth': 5, 'num_leaves': 75}\n",
      "0.959436  with:   {'learning_rate': 0.04, 'max_depth': 6, 'num_leaves': 25}\n",
      "0.959625  with:   {'learning_rate': 0.04, 'max_depth': 6, 'num_leaves': 30}\n",
      "0.959771  with:   {'learning_rate': 0.04, 'max_depth': 6, 'num_leaves': 35}\n",
      "0.959824  with:   {'learning_rate': 0.04, 'max_depth': 6, 'num_leaves': 40}\n",
      "0.959854  with:   {'learning_rate': 0.04, 'max_depth': 6, 'num_leaves': 45}\n",
      "0.959832  with:   {'learning_rate': 0.04, 'max_depth': 6, 'num_leaves': 50}\n",
      "0.959799  with:   {'learning_rate': 0.04, 'max_depth': 6, 'num_leaves': 55}\n",
      "0.959840  with:   {'learning_rate': 0.04, 'max_depth': 6, 'num_leaves': 60}\n",
      "0.959830  with:   {'learning_rate': 0.04, 'max_depth': 6, 'num_leaves': 65}\n",
      "0.959830  with:   {'learning_rate': 0.04, 'max_depth': 6, 'num_leaves': 70}\n",
      "0.959830  with:   {'learning_rate': 0.04, 'max_depth': 6, 'num_leaves': 75}\n",
      "0.959467  with:   {'learning_rate': 0.04, 'max_depth': 7, 'num_leaves': 25}\n",
      "0.959545  with:   {'learning_rate': 0.04, 'max_depth': 7, 'num_leaves': 30}\n",
      "0.959664  with:   {'learning_rate': 0.04, 'max_depth': 7, 'num_leaves': 35}\n",
      "0.959642  with:   {'learning_rate': 0.04, 'max_depth': 7, 'num_leaves': 40}\n",
      "0.959754  with:   {'learning_rate': 0.04, 'max_depth': 7, 'num_leaves': 45}\n",
      "0.959817  with:   {'learning_rate': 0.04, 'max_depth': 7, 'num_leaves': 50}\n",
      "0.959787  with:   {'learning_rate': 0.04, 'max_depth': 7, 'num_leaves': 55}\n",
      "0.959821  with:   {'learning_rate': 0.04, 'max_depth': 7, 'num_leaves': 60}\n",
      "0.959746  with:   {'learning_rate': 0.04, 'max_depth': 7, 'num_leaves': 65}\n",
      "0.959779  with:   {'learning_rate': 0.04, 'max_depth': 7, 'num_leaves': 70}\n",
      "0.959743  with:   {'learning_rate': 0.04, 'max_depth': 7, 'num_leaves': 75}\n",
      "0.959403  with:   {'learning_rate': 0.04, 'max_depth': 8, 'num_leaves': 25}\n",
      "0.959573  with:   {'learning_rate': 0.04, 'max_depth': 8, 'num_leaves': 30}\n",
      "0.959621  with:   {'learning_rate': 0.04, 'max_depth': 8, 'num_leaves': 35}\n",
      "0.959660  with:   {'learning_rate': 0.04, 'max_depth': 8, 'num_leaves': 40}\n",
      "0.959711  with:   {'learning_rate': 0.04, 'max_depth': 8, 'num_leaves': 45}\n",
      "0.959746  with:   {'learning_rate': 0.04, 'max_depth': 8, 'num_leaves': 50}\n",
      "0.959750  with:   {'learning_rate': 0.04, 'max_depth': 8, 'num_leaves': 55}\n",
      "0.959759  with:   {'learning_rate': 0.04, 'max_depth': 8, 'num_leaves': 60}\n",
      "0.959759  with:   {'learning_rate': 0.04, 'max_depth': 8, 'num_leaves': 65}\n",
      "0.959774  with:   {'learning_rate': 0.04, 'max_depth': 8, 'num_leaves': 70}\n",
      "0.959797  with:   {'learning_rate': 0.04, 'max_depth': 8, 'num_leaves': 75}\n",
      "0.959353  with:   {'learning_rate': 0.04, 'max_depth': 9, 'num_leaves': 25}\n",
      "0.959501  with:   {'learning_rate': 0.04, 'max_depth': 9, 'num_leaves': 30}\n",
      "0.959558  with:   {'learning_rate': 0.04, 'max_depth': 9, 'num_leaves': 35}\n",
      "0.959663  with:   {'learning_rate': 0.04, 'max_depth': 9, 'num_leaves': 40}\n",
      "0.959686  with:   {'learning_rate': 0.04, 'max_depth': 9, 'num_leaves': 45}\n",
      "0.959707  with:   {'learning_rate': 0.04, 'max_depth': 9, 'num_leaves': 50}\n",
      "0.959772  with:   {'learning_rate': 0.04, 'max_depth': 9, 'num_leaves': 55}\n",
      "0.959768  with:   {'learning_rate': 0.04, 'max_depth': 9, 'num_leaves': 60}\n",
      "0.959757  with:   {'learning_rate': 0.04, 'max_depth': 9, 'num_leaves': 65}\n",
      "0.959758  with:   {'learning_rate': 0.04, 'max_depth': 9, 'num_leaves': 70}\n",
      "0.959711  with:   {'learning_rate': 0.04, 'max_depth': 9, 'num_leaves': 75}\n",
      "0.959394  with:   {'learning_rate': 0.04, 'max_depth': 10, 'num_leaves': 25}\n",
      "0.959519  with:   {'learning_rate': 0.04, 'max_depth': 10, 'num_leaves': 30}\n",
      "0.959566  with:   {'learning_rate': 0.04, 'max_depth': 10, 'num_leaves': 35}\n",
      "0.959682  with:   {'learning_rate': 0.04, 'max_depth': 10, 'num_leaves': 40}\n",
      "0.959707  with:   {'learning_rate': 0.04, 'max_depth': 10, 'num_leaves': 45}\n",
      "0.959733  with:   {'learning_rate': 0.04, 'max_depth': 10, 'num_leaves': 50}\n",
      "0.959673  with:   {'learning_rate': 0.04, 'max_depth': 10, 'num_leaves': 55}\n",
      "0.959679  with:   {'learning_rate': 0.04, 'max_depth': 10, 'num_leaves': 60}\n",
      "0.959693  with:   {'learning_rate': 0.04, 'max_depth': 10, 'num_leaves': 65}\n",
      "0.959705  with:   {'learning_rate': 0.04, 'max_depth': 10, 'num_leaves': 70}\n",
      "0.959690  with:   {'learning_rate': 0.04, 'max_depth': 10, 'num_leaves': 75}\n",
      "0.952467  with:   {'learning_rate': 0.06, 'max_depth': 1, 'num_leaves': 25}\n",
      "0.952467  with:   {'learning_rate': 0.06, 'max_depth': 1, 'num_leaves': 30}\n",
      "0.952467  with:   {'learning_rate': 0.06, 'max_depth': 1, 'num_leaves': 35}\n",
      "0.952467  with:   {'learning_rate': 0.06, 'max_depth': 1, 'num_leaves': 40}\n",
      "0.952467  with:   {'learning_rate': 0.06, 'max_depth': 1, 'num_leaves': 45}\n",
      "0.952467  with:   {'learning_rate': 0.06, 'max_depth': 1, 'num_leaves': 50}\n",
      "0.952467  with:   {'learning_rate': 0.06, 'max_depth': 1, 'num_leaves': 55}\n",
      "0.952467  with:   {'learning_rate': 0.06, 'max_depth': 1, 'num_leaves': 60}\n",
      "0.952467  with:   {'learning_rate': 0.06, 'max_depth': 1, 'num_leaves': 65}\n",
      "0.952467  with:   {'learning_rate': 0.06, 'max_depth': 1, 'num_leaves': 70}\n",
      "0.952467  with:   {'learning_rate': 0.06, 'max_depth': 1, 'num_leaves': 75}\n",
      "0.957156  with:   {'learning_rate': 0.06, 'max_depth': 2, 'num_leaves': 25}\n",
      "0.957156  with:   {'learning_rate': 0.06, 'max_depth': 2, 'num_leaves': 30}\n",
      "0.957156  with:   {'learning_rate': 0.06, 'max_depth': 2, 'num_leaves': 35}\n",
      "0.957156  with:   {'learning_rate': 0.06, 'max_depth': 2, 'num_leaves': 40}\n",
      "0.957156  with:   {'learning_rate': 0.06, 'max_depth': 2, 'num_leaves': 45}\n",
      "0.957156  with:   {'learning_rate': 0.06, 'max_depth': 2, 'num_leaves': 50}\n",
      "0.957156  with:   {'learning_rate': 0.06, 'max_depth': 2, 'num_leaves': 55}\n",
      "0.957156  with:   {'learning_rate': 0.06, 'max_depth': 2, 'num_leaves': 60}\n",
      "0.957156  with:   {'learning_rate': 0.06, 'max_depth': 2, 'num_leaves': 65}\n",
      "0.957156  with:   {'learning_rate': 0.06, 'max_depth': 2, 'num_leaves': 70}\n",
      "0.957156  with:   {'learning_rate': 0.06, 'max_depth': 2, 'num_leaves': 75}\n",
      "0.959483  with:   {'learning_rate': 0.06, 'max_depth': 3, 'num_leaves': 25}\n",
      "0.959483  with:   {'learning_rate': 0.06, 'max_depth': 3, 'num_leaves': 30}\n",
      "0.959483  with:   {'learning_rate': 0.06, 'max_depth': 3, 'num_leaves': 35}\n",
      "0.959483  with:   {'learning_rate': 0.06, 'max_depth': 3, 'num_leaves': 40}\n",
      "0.959483  with:   {'learning_rate': 0.06, 'max_depth': 3, 'num_leaves': 45}\n",
      "0.959483  with:   {'learning_rate': 0.06, 'max_depth': 3, 'num_leaves': 50}\n",
      "0.959483  with:   {'learning_rate': 0.06, 'max_depth': 3, 'num_leaves': 55}\n",
      "0.959483  with:   {'learning_rate': 0.06, 'max_depth': 3, 'num_leaves': 60}\n",
      "0.959483  with:   {'learning_rate': 0.06, 'max_depth': 3, 'num_leaves': 65}\n",
      "0.959483  with:   {'learning_rate': 0.06, 'max_depth': 3, 'num_leaves': 70}\n",
      "0.959483  with:   {'learning_rate': 0.06, 'max_depth': 3, 'num_leaves': 75}\n",
      "0.960240  with:   {'learning_rate': 0.06, 'max_depth': 4, 'num_leaves': 25}\n",
      "0.960240  with:   {'learning_rate': 0.06, 'max_depth': 4, 'num_leaves': 30}\n",
      "0.960240  with:   {'learning_rate': 0.06, 'max_depth': 4, 'num_leaves': 35}\n",
      "0.960240  with:   {'learning_rate': 0.06, 'max_depth': 4, 'num_leaves': 40}\n",
      "0.960240  with:   {'learning_rate': 0.06, 'max_depth': 4, 'num_leaves': 45}\n",
      "0.960240  with:   {'learning_rate': 0.06, 'max_depth': 4, 'num_leaves': 50}\n",
      "0.960240  with:   {'learning_rate': 0.06, 'max_depth': 4, 'num_leaves': 55}\n",
      "0.960240  with:   {'learning_rate': 0.06, 'max_depth': 4, 'num_leaves': 60}\n",
      "0.960240  with:   {'learning_rate': 0.06, 'max_depth': 4, 'num_leaves': 65}\n",
      "0.960240  with:   {'learning_rate': 0.06, 'max_depth': 4, 'num_leaves': 70}\n",
      "0.960240  with:   {'learning_rate': 0.06, 'max_depth': 4, 'num_leaves': 75}\n",
      "0.960338  with:   {'learning_rate': 0.06, 'max_depth': 5, 'num_leaves': 25}\n",
      "0.960329  with:   {'learning_rate': 0.06, 'max_depth': 5, 'num_leaves': 30}\n",
      "0.960275  with:   {'learning_rate': 0.06, 'max_depth': 5, 'num_leaves': 35}\n",
      "0.960275  with:   {'learning_rate': 0.06, 'max_depth': 5, 'num_leaves': 40}\n",
      "0.960275  with:   {'learning_rate': 0.06, 'max_depth': 5, 'num_leaves': 45}\n",
      "0.960275  with:   {'learning_rate': 0.06, 'max_depth': 5, 'num_leaves': 50}\n",
      "0.960275  with:   {'learning_rate': 0.06, 'max_depth': 5, 'num_leaves': 55}\n",
      "0.960275  with:   {'learning_rate': 0.06, 'max_depth': 5, 'num_leaves': 60}\n",
      "0.960275  with:   {'learning_rate': 0.06, 'max_depth': 5, 'num_leaves': 65}\n",
      "0.960275  with:   {'learning_rate': 0.06, 'max_depth': 5, 'num_leaves': 70}\n",
      "0.960275  with:   {'learning_rate': 0.06, 'max_depth': 5, 'num_leaves': 75}\n",
      "0.960369  with:   {'learning_rate': 0.06, 'max_depth': 6, 'num_leaves': 25}\n",
      "0.960347  with:   {'learning_rate': 0.06, 'max_depth': 6, 'num_leaves': 30}\n",
      "0.960415  with:   {'learning_rate': 0.06, 'max_depth': 6, 'num_leaves': 35}\n",
      "0.960379  with:   {'learning_rate': 0.06, 'max_depth': 6, 'num_leaves': 40}\n",
      "0.960252  with:   {'learning_rate': 0.06, 'max_depth': 6, 'num_leaves': 45}\n",
      "0.960205  with:   {'learning_rate': 0.06, 'max_depth': 6, 'num_leaves': 50}\n",
      "0.960044  with:   {'learning_rate': 0.06, 'max_depth': 6, 'num_leaves': 55}\n",
      "0.960054  with:   {'learning_rate': 0.06, 'max_depth': 6, 'num_leaves': 60}\n",
      "0.960052  with:   {'learning_rate': 0.06, 'max_depth': 6, 'num_leaves': 65}\n",
      "0.960052  with:   {'learning_rate': 0.06, 'max_depth': 6, 'num_leaves': 70}\n",
      "0.960052  with:   {'learning_rate': 0.06, 'max_depth': 6, 'num_leaves': 75}\n",
      "0.960286  with:   {'learning_rate': 0.06, 'max_depth': 7, 'num_leaves': 25}\n",
      "0.960330  with:   {'learning_rate': 0.06, 'max_depth': 7, 'num_leaves': 30}\n",
      "0.960403  with:   {'learning_rate': 0.06, 'max_depth': 7, 'num_leaves': 35}\n",
      "0.960315  with:   {'learning_rate': 0.06, 'max_depth': 7, 'num_leaves': 40}\n",
      "0.960327  with:   {'learning_rate': 0.06, 'max_depth': 7, 'num_leaves': 45}\n",
      "0.960214  with:   {'learning_rate': 0.06, 'max_depth': 7, 'num_leaves': 50}\n",
      "0.960298  with:   {'learning_rate': 0.06, 'max_depth': 7, 'num_leaves': 55}\n",
      "0.960170  with:   {'learning_rate': 0.06, 'max_depth': 7, 'num_leaves': 60}\n",
      "0.960012  with:   {'learning_rate': 0.06, 'max_depth': 7, 'num_leaves': 65}\n",
      "0.960141  with:   {'learning_rate': 0.06, 'max_depth': 7, 'num_leaves': 70}\n",
      "0.960068  with:   {'learning_rate': 0.06, 'max_depth': 7, 'num_leaves': 75}\n",
      "0.960278  with:   {'learning_rate': 0.06, 'max_depth': 8, 'num_leaves': 25}\n",
      "0.960236  with:   {'learning_rate': 0.06, 'max_depth': 8, 'num_leaves': 30}\n",
      "0.960322  with:   {'learning_rate': 0.06, 'max_depth': 8, 'num_leaves': 35}\n",
      "0.960335  with:   {'learning_rate': 0.06, 'max_depth': 8, 'num_leaves': 40}\n",
      "0.960377  with:   {'learning_rate': 0.06, 'max_depth': 8, 'num_leaves': 45}\n",
      "0.960310  with:   {'learning_rate': 0.06, 'max_depth': 8, 'num_leaves': 50}\n",
      "0.960217  with:   {'learning_rate': 0.06, 'max_depth': 8, 'num_leaves': 55}\n",
      "0.960165  with:   {'learning_rate': 0.06, 'max_depth': 8, 'num_leaves': 60}\n",
      "0.960163  with:   {'learning_rate': 0.06, 'max_depth': 8, 'num_leaves': 65}\n",
      "0.960134  with:   {'learning_rate': 0.06, 'max_depth': 8, 'num_leaves': 70}\n",
      "0.960044  with:   {'learning_rate': 0.06, 'max_depth': 8, 'num_leaves': 75}\n",
      "0.960290  with:   {'learning_rate': 0.06, 'max_depth': 9, 'num_leaves': 25}\n",
      "0.960203  with:   {'learning_rate': 0.06, 'max_depth': 9, 'num_leaves': 30}\n",
      "0.960362  with:   {'learning_rate': 0.06, 'max_depth': 9, 'num_leaves': 35}\n",
      "0.960313  with:   {'learning_rate': 0.06, 'max_depth': 9, 'num_leaves': 40}\n",
      "0.960266  with:   {'learning_rate': 0.06, 'max_depth': 9, 'num_leaves': 45}\n",
      "0.960331  with:   {'learning_rate': 0.06, 'max_depth': 9, 'num_leaves': 50}\n",
      "0.960282  with:   {'learning_rate': 0.06, 'max_depth': 9, 'num_leaves': 55}\n",
      "0.960301  with:   {'learning_rate': 0.06, 'max_depth': 9, 'num_leaves': 60}\n",
      "0.960215  with:   {'learning_rate': 0.06, 'max_depth': 9, 'num_leaves': 65}\n",
      "0.960143  with:   {'learning_rate': 0.06, 'max_depth': 9, 'num_leaves': 70}\n",
      "0.960116  with:   {'learning_rate': 0.06, 'max_depth': 9, 'num_leaves': 75}\n",
      "0.960246  with:   {'learning_rate': 0.06, 'max_depth': 10, 'num_leaves': 25}\n",
      "0.960207  with:   {'learning_rate': 0.06, 'max_depth': 10, 'num_leaves': 30}\n",
      "0.960289  with:   {'learning_rate': 0.06, 'max_depth': 10, 'num_leaves': 35}\n",
      "0.960322  with:   {'learning_rate': 0.06, 'max_depth': 10, 'num_leaves': 40}\n",
      "0.960265  with:   {'learning_rate': 0.06, 'max_depth': 10, 'num_leaves': 45}\n",
      "0.960296  with:   {'learning_rate': 0.06, 'max_depth': 10, 'num_leaves': 50}\n",
      "0.960269  with:   {'learning_rate': 0.06, 'max_depth': 10, 'num_leaves': 55}\n",
      "0.960226  with:   {'learning_rate': 0.06, 'max_depth': 10, 'num_leaves': 60}\n",
      "0.960214  with:   {'learning_rate': 0.06, 'max_depth': 10, 'num_leaves': 65}\n",
      "0.960119  with:   {'learning_rate': 0.06, 'max_depth': 10, 'num_leaves': 70}\n",
      "0.960059  with:   {'learning_rate': 0.06, 'max_depth': 10, 'num_leaves': 75}\n",
      "0.953836  with:   {'learning_rate': 0.08, 'max_depth': 1, 'num_leaves': 25}\n",
      "0.953836  with:   {'learning_rate': 0.08, 'max_depth': 1, 'num_leaves': 30}\n",
      "0.953836  with:   {'learning_rate': 0.08, 'max_depth': 1, 'num_leaves': 35}\n",
      "0.953836  with:   {'learning_rate': 0.08, 'max_depth': 1, 'num_leaves': 40}\n",
      "0.953836  with:   {'learning_rate': 0.08, 'max_depth': 1, 'num_leaves': 45}\n",
      "0.953836  with:   {'learning_rate': 0.08, 'max_depth': 1, 'num_leaves': 50}\n",
      "0.953836  with:   {'learning_rate': 0.08, 'max_depth': 1, 'num_leaves': 55}\n",
      "0.953836  with:   {'learning_rate': 0.08, 'max_depth': 1, 'num_leaves': 60}\n",
      "0.953836  with:   {'learning_rate': 0.08, 'max_depth': 1, 'num_leaves': 65}\n",
      "0.953836  with:   {'learning_rate': 0.08, 'max_depth': 1, 'num_leaves': 70}\n",
      "0.953836  with:   {'learning_rate': 0.08, 'max_depth': 1, 'num_leaves': 75}\n",
      "0.958394  with:   {'learning_rate': 0.08, 'max_depth': 2, 'num_leaves': 25}\n",
      "0.958394  with:   {'learning_rate': 0.08, 'max_depth': 2, 'num_leaves': 30}\n",
      "0.958394  with:   {'learning_rate': 0.08, 'max_depth': 2, 'num_leaves': 35}\n",
      "0.958394  with:   {'learning_rate': 0.08, 'max_depth': 2, 'num_leaves': 40}\n",
      "0.958394  with:   {'learning_rate': 0.08, 'max_depth': 2, 'num_leaves': 45}\n",
      "0.958394  with:   {'learning_rate': 0.08, 'max_depth': 2, 'num_leaves': 50}\n",
      "0.958394  with:   {'learning_rate': 0.08, 'max_depth': 2, 'num_leaves': 55}\n",
      "0.958394  with:   {'learning_rate': 0.08, 'max_depth': 2, 'num_leaves': 60}\n",
      "0.958394  with:   {'learning_rate': 0.08, 'max_depth': 2, 'num_leaves': 65}\n",
      "0.958394  with:   {'learning_rate': 0.08, 'max_depth': 2, 'num_leaves': 70}\n",
      "0.958394  with:   {'learning_rate': 0.08, 'max_depth': 2, 'num_leaves': 75}\n",
      "0.960069  with:   {'learning_rate': 0.08, 'max_depth': 3, 'num_leaves': 25}\n",
      "0.960069  with:   {'learning_rate': 0.08, 'max_depth': 3, 'num_leaves': 30}\n",
      "0.960069  with:   {'learning_rate': 0.08, 'max_depth': 3, 'num_leaves': 35}\n",
      "0.960069  with:   {'learning_rate': 0.08, 'max_depth': 3, 'num_leaves': 40}\n",
      "0.960069  with:   {'learning_rate': 0.08, 'max_depth': 3, 'num_leaves': 45}\n",
      "0.960069  with:   {'learning_rate': 0.08, 'max_depth': 3, 'num_leaves': 50}\n",
      "0.960069  with:   {'learning_rate': 0.08, 'max_depth': 3, 'num_leaves': 55}\n",
      "0.960069  with:   {'learning_rate': 0.08, 'max_depth': 3, 'num_leaves': 60}\n",
      "0.960069  with:   {'learning_rate': 0.08, 'max_depth': 3, 'num_leaves': 65}\n",
      "0.960069  with:   {'learning_rate': 0.08, 'max_depth': 3, 'num_leaves': 70}\n",
      "0.960069  with:   {'learning_rate': 0.08, 'max_depth': 3, 'num_leaves': 75}\n",
      "0.960509  with:   {'learning_rate': 0.08, 'max_depth': 4, 'num_leaves': 25}\n",
      "0.960509  with:   {'learning_rate': 0.08, 'max_depth': 4, 'num_leaves': 30}\n",
      "0.960509  with:   {'learning_rate': 0.08, 'max_depth': 4, 'num_leaves': 35}\n",
      "0.960509  with:   {'learning_rate': 0.08, 'max_depth': 4, 'num_leaves': 40}\n",
      "0.960509  with:   {'learning_rate': 0.08, 'max_depth': 4, 'num_leaves': 45}\n",
      "0.960509  with:   {'learning_rate': 0.08, 'max_depth': 4, 'num_leaves': 50}\n",
      "0.960509  with:   {'learning_rate': 0.08, 'max_depth': 4, 'num_leaves': 55}\n",
      "0.960509  with:   {'learning_rate': 0.08, 'max_depth': 4, 'num_leaves': 60}\n",
      "0.960509  with:   {'learning_rate': 0.08, 'max_depth': 4, 'num_leaves': 65}\n",
      "0.960509  with:   {'learning_rate': 0.08, 'max_depth': 4, 'num_leaves': 70}\n",
      "0.960509  with:   {'learning_rate': 0.08, 'max_depth': 4, 'num_leaves': 75}\n",
      "0.960686  with:   {'learning_rate': 0.08, 'max_depth': 5, 'num_leaves': 25}\n",
      "0.960580  with:   {'learning_rate': 0.08, 'max_depth': 5, 'num_leaves': 30}\n",
      "0.960494  with:   {'learning_rate': 0.08, 'max_depth': 5, 'num_leaves': 35}\n",
      "0.960494  with:   {'learning_rate': 0.08, 'max_depth': 5, 'num_leaves': 40}\n",
      "0.960494  with:   {'learning_rate': 0.08, 'max_depth': 5, 'num_leaves': 45}\n",
      "0.960494  with:   {'learning_rate': 0.08, 'max_depth': 5, 'num_leaves': 50}\n",
      "0.960494  with:   {'learning_rate': 0.08, 'max_depth': 5, 'num_leaves': 55}\n",
      "0.960494  with:   {'learning_rate': 0.08, 'max_depth': 5, 'num_leaves': 60}\n",
      "0.960494  with:   {'learning_rate': 0.08, 'max_depth': 5, 'num_leaves': 65}\n",
      "0.960494  with:   {'learning_rate': 0.08, 'max_depth': 5, 'num_leaves': 70}\n",
      "0.960494  with:   {'learning_rate': 0.08, 'max_depth': 5, 'num_leaves': 75}\n",
      "0.960640  with:   {'learning_rate': 0.08, 'max_depth': 6, 'num_leaves': 25}\n",
      "0.960746  with:   {'learning_rate': 0.08, 'max_depth': 6, 'num_leaves': 30}\n",
      "0.960554  with:   {'learning_rate': 0.08, 'max_depth': 6, 'num_leaves': 35}\n",
      "0.960525  with:   {'learning_rate': 0.08, 'max_depth': 6, 'num_leaves': 40}\n",
      "0.960298  with:   {'learning_rate': 0.08, 'max_depth': 6, 'num_leaves': 45}\n",
      "0.960199  with:   {'learning_rate': 0.08, 'max_depth': 6, 'num_leaves': 50}\n",
      "0.960145  with:   {'learning_rate': 0.08, 'max_depth': 6, 'num_leaves': 55}\n",
      "0.960260  with:   {'learning_rate': 0.08, 'max_depth': 6, 'num_leaves': 60}\n",
      "0.960223  with:   {'learning_rate': 0.08, 'max_depth': 6, 'num_leaves': 65}\n",
      "0.960223  with:   {'learning_rate': 0.08, 'max_depth': 6, 'num_leaves': 70}\n",
      "0.960223  with:   {'learning_rate': 0.08, 'max_depth': 6, 'num_leaves': 75}\n",
      "0.960655  with:   {'learning_rate': 0.08, 'max_depth': 7, 'num_leaves': 25}\n",
      "0.960706  with:   {'learning_rate': 0.08, 'max_depth': 7, 'num_leaves': 30}\n",
      "0.960584  with:   {'learning_rate': 0.08, 'max_depth': 7, 'num_leaves': 35}\n",
      "0.960643  with:   {'learning_rate': 0.08, 'max_depth': 7, 'num_leaves': 40}\n",
      "0.960525  with:   {'learning_rate': 0.08, 'max_depth': 7, 'num_leaves': 45}\n",
      "0.960488  with:   {'learning_rate': 0.08, 'max_depth': 7, 'num_leaves': 50}\n",
      "0.960291  with:   {'learning_rate': 0.08, 'max_depth': 7, 'num_leaves': 55}\n",
      "0.960231  with:   {'learning_rate': 0.08, 'max_depth': 7, 'num_leaves': 60}\n",
      "0.960182  with:   {'learning_rate': 0.08, 'max_depth': 7, 'num_leaves': 65}\n",
      "0.960019  with:   {'learning_rate': 0.08, 'max_depth': 7, 'num_leaves': 70}\n",
      "0.959981  with:   {'learning_rate': 0.08, 'max_depth': 7, 'num_leaves': 75}\n",
      "0.960654  with:   {'learning_rate': 0.08, 'max_depth': 8, 'num_leaves': 25}\n",
      "0.960661  with:   {'learning_rate': 0.08, 'max_depth': 8, 'num_leaves': 30}\n",
      "0.960630  with:   {'learning_rate': 0.08, 'max_depth': 8, 'num_leaves': 35}\n",
      "0.960626  with:   {'learning_rate': 0.08, 'max_depth': 8, 'num_leaves': 40}\n",
      "0.960473  with:   {'learning_rate': 0.08, 'max_depth': 8, 'num_leaves': 45}\n",
      "0.960474  with:   {'learning_rate': 0.08, 'max_depth': 8, 'num_leaves': 50}\n",
      "0.960352  with:   {'learning_rate': 0.08, 'max_depth': 8, 'num_leaves': 55}\n",
      "0.960285  with:   {'learning_rate': 0.08, 'max_depth': 8, 'num_leaves': 60}\n",
      "0.960184  with:   {'learning_rate': 0.08, 'max_depth': 8, 'num_leaves': 65}\n",
      "0.960101  with:   {'learning_rate': 0.08, 'max_depth': 8, 'num_leaves': 70}\n",
      "0.959993  with:   {'learning_rate': 0.08, 'max_depth': 8, 'num_leaves': 75}\n",
      "0.960680  with:   {'learning_rate': 0.08, 'max_depth': 9, 'num_leaves': 25}\n",
      "0.960652  with:   {'learning_rate': 0.08, 'max_depth': 9, 'num_leaves': 30}\n",
      "0.960596  with:   {'learning_rate': 0.08, 'max_depth': 9, 'num_leaves': 35}\n",
      "0.960557  with:   {'learning_rate': 0.08, 'max_depth': 9, 'num_leaves': 40}\n",
      "0.960464  with:   {'learning_rate': 0.08, 'max_depth': 9, 'num_leaves': 45}\n",
      "0.960560  with:   {'learning_rate': 0.08, 'max_depth': 9, 'num_leaves': 50}\n",
      "0.960427  with:   {'learning_rate': 0.08, 'max_depth': 9, 'num_leaves': 55}\n",
      "0.960310  with:   {'learning_rate': 0.08, 'max_depth': 9, 'num_leaves': 60}\n",
      "0.960284  with:   {'learning_rate': 0.08, 'max_depth': 9, 'num_leaves': 65}\n",
      "0.960090  with:   {'learning_rate': 0.08, 'max_depth': 9, 'num_leaves': 70}\n",
      "0.960333  with:   {'learning_rate': 0.08, 'max_depth': 9, 'num_leaves': 75}\n",
      "0.960629  with:   {'learning_rate': 0.08, 'max_depth': 10, 'num_leaves': 25}\n",
      "0.960616  with:   {'learning_rate': 0.08, 'max_depth': 10, 'num_leaves': 30}\n",
      "0.960636  with:   {'learning_rate': 0.08, 'max_depth': 10, 'num_leaves': 35}\n",
      "0.960503  with:   {'learning_rate': 0.08, 'max_depth': 10, 'num_leaves': 40}\n",
      "0.960529  with:   {'learning_rate': 0.08, 'max_depth': 10, 'num_leaves': 45}\n",
      "0.960535  with:   {'learning_rate': 0.08, 'max_depth': 10, 'num_leaves': 50}\n",
      "0.960395  with:   {'learning_rate': 0.08, 'max_depth': 10, 'num_leaves': 55}\n",
      "0.960299  with:   {'learning_rate': 0.08, 'max_depth': 10, 'num_leaves': 60}\n",
      "0.960213  with:   {'learning_rate': 0.08, 'max_depth': 10, 'num_leaves': 65}\n",
      "0.960202  with:   {'learning_rate': 0.08, 'max_depth': 10, 'num_leaves': 70}\n",
      "0.960053  with:   {'learning_rate': 0.08, 'max_depth': 10, 'num_leaves': 75}\n",
      "0.954994  with:   {'learning_rate': 0.1, 'max_depth': 1, 'num_leaves': 25}\n",
      "0.954994  with:   {'learning_rate': 0.1, 'max_depth': 1, 'num_leaves': 30}\n",
      "0.954994  with:   {'learning_rate': 0.1, 'max_depth': 1, 'num_leaves': 35}\n",
      "0.954994  with:   {'learning_rate': 0.1, 'max_depth': 1, 'num_leaves': 40}\n",
      "0.954994  with:   {'learning_rate': 0.1, 'max_depth': 1, 'num_leaves': 45}\n",
      "0.954994  with:   {'learning_rate': 0.1, 'max_depth': 1, 'num_leaves': 50}\n",
      "0.954994  with:   {'learning_rate': 0.1, 'max_depth': 1, 'num_leaves': 55}\n",
      "0.954994  with:   {'learning_rate': 0.1, 'max_depth': 1, 'num_leaves': 60}\n",
      "0.954994  with:   {'learning_rate': 0.1, 'max_depth': 1, 'num_leaves': 65}\n",
      "0.954994  with:   {'learning_rate': 0.1, 'max_depth': 1, 'num_leaves': 70}\n",
      "0.954994  with:   {'learning_rate': 0.1, 'max_depth': 1, 'num_leaves': 75}\n",
      "0.959227  with:   {'learning_rate': 0.1, 'max_depth': 2, 'num_leaves': 25}\n",
      "0.959227  with:   {'learning_rate': 0.1, 'max_depth': 2, 'num_leaves': 30}\n",
      "0.959227  with:   {'learning_rate': 0.1, 'max_depth': 2, 'num_leaves': 35}\n",
      "0.959227  with:   {'learning_rate': 0.1, 'max_depth': 2, 'num_leaves': 40}\n",
      "0.959227  with:   {'learning_rate': 0.1, 'max_depth': 2, 'num_leaves': 45}\n",
      "0.959227  with:   {'learning_rate': 0.1, 'max_depth': 2, 'num_leaves': 50}\n",
      "0.959227  with:   {'learning_rate': 0.1, 'max_depth': 2, 'num_leaves': 55}\n",
      "0.959227  with:   {'learning_rate': 0.1, 'max_depth': 2, 'num_leaves': 60}\n",
      "0.959227  with:   {'learning_rate': 0.1, 'max_depth': 2, 'num_leaves': 65}\n",
      "0.959227  with:   {'learning_rate': 0.1, 'max_depth': 2, 'num_leaves': 70}\n",
      "0.959227  with:   {'learning_rate': 0.1, 'max_depth': 2, 'num_leaves': 75}\n",
      "0.960359  with:   {'learning_rate': 0.1, 'max_depth': 3, 'num_leaves': 25}\n",
      "0.960359  with:   {'learning_rate': 0.1, 'max_depth': 3, 'num_leaves': 30}\n",
      "0.960359  with:   {'learning_rate': 0.1, 'max_depth': 3, 'num_leaves': 35}\n",
      "0.960359  with:   {'learning_rate': 0.1, 'max_depth': 3, 'num_leaves': 40}\n",
      "0.960359  with:   {'learning_rate': 0.1, 'max_depth': 3, 'num_leaves': 45}\n",
      "0.960359  with:   {'learning_rate': 0.1, 'max_depth': 3, 'num_leaves': 50}\n",
      "0.960359  with:   {'learning_rate': 0.1, 'max_depth': 3, 'num_leaves': 55}\n",
      "0.960359  with:   {'learning_rate': 0.1, 'max_depth': 3, 'num_leaves': 60}\n",
      "0.960359  with:   {'learning_rate': 0.1, 'max_depth': 3, 'num_leaves': 65}\n",
      "0.960359  with:   {'learning_rate': 0.1, 'max_depth': 3, 'num_leaves': 70}\n",
      "0.960359  with:   {'learning_rate': 0.1, 'max_depth': 3, 'num_leaves': 75}\n",
      "0.960707  with:   {'learning_rate': 0.1, 'max_depth': 4, 'num_leaves': 25}\n",
      "0.960707  with:   {'learning_rate': 0.1, 'max_depth': 4, 'num_leaves': 30}\n",
      "0.960707  with:   {'learning_rate': 0.1, 'max_depth': 4, 'num_leaves': 35}\n",
      "0.960707  with:   {'learning_rate': 0.1, 'max_depth': 4, 'num_leaves': 40}\n",
      "0.960707  with:   {'learning_rate': 0.1, 'max_depth': 4, 'num_leaves': 45}\n",
      "0.960707  with:   {'learning_rate': 0.1, 'max_depth': 4, 'num_leaves': 50}\n",
      "0.960707  with:   {'learning_rate': 0.1, 'max_depth': 4, 'num_leaves': 55}\n",
      "0.960707  with:   {'learning_rate': 0.1, 'max_depth': 4, 'num_leaves': 60}\n",
      "0.960707  with:   {'learning_rate': 0.1, 'max_depth': 4, 'num_leaves': 65}\n",
      "0.960707  with:   {'learning_rate': 0.1, 'max_depth': 4, 'num_leaves': 70}\n",
      "0.960707  with:   {'learning_rate': 0.1, 'max_depth': 4, 'num_leaves': 75}\n",
      "0.960745  with:   {'learning_rate': 0.1, 'max_depth': 5, 'num_leaves': 25}\n",
      "0.960666  with:   {'learning_rate': 0.1, 'max_depth': 5, 'num_leaves': 30}\n",
      "0.960538  with:   {'learning_rate': 0.1, 'max_depth': 5, 'num_leaves': 35}\n",
      "0.960538  with:   {'learning_rate': 0.1, 'max_depth': 5, 'num_leaves': 40}\n",
      "0.960538  with:   {'learning_rate': 0.1, 'max_depth': 5, 'num_leaves': 45}\n",
      "0.960538  with:   {'learning_rate': 0.1, 'max_depth': 5, 'num_leaves': 50}\n",
      "0.960538  with:   {'learning_rate': 0.1, 'max_depth': 5, 'num_leaves': 55}\n",
      "0.960538  with:   {'learning_rate': 0.1, 'max_depth': 5, 'num_leaves': 60}\n",
      "0.960538  with:   {'learning_rate': 0.1, 'max_depth': 5, 'num_leaves': 65}\n",
      "0.960538  with:   {'learning_rate': 0.1, 'max_depth': 5, 'num_leaves': 70}\n",
      "0.960538  with:   {'learning_rate': 0.1, 'max_depth': 5, 'num_leaves': 75}\n",
      "0.960797  with:   {'learning_rate': 0.1, 'max_depth': 6, 'num_leaves': 25}\n",
      "0.960593  with:   {'learning_rate': 0.1, 'max_depth': 6, 'num_leaves': 30}\n",
      "0.960567  with:   {'learning_rate': 0.1, 'max_depth': 6, 'num_leaves': 35}\n",
      "0.960483  with:   {'learning_rate': 0.1, 'max_depth': 6, 'num_leaves': 40}\n",
      "0.960303  with:   {'learning_rate': 0.1, 'max_depth': 6, 'num_leaves': 45}\n",
      "0.960404  with:   {'learning_rate': 0.1, 'max_depth': 6, 'num_leaves': 50}\n",
      "0.960285  with:   {'learning_rate': 0.1, 'max_depth': 6, 'num_leaves': 55}\n",
      "0.960221  with:   {'learning_rate': 0.1, 'max_depth': 6, 'num_leaves': 60}\n",
      "0.960241  with:   {'learning_rate': 0.1, 'max_depth': 6, 'num_leaves': 65}\n",
      "0.960241  with:   {'learning_rate': 0.1, 'max_depth': 6, 'num_leaves': 70}\n",
      "0.960241  with:   {'learning_rate': 0.1, 'max_depth': 6, 'num_leaves': 75}\n",
      "0.960836  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 25}\n",
      "0.960677  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 30}\n",
      "0.960633  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 35}\n",
      "0.960561  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 40}\n",
      "0.960470  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 45}\n",
      "0.960456  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 50}\n",
      "0.960338  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 55}\n",
      "0.960105  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 60}\n",
      "0.959909  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 65}\n",
      "0.959979  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 70}\n",
      "0.959786  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 75}\n",
      "0.960774  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 25}\n",
      "0.960757  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 30}\n",
      "0.960713  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 35}\n",
      "0.960602  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 40}\n",
      "0.960477  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 45}\n",
      "0.960456  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 50}\n",
      "0.960243  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 55}\n",
      "0.960142  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 60}\n",
      "0.960212  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 65}\n",
      "0.960302  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 70}\n",
      "0.959813  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 75}\n",
      "0.960791  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 25}\n",
      "0.960650  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 30}\n",
      "0.960613  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 35}\n",
      "0.960579  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 40}\n",
      "0.960508  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 45}\n",
      "0.960500  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 50}\n",
      "0.960380  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 55}\n",
      "0.960292  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 60}\n",
      "0.960028  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 65}\n",
      "0.960076  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 70}\n",
      "0.959900  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 75}\n",
      "0.960725  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 25}\n",
      "0.960698  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 30}\n",
      "0.960660  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 35}\n",
      "0.960541  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 40}\n",
      "0.960535  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 45}\n",
      "0.960523  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 50}\n",
      "0.960414  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 55}\n",
      "0.960401  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 60}\n",
      "0.960293  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 65}\n",
      "0.959984  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 70}\n",
      "0.960152  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 75}\n"
     ]
    }
   ],
    "source": [
     "# Inspect how the CV score trends across the parameter grid.\n",
     "# NOTE: gsearch_result1 is the fitted GridSearchCV produced in a later cell\n",
     "# (out-of-order execution counts — this cell only works after that one has run).\n",
     "means = gsearch_result1.cv_results_['mean_test_score']\n",
     "params = gsearch_result1.cv_results_['params']\n",
     "for mean,param in zip(means,params):\n",
     "    print(\"%f  with:   %r\" % (mean,param))"
    ]
  },
  {
   "cell_type": "code",
   "execution_count": 151,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[LightGBM] [Warning] bagging_fraction is set=0.4, subsample=1.0 will be ignored. Current value: bagging_fraction=0.4\n",
      "[LightGBM] [Warning] feature_fraction is set=0.6, colsample_bytree=1.0 will be ignored. Current value: feature_fraction=0.6\n",
      "{'learning_rate': 0.1, 'max_depth': 11, 'num_leaves': 20}\n",
      "0.960851962177529\n",
      "[[ 8360  1852]\n",
      " [ 1958 37830]]\n"
     ]
    }
   ],
    "source": [
     "# The scan above shows learning rates below 0.1 are not ideal; try higher rates.\n",
     "# The previous step found 'max_depth': 7 — try increasing the depth.\n",
     "# The previous step found 'num_leaves': 25 — try reducing the number of leaves.\n",
     "lgb1 = lgb.LGBMClassifier(boosting_type = 'gbdt',n_estimators=43,\n",
     "                          metric='binary_logloss', bagging_fraction = 0.4,feature_fraction = 0.6)\n",
     "params_test1={\n",
     "    'learning_rate':np.linspace(0.1,0.5,5),\n",
     "    'max_depth':range(7,12,1),\n",
     "    'num_leaves':range(15,26,1)\n",
     "}\n",
     "# 5-fold CV over the grid, scored by ROC AUC; n_jobs=4 parallelizes the fits.\n",
     "gsearch1=GridSearchCV(estimator=lgb1, param_grid=params_test1, scoring='roc_auc', cv=5, verbose=0, n_jobs=4)\n",
     "gsearch_result1=gsearch1.fit(train_final.drop(columns='loan_status'),train_final['loan_status'])\n",
     "# Best parameters and best CV score\n",
     "print(gsearch_result1.best_params_)\n",
     "print(gsearch_result1.best_score_)\n",
     "# Confusion matrix\n",
     "# NOTE(review): predictions here are made on the same training data used to fit,\n",
     "# so this matrix is optimistic — confirm on a held-out split before relying on it.\n",
     "y_pred=gsearch_result1.predict(train_final.drop(columns='loan_status'))\n",
     "print(confusion_matrix(train_final['loan_status'],y_pred))"
    ]
  },
  {
   "cell_type": "code",
   "execution_count": 153,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.960752  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 15}\n",
      "0.960695  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 16}\n",
      "0.960721  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 17}\n",
      "0.960744  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 18}\n",
      "0.960692  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 19}\n",
      "0.960734  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 20}\n",
      "0.960806  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 21}\n",
      "0.960811  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 22}\n",
      "0.960691  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 23}\n",
      "0.960754  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 24}\n",
      "0.960836  with:   {'learning_rate': 0.1, 'max_depth': 7, 'num_leaves': 25}\n",
      "0.960823  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 15}\n",
      "0.960765  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 16}\n",
      "0.960699  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 17}\n",
      "0.960729  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 18}\n",
      "0.960746  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 19}\n",
      "0.960740  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 20}\n",
      "0.960689  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 21}\n",
      "0.960849  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 22}\n",
      "0.960652  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 23}\n",
      "0.960758  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 24}\n",
      "0.960774  with:   {'learning_rate': 0.1, 'max_depth': 8, 'num_leaves': 25}\n",
      "0.960826  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 15}\n",
      "0.960773  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 16}\n",
      "0.960734  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 17}\n",
      "0.960775  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 18}\n",
      "0.960671  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 19}\n",
      "0.960849  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 20}\n",
      "0.960699  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 21}\n",
      "0.960768  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 22}\n",
      "0.960770  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 23}\n",
      "0.960749  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 24}\n",
      "0.960791  with:   {'learning_rate': 0.1, 'max_depth': 9, 'num_leaves': 25}\n",
      "0.960822  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 15}\n",
      "0.960756  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 16}\n",
      "0.960756  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 17}\n",
      "0.960796  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 18}\n",
      "0.960627  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 19}\n",
      "0.960851  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 20}\n",
      "0.960742  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 21}\n",
      "0.960827  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 22}\n",
      "0.960784  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 23}\n",
      "0.960697  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 24}\n",
      "0.960725  with:   {'learning_rate': 0.1, 'max_depth': 10, 'num_leaves': 25}\n",
      "0.960822  with:   {'learning_rate': 0.1, 'max_depth': 11, 'num_leaves': 15}\n",
      "0.960756  with:   {'learning_rate': 0.1, 'max_depth': 11, 'num_leaves': 16}\n",
      "0.960756  with:   {'learning_rate': 0.1, 'max_depth': 11, 'num_leaves': 17}\n",
      "0.960800  with:   {'learning_rate': 0.1, 'max_depth': 11, 'num_leaves': 18}\n",
      "0.960648  with:   {'learning_rate': 0.1, 'max_depth': 11, 'num_leaves': 19}\n",
      "0.960852  with:   {'learning_rate': 0.1, 'max_depth': 11, 'num_leaves': 20}\n",
      "0.960729  with:   {'learning_rate': 0.1, 'max_depth': 11, 'num_leaves': 21}\n",
      "0.960849  with:   {'learning_rate': 0.1, 'max_depth': 11, 'num_leaves': 22}\n",
      "0.960820  with:   {'learning_rate': 0.1, 'max_depth': 11, 'num_leaves': 23}\n",
      "0.960695  with:   {'learning_rate': 0.1, 'max_depth': 11, 'num_leaves': 24}\n",
      "0.960715  with:   {'learning_rate': 0.1, 'max_depth': 11, 'num_leaves': 25}\n",
      "0.960361  with:   {'learning_rate': 0.2, 'max_depth': 7, 'num_leaves': 15}\n",
      "0.960293  with:   {'learning_rate': 0.2, 'max_depth': 7, 'num_leaves': 16}\n",
      "0.960250  with:   {'learning_rate': 0.2, 'max_depth': 7, 'num_leaves': 17}\n",
      "0.960417  with:   {'learning_rate': 0.2, 'max_depth': 7, 'num_leaves': 18}\n",
      "0.960204  with:   {'learning_rate': 0.2, 'max_depth': 7, 'num_leaves': 19}\n",
      "0.960078  with:   {'learning_rate': 0.2, 'max_depth': 7, 'num_leaves': 20}\n",
      "0.960240  with:   {'learning_rate': 0.2, 'max_depth': 7, 'num_leaves': 21}\n",
      "0.959956  with:   {'learning_rate': 0.2, 'max_depth': 7, 'num_leaves': 22}\n",
      "0.959631  with:   {'learning_rate': 0.2, 'max_depth': 7, 'num_leaves': 23}\n",
      "0.959907  with:   {'learning_rate': 0.2, 'max_depth': 7, 'num_leaves': 24}\n",
      "0.959927  with:   {'learning_rate': 0.2, 'max_depth': 7, 'num_leaves': 25}\n",
      "0.960349  with:   {'learning_rate': 0.2, 'max_depth': 8, 'num_leaves': 15}\n",
      "0.960458  with:   {'learning_rate': 0.2, 'max_depth': 8, 'num_leaves': 16}\n",
      "0.960267  with:   {'learning_rate': 0.2, 'max_depth': 8, 'num_leaves': 17}\n",
      "0.960249  with:   {'learning_rate': 0.2, 'max_depth': 8, 'num_leaves': 18}\n",
      "0.960115  with:   {'learning_rate': 0.2, 'max_depth': 8, 'num_leaves': 19}\n",
      "0.959954  with:   {'learning_rate': 0.2, 'max_depth': 8, 'num_leaves': 20}\n",
      "0.959988  with:   {'learning_rate': 0.2, 'max_depth': 8, 'num_leaves': 21}\n",
      "0.960071  with:   {'learning_rate': 0.2, 'max_depth': 8, 'num_leaves': 22}\n",
      "0.959702  with:   {'learning_rate': 0.2, 'max_depth': 8, 'num_leaves': 23}\n",
      "0.959989  with:   {'learning_rate': 0.2, 'max_depth': 8, 'num_leaves': 24}\n",
      "0.959635  with:   {'learning_rate': 0.2, 'max_depth': 8, 'num_leaves': 25}\n",
      "0.960425  with:   {'learning_rate': 0.2, 'max_depth': 9, 'num_leaves': 15}\n",
      "0.960413  with:   {'learning_rate': 0.2, 'max_depth': 9, 'num_leaves': 16}\n",
      "0.960212  with:   {'learning_rate': 0.2, 'max_depth': 9, 'num_leaves': 17}\n",
      "0.960240  with:   {'learning_rate': 0.2, 'max_depth': 9, 'num_leaves': 18}\n",
      "0.960143  with:   {'learning_rate': 0.2, 'max_depth': 9, 'num_leaves': 19}\n",
      "0.959967  with:   {'learning_rate': 0.2, 'max_depth': 9, 'num_leaves': 20}\n",
      "0.960064  with:   {'learning_rate': 0.2, 'max_depth': 9, 'num_leaves': 21}\n",
      "0.959955  with:   {'learning_rate': 0.2, 'max_depth': 9, 'num_leaves': 22}\n",
      "0.959698  with:   {'learning_rate': 0.2, 'max_depth': 9, 'num_leaves': 23}\n",
      "0.959766  with:   {'learning_rate': 0.2, 'max_depth': 9, 'num_leaves': 24}\n",
      "0.959740  with:   {'learning_rate': 0.2, 'max_depth': 9, 'num_leaves': 25}\n",
      "0.960272  with:   {'learning_rate': 0.2, 'max_depth': 10, 'num_leaves': 15}\n",
      "0.960127  with:   {'learning_rate': 0.2, 'max_depth': 10, 'num_leaves': 16}\n",
      "0.960062  with:   {'learning_rate': 0.2, 'max_depth': 10, 'num_leaves': 17}\n",
      "0.960307  with:   {'learning_rate': 0.2, 'max_depth': 10, 'num_leaves': 18}\n",
      "0.959968  with:   {'learning_rate': 0.2, 'max_depth': 10, 'num_leaves': 19}\n",
      "0.959900  with:   {'learning_rate': 0.2, 'max_depth': 10, 'num_leaves': 20}\n",
      "0.960075  with:   {'learning_rate': 0.2, 'max_depth': 10, 'num_leaves': 21}\n",
      "0.960013  with:   {'learning_rate': 0.2, 'max_depth': 10, 'num_leaves': 22}\n",
      "0.959698  with:   {'learning_rate': 0.2, 'max_depth': 10, 'num_leaves': 23}\n",
      "0.959810  with:   {'learning_rate': 0.2, 'max_depth': 10, 'num_leaves': 24}\n",
      "0.959630  with:   {'learning_rate': 0.2, 'max_depth': 10, 'num_leaves': 25}\n",
      "0.960269  with:   {'learning_rate': 0.2, 'max_depth': 11, 'num_leaves': 15}\n",
      "0.960203  with:   {'learning_rate': 0.2, 'max_depth': 11, 'num_leaves': 16}\n",
      "0.959989  with:   {'learning_rate': 0.2, 'max_depth': 11, 'num_leaves': 17}\n",
      "0.960246  with:   {'learning_rate': 0.2, 'max_depth': 11, 'num_leaves': 18}\n",
      "0.959920  with:   {'learning_rate': 0.2, 'max_depth': 11, 'num_leaves': 19}\n",
      "0.959992  with:   {'learning_rate': 0.2, 'max_depth': 11, 'num_leaves': 20}\n",
      "0.959912  with:   {'learning_rate': 0.2, 'max_depth': 11, 'num_leaves': 21}\n",
      "0.959975  with:   {'learning_rate': 0.2, 'max_depth': 11, 'num_leaves': 22}\n",
      "0.959824  with:   {'learning_rate': 0.2, 'max_depth': 11, 'num_leaves': 23}\n",
      "0.959794  with:   {'learning_rate': 0.2, 'max_depth': 11, 'num_leaves': 24}\n",
      "0.959642  with:   {'learning_rate': 0.2, 'max_depth': 11, 'num_leaves': 25}\n",
      "0.959051  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 7, 'num_leaves': 15}\n",
      "0.958944  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 7, 'num_leaves': 16}\n",
      "0.957560  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 7, 'num_leaves': 17}\n",
      "0.958655  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 7, 'num_leaves': 18}\n",
      "0.958335  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 7, 'num_leaves': 19}\n",
      "0.958223  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 7, 'num_leaves': 20}\n",
      "0.958194  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 7, 'num_leaves': 21}\n",
      "0.958152  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 7, 'num_leaves': 22}\n",
      "0.957446  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 7, 'num_leaves': 23}\n",
      "0.958031  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 7, 'num_leaves': 24}\n",
      "0.957718  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 7, 'num_leaves': 25}\n",
      "0.958508  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 8, 'num_leaves': 15}\n",
      "0.958027  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 8, 'num_leaves': 16}\n",
      "0.958013  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 8, 'num_leaves': 17}\n",
      "0.958172  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 8, 'num_leaves': 18}\n",
      "0.958115  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 8, 'num_leaves': 19}\n",
      "0.957573  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 8, 'num_leaves': 20}\n",
      "0.957744  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 8, 'num_leaves': 21}\n",
      "0.957536  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 8, 'num_leaves': 22}\n",
      "0.957800  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 8, 'num_leaves': 23}\n",
      "0.957766  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 8, 'num_leaves': 24}\n",
      "0.958171  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 8, 'num_leaves': 25}\n",
      "0.958672  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 9, 'num_leaves': 15}\n",
      "0.958787  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 9, 'num_leaves': 16}\n",
      "0.958624  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 9, 'num_leaves': 17}\n",
      "0.957823  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 9, 'num_leaves': 18}\n",
      "0.957717  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 9, 'num_leaves': 19}\n",
      "0.957857  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 9, 'num_leaves': 20}\n",
      "0.957324  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 9, 'num_leaves': 21}\n",
      "0.956915  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 9, 'num_leaves': 22}\n",
      "0.957736  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 9, 'num_leaves': 23}\n",
      "0.957647  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 9, 'num_leaves': 24}\n",
      "0.956483  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 9, 'num_leaves': 25}\n",
      "0.957757  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 10, 'num_leaves': 15}\n",
      "0.957860  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 10, 'num_leaves': 16}\n",
      "0.957158  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 10, 'num_leaves': 17}\n",
      "0.957760  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 10, 'num_leaves': 18}\n",
      "0.957665  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 10, 'num_leaves': 19}\n",
      "0.957510  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 10, 'num_leaves': 20}\n",
      "0.957799  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 10, 'num_leaves': 21}\n",
      "0.957537  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 10, 'num_leaves': 22}\n",
      "0.957131  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 10, 'num_leaves': 23}\n",
      "0.957566  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 10, 'num_leaves': 24}\n",
      "0.957348  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 10, 'num_leaves': 25}\n",
      "0.958497  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 11, 'num_leaves': 15}\n",
      "0.958147  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 11, 'num_leaves': 16}\n",
      "0.956331  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 11, 'num_leaves': 17}\n",
      "0.958201  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 11, 'num_leaves': 18}\n",
      "0.955976  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 11, 'num_leaves': 19}\n",
      "0.957472  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 11, 'num_leaves': 20}\n",
      "0.957415  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 11, 'num_leaves': 21}\n",
      "0.957690  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 11, 'num_leaves': 22}\n",
      "0.957126  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 11, 'num_leaves': 23}\n",
      "0.957340  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 11, 'num_leaves': 24}\n",
      "0.957386  with:   {'learning_rate': 0.30000000000000004, 'max_depth': 11, 'num_leaves': 25}\n",
      "0.955804  with:   {'learning_rate': 0.4, 'max_depth': 7, 'num_leaves': 15}\n",
      "0.956352  with:   {'learning_rate': 0.4, 'max_depth': 7, 'num_leaves': 16}\n",
      "0.957211  with:   {'learning_rate': 0.4, 'max_depth': 7, 'num_leaves': 17}\n",
      "0.956004  with:   {'learning_rate': 0.4, 'max_depth': 7, 'num_leaves': 18}\n",
      "0.956015  with:   {'learning_rate': 0.4, 'max_depth': 7, 'num_leaves': 19}\n",
      "0.955607  with:   {'learning_rate': 0.4, 'max_depth': 7, 'num_leaves': 20}\n",
      "0.955834  with:   {'learning_rate': 0.4, 'max_depth': 7, 'num_leaves': 21}\n",
      "0.955460  with:   {'learning_rate': 0.4, 'max_depth': 7, 'num_leaves': 22}\n",
      "0.955838  with:   {'learning_rate': 0.4, 'max_depth': 7, 'num_leaves': 23}\n",
      "0.956047  with:   {'learning_rate': 0.4, 'max_depth': 7, 'num_leaves': 24}\n",
      "0.955493  with:   {'learning_rate': 0.4, 'max_depth': 7, 'num_leaves': 25}\n",
      "0.956890  with:   {'learning_rate': 0.4, 'max_depth': 8, 'num_leaves': 15}\n",
      "0.954108  with:   {'learning_rate': 0.4, 'max_depth': 8, 'num_leaves': 16}\n",
      "0.955818  with:   {'learning_rate': 0.4, 'max_depth': 8, 'num_leaves': 17}\n",
      "0.955405  with:   {'learning_rate': 0.4, 'max_depth': 8, 'num_leaves': 18}\n",
      "0.954183  with:   {'learning_rate': 0.4, 'max_depth': 8, 'num_leaves': 19}\n",
      "0.953386  with:   {'learning_rate': 0.4, 'max_depth': 8, 'num_leaves': 20}\n",
      "0.954467  with:   {'learning_rate': 0.4, 'max_depth': 8, 'num_leaves': 21}\n",
      "0.954117  with:   {'learning_rate': 0.4, 'max_depth': 8, 'num_leaves': 22}\n",
      "0.954802  with:   {'learning_rate': 0.4, 'max_depth': 8, 'num_leaves': 23}\n",
      "0.956094  with:   {'learning_rate': 0.4, 'max_depth': 8, 'num_leaves': 24}\n",
      "0.954315  with:   {'learning_rate': 0.4, 'max_depth': 8, 'num_leaves': 25}\n",
      "0.955348  with:   {'learning_rate': 0.4, 'max_depth': 9, 'num_leaves': 15}\n",
      "0.955236  with:   {'learning_rate': 0.4, 'max_depth': 9, 'num_leaves': 16}\n",
      "0.955743  with:   {'learning_rate': 0.4, 'max_depth': 9, 'num_leaves': 17}\n",
      "0.954706  with:   {'learning_rate': 0.4, 'max_depth': 9, 'num_leaves': 18}\n",
      "0.953926  with:   {'learning_rate': 0.4, 'max_depth': 9, 'num_leaves': 19}\n",
      "0.954159  with:   {'learning_rate': 0.4, 'max_depth': 9, 'num_leaves': 20}\n",
      "0.954956  with:   {'learning_rate': 0.4, 'max_depth': 9, 'num_leaves': 21}\n",
      "0.954430  with:   {'learning_rate': 0.4, 'max_depth': 9, 'num_leaves': 22}\n",
      "0.955014  with:   {'learning_rate': 0.4, 'max_depth': 9, 'num_leaves': 23}\n",
      "0.953760  with:   {'learning_rate': 0.4, 'max_depth': 9, 'num_leaves': 24}\n",
      "0.953830  with:   {'learning_rate': 0.4, 'max_depth': 9, 'num_leaves': 25}\n",
      "0.956028  with:   {'learning_rate': 0.4, 'max_depth': 10, 'num_leaves': 15}\n",
      "0.953587  with:   {'learning_rate': 0.4, 'max_depth': 10, 'num_leaves': 16}\n",
      "0.954771  with:   {'learning_rate': 0.4, 'max_depth': 10, 'num_leaves': 17}\n",
      "0.954465  with:   {'learning_rate': 0.4, 'max_depth': 10, 'num_leaves': 18}\n",
      "0.953419  with:   {'learning_rate': 0.4, 'max_depth': 10, 'num_leaves': 19}\n",
      "0.954422  with:   {'learning_rate': 0.4, 'max_depth': 10, 'num_leaves': 20}\n",
      "0.954441  with:   {'learning_rate': 0.4, 'max_depth': 10, 'num_leaves': 21}\n",
      "0.953829  with:   {'learning_rate': 0.4, 'max_depth': 10, 'num_leaves': 22}\n",
      "0.954036  with:   {'learning_rate': 0.4, 'max_depth': 10, 'num_leaves': 23}\n",
      "0.954472  with:   {'learning_rate': 0.4, 'max_depth': 10, 'num_leaves': 24}\n",
      "0.954610  with:   {'learning_rate': 0.4, 'max_depth': 10, 'num_leaves': 25}\n",
      "0.955708  with:   {'learning_rate': 0.4, 'max_depth': 11, 'num_leaves': 15}\n",
      "0.953584  with:   {'learning_rate': 0.4, 'max_depth': 11, 'num_leaves': 16}\n",
      "0.956161  with:   {'learning_rate': 0.4, 'max_depth': 11, 'num_leaves': 17}\n",
      "0.954440  with:   {'learning_rate': 0.4, 'max_depth': 11, 'num_leaves': 18}\n",
      "0.955164  with:   {'learning_rate': 0.4, 'max_depth': 11, 'num_leaves': 19}\n",
      "0.953548  with:   {'learning_rate': 0.4, 'max_depth': 11, 'num_leaves': 20}\n",
      "0.954912  with:   {'learning_rate': 0.4, 'max_depth': 11, 'num_leaves': 21}\n",
      "0.954274  with:   {'learning_rate': 0.4, 'max_depth': 11, 'num_leaves': 22}\n",
      "0.953760  with:   {'learning_rate': 0.4, 'max_depth': 11, 'num_leaves': 23}\n",
      "0.953846  with:   {'learning_rate': 0.4, 'max_depth': 11, 'num_leaves': 24}\n",
      "0.953383  with:   {'learning_rate': 0.4, 'max_depth': 11, 'num_leaves': 25}\n",
      "0.953911  with:   {'learning_rate': 0.5, 'max_depth': 7, 'num_leaves': 15}\n",
      "0.953980  with:   {'learning_rate': 0.5, 'max_depth': 7, 'num_leaves': 16}\n",
      "0.952840  with:   {'learning_rate': 0.5, 'max_depth': 7, 'num_leaves': 17}\n",
      "0.955081  with:   {'learning_rate': 0.5, 'max_depth': 7, 'num_leaves': 18}\n",
      "0.953197  with:   {'learning_rate': 0.5, 'max_depth': 7, 'num_leaves': 19}\n",
      "0.950037  with:   {'learning_rate': 0.5, 'max_depth': 7, 'num_leaves': 20}\n",
      "0.953609  with:   {'learning_rate': 0.5, 'max_depth': 7, 'num_leaves': 21}\n",
      "0.952303  with:   {'learning_rate': 0.5, 'max_depth': 7, 'num_leaves': 22}\n",
      "0.952162  with:   {'learning_rate': 0.5, 'max_depth': 7, 'num_leaves': 23}\n",
      "0.952535  with:   {'learning_rate': 0.5, 'max_depth': 7, 'num_leaves': 24}\n",
      "0.950972  with:   {'learning_rate': 0.5, 'max_depth': 7, 'num_leaves': 25}\n",
      "0.954827  with:   {'learning_rate': 0.5, 'max_depth': 8, 'num_leaves': 15}\n",
      "0.954004  with:   {'learning_rate': 0.5, 'max_depth': 8, 'num_leaves': 16}\n",
      "0.954557  with:   {'learning_rate': 0.5, 'max_depth': 8, 'num_leaves': 17}\n",
      "0.952522  with:   {'learning_rate': 0.5, 'max_depth': 8, 'num_leaves': 18}\n",
      "0.952477  with:   {'learning_rate': 0.5, 'max_depth': 8, 'num_leaves': 19}\n",
      "0.951488  with:   {'learning_rate': 0.5, 'max_depth': 8, 'num_leaves': 20}\n",
      "0.951052  with:   {'learning_rate': 0.5, 'max_depth': 8, 'num_leaves': 21}\n",
      "0.949368  with:   {'learning_rate': 0.5, 'max_depth': 8, 'num_leaves': 22}\n",
      "0.948944  with:   {'learning_rate': 0.5, 'max_depth': 8, 'num_leaves': 23}\n",
      "0.950375  with:   {'learning_rate': 0.5, 'max_depth': 8, 'num_leaves': 24}\n",
      "0.950075  with:   {'learning_rate': 0.5, 'max_depth': 8, 'num_leaves': 25}\n",
      "0.952858  with:   {'learning_rate': 0.5, 'max_depth': 9, 'num_leaves': 15}\n",
      "0.954973  with:   {'learning_rate': 0.5, 'max_depth': 9, 'num_leaves': 16}\n",
      "0.953440  with:   {'learning_rate': 0.5, 'max_depth': 9, 'num_leaves': 17}\n",
      "0.951675  with:   {'learning_rate': 0.5, 'max_depth': 9, 'num_leaves': 18}\n",
      "0.947081  with:   {'learning_rate': 0.5, 'max_depth': 9, 'num_leaves': 19}\n",
      "0.950315  with:   {'learning_rate': 0.5, 'max_depth': 9, 'num_leaves': 20}\n",
      "0.951713  with:   {'learning_rate': 0.5, 'max_depth': 9, 'num_leaves': 21}\n",
      "0.950520  with:   {'learning_rate': 0.5, 'max_depth': 9, 'num_leaves': 22}\n",
      "0.950562  with:   {'learning_rate': 0.5, 'max_depth': 9, 'num_leaves': 23}\n",
      "0.951047  with:   {'learning_rate': 0.5, 'max_depth': 9, 'num_leaves': 24}\n",
      "0.946762  with:   {'learning_rate': 0.5, 'max_depth': 9, 'num_leaves': 25}\n",
      "0.954175  with:   {'learning_rate': 0.5, 'max_depth': 10, 'num_leaves': 15}\n",
      "0.955749  with:   {'learning_rate': 0.5, 'max_depth': 10, 'num_leaves': 16}\n",
      "0.951881  with:   {'learning_rate': 0.5, 'max_depth': 10, 'num_leaves': 17}\n",
      "0.949802  with:   {'learning_rate': 0.5, 'max_depth': 10, 'num_leaves': 18}\n",
      "0.951550  with:   {'learning_rate': 0.5, 'max_depth': 10, 'num_leaves': 19}\n",
      "0.950130  with:   {'learning_rate': 0.5, 'max_depth': 10, 'num_leaves': 20}\n",
      "0.952634  with:   {'learning_rate': 0.5, 'max_depth': 10, 'num_leaves': 21}\n",
      "0.950663  with:   {'learning_rate': 0.5, 'max_depth': 10, 'num_leaves': 22}\n",
      "0.947742  with:   {'learning_rate': 0.5, 'max_depth': 10, 'num_leaves': 23}\n",
      "0.949334  with:   {'learning_rate': 0.5, 'max_depth': 10, 'num_leaves': 24}\n",
      "0.948574  with:   {'learning_rate': 0.5, 'max_depth': 10, 'num_leaves': 25}\n",
      "0.953728  with:   {'learning_rate': 0.5, 'max_depth': 11, 'num_leaves': 15}\n",
      "0.955795  with:   {'learning_rate': 0.5, 'max_depth': 11, 'num_leaves': 16}\n",
      "0.952881  with:   {'learning_rate': 0.5, 'max_depth': 11, 'num_leaves': 17}\n",
      "0.952114  with:   {'learning_rate': 0.5, 'max_depth': 11, 'num_leaves': 18}\n",
      "0.950908  with:   {'learning_rate': 0.5, 'max_depth': 11, 'num_leaves': 19}\n",
      "0.951228  with:   {'learning_rate': 0.5, 'max_depth': 11, 'num_leaves': 20}\n",
      "0.952843  with:   {'learning_rate': 0.5, 'max_depth': 11, 'num_leaves': 21}\n",
      "0.949179  with:   {'learning_rate': 0.5, 'max_depth': 11, 'num_leaves': 22}\n",
      "0.949408  with:   {'learning_rate': 0.5, 'max_depth': 11, 'num_leaves': 23}\n",
      "0.948978  with:   {'learning_rate': 0.5, 'max_depth': 11, 'num_leaves': 24}\n",
      "0.949756  with:   {'learning_rate': 0.5, 'max_depth': 11, 'num_leaves': 25}\n"
     ]
    }
   ],
   "source": [
    "#继续观察参数的趋势\n",
    "means = gsearch_result1.cv_results_['mean_test_score']\n",
    "params = gsearch_result1.cv_results_['params']\n",
    "for mean,param in zip(means,params):\n",
    "    print(\"%f  with:   %r\" % (mean,param))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 158,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[LightGBM] [Warning] bagging_fraction is set=0.4, subsample=1.0 will be ignored. Current value: bagging_fraction=0.4\n",
      "[LightGBM] [Warning] feature_fraction is set=0.6, colsample_bytree=1.0 will be ignored. Current value: feature_fraction=0.6\n",
      "{'bagging_fraction': 0.4, 'feature_fraction': 0.6}\n",
      "0.960851962177529\n",
      "[[ 8360  1852]\n",
      " [ 1958 37830]]\n"
     ]
    }
   ],
   "source": [
    "#然后调整bagging_fraction和feature_fraction\n",
    "lgb1 = lgb.LGBMClassifier(boosting_type = 'gbdt',n_estimators=43,metric='binary_logloss',\n",
    "                          learning_rate=0.1, max_depth=11, num_leaves=20)\n",
    "params_test1={\n",
    "    'bagging_fraction':[0.4,0.6,0.8,1],\n",
    "    'feature_fraction':[0.4,0.6,0.8,1]\n",
    "}\n",
    "gsearch1=GridSearchCV(estimator=lgb1, param_grid=params_test1, scoring='roc_auc', cv=5, verbose=0, n_jobs=4)\n",
    "gsearch_result1=gsearch1.fit(train_final.drop(columns='loan_status'),train_final['loan_status'])\n",
    "#参数与指标\n",
    "print(gsearch_result1.best_params_)\n",
    "print(gsearch_result1.best_score_)\n",
    "#混淆矩阵\n",
    "y_pred=gsearch_result1.predict(train_final.drop(columns='loan_status'))\n",
    "print(confusion_matrix(train_final['loan_status'],y_pred))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 159,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.960243  with:   {'bagging_fraction': 0.4, 'feature_fraction': 0.4}\n",
      "0.960852  with:   {'bagging_fraction': 0.4, 'feature_fraction': 0.6}\n",
      "0.960799  with:   {'bagging_fraction': 0.4, 'feature_fraction': 0.8}\n",
      "0.960674  with:   {'bagging_fraction': 0.4, 'feature_fraction': 1}\n",
      "0.960243  with:   {'bagging_fraction': 0.6, 'feature_fraction': 0.4}\n",
      "0.960852  with:   {'bagging_fraction': 0.6, 'feature_fraction': 0.6}\n",
      "0.960799  with:   {'bagging_fraction': 0.6, 'feature_fraction': 0.8}\n",
      "0.960674  with:   {'bagging_fraction': 0.6, 'feature_fraction': 1}\n",
      "0.960243  with:   {'bagging_fraction': 0.8, 'feature_fraction': 0.4}\n",
      "0.960852  with:   {'bagging_fraction': 0.8, 'feature_fraction': 0.6}\n",
      "0.960799  with:   {'bagging_fraction': 0.8, 'feature_fraction': 0.8}\n",
      "0.960674  with:   {'bagging_fraction': 0.8, 'feature_fraction': 1}\n",
      "0.960243  with:   {'bagging_fraction': 1, 'feature_fraction': 0.4}\n",
      "0.960852  with:   {'bagging_fraction': 1, 'feature_fraction': 0.6}\n",
      "0.960799  with:   {'bagging_fraction': 1, 'feature_fraction': 0.8}\n",
      "0.960674  with:   {'bagging_fraction': 1, 'feature_fraction': 1}\n"
     ]
    }
   ],
   "source": [
    "#继续观察参数的趋势\n",
    "means = gsearch_result1.cv_results_['mean_test_score']\n",
    "params = gsearch_result1.cv_results_['params']\n",
    "for mean,param in zip(means,params):\n",
    "    print(\"%f  with:   %r\" % (mean,param))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 顺序搜索最优结果  \n",
    "\n",
    "params_opt={\n",
    "    'learning_rate': 0.1, 'max_depth': 11, 'num_leaves': 20,\n",
    "    'bagging_fraction': 1, 'feature_fraction': 0.4\n",
    "}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 160,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "测试集结果\n",
      "准确率\n",
      "0.91654\n",
      "f1score\n",
      "0.9479331727949892\n",
      "auc\n",
      "0.8732337884364433\n",
      "混淆矩阵\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "array([[ 7840,  1934],\n",
       "       [ 2239, 37987]])"
      ]
     },
     "execution_count": 160,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "##参数在测试集的结果\n",
    "print('测试集结果')\n",
    "y_pred=gsearch_result1.predict(test_final.drop(columns='loan_status'))\n",
    "print('准确率')\n",
    "print(accuracy_score(test_final['loan_status'],y_pred))\n",
    "print('f1score')\n",
    "print(f1_score(test_final['loan_status'],y_pred)) \n",
    "print('auc')\n",
    "print(roc_auc_score(test_final['loan_status'],y_pred))\n",
    "print('混淆矩阵')\n",
    "confusion_matrix(test_final['loan_status'],y_pred)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 五.自动寻优阶段\n",
    "本阶段使用贝叶斯方法自动寻找最优参数"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 216,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>continuous_annual_inc</th>\n",
       "      <th>continuous_annual_inc_joint</th>\n",
       "      <th>continuous_delinq_2yrs</th>\n",
       "      <th>continuous_dti</th>\n",
       "      <th>continuous_dti_joint</th>\n",
       "      <th>continuous_fico_range_high</th>\n",
       "      <th>continuous_fico_range_low</th>\n",
       "      <th>continuous_funded_amnt</th>\n",
       "      <th>continuous_funded_amnt_inv</th>\n",
       "      <th>continuous_inq_last_6mths</th>\n",
       "      <th>continuous_installment</th>\n",
       "      <th>continuous_int_rate</th>\n",
       "      <th>continuous_last_fico_range_high</th>\n",
       "      <th>continuous_last_fico_range_low</th>\n",
       "      <th>continuous_loan_amnt</th>\n",
       "      <th>continuous_mths_since_last_delinq</th>\n",
       "      <th>continuous_mths_since_last_major_derog</th>\n",
       "      <th>continuous_mths_since_last_record</th>\n",
       "      <th>continuous_open_acc</th>\n",
       "      <th>continuous_pub_rec</th>\n",
       "      <th>discrete_addr_state_1_one_hot</th>\n",
       "      <th>discrete_addr_state_2_one_hot</th>\n",
       "      <th>discrete_addr_state_3_one_hot</th>\n",
       "      <th>discrete_addr_state_4_one_hot</th>\n",
       "      <th>discrete_addr_state_5_one_hot</th>\n",
       "      <th>discrete_addr_state_6_one_hot</th>\n",
       "      <th>discrete_addr_state_7_one_hot</th>\n",
       "      <th>discrete_addr_state_8_one_hot</th>\n",
       "      <th>discrete_addr_state_9_one_hot</th>\n",
       "      <th>discrete_addr_state_10_one_hot</th>\n",
       "      <th>discrete_addr_state_11_one_hot</th>\n",
       "      <th>discrete_addr_state_12_one_hot</th>\n",
       "      <th>discrete_addr_state_13_one_hot</th>\n",
       "      <th>discrete_addr_state_14_one_hot</th>\n",
       "      <th>discrete_addr_state_15_one_hot</th>\n",
       "      <th>discrete_addr_state_16_one_hot</th>\n",
       "      <th>discrete_addr_state_17_one_hot</th>\n",
       "      <th>discrete_addr_state_18_one_hot</th>\n",
       "      <th>discrete_addr_state_19_one_hot</th>\n",
       "      <th>discrete_addr_state_20_one_hot</th>\n",
       "      <th>discrete_addr_state_21_one_hot</th>\n",
       "      <th>discrete_addr_state_22_one_hot</th>\n",
       "      <th>discrete_addr_state_23_one_hot</th>\n",
       "      <th>discrete_addr_state_24_one_hot</th>\n",
       "      <th>discrete_addr_state_25_one_hot</th>\n",
       "      <th>discrete_addr_state_26_one_hot</th>\n",
       "      <th>discrete_addr_state_27_one_hot</th>\n",
       "      <th>discrete_addr_state_28_one_hot</th>\n",
       "      <th>discrete_addr_state_29_one_hot</th>\n",
       "      <th>discrete_addr_state_30_one_hot</th>\n",
       "      <th>discrete_addr_state_31_one_hot</th>\n",
       "      <th>discrete_addr_state_32_one_hot</th>\n",
       "      <th>discrete_addr_state_33_one_hot</th>\n",
       "      <th>discrete_addr_state_34_one_hot</th>\n",
       "      <th>discrete_addr_state_35_one_hot</th>\n",
       "      <th>discrete_addr_state_36_one_hot</th>\n",
       "      <th>discrete_addr_state_37_one_hot</th>\n",
       "      <th>discrete_addr_state_38_one_hot</th>\n",
       "      <th>discrete_addr_state_39_one_hot</th>\n",
       "      <th>discrete_addr_state_40_one_hot</th>\n",
       "      <th>discrete_addr_state_41_one_hot</th>\n",
       "      <th>discrete_addr_state_42_one_hot</th>\n",
       "      <th>discrete_addr_state_43_one_hot</th>\n",
       "      <th>discrete_addr_state_44_one_hot</th>\n",
       "      <th>discrete_addr_state_45_one_hot</th>\n",
       "      <th>discrete_addr_state_46_one_hot</th>\n",
       "      <th>discrete_addr_state_47_one_hot</th>\n",
       "      <th>discrete_addr_state_48_one_hot</th>\n",
       "      <th>discrete_addr_state_49_one_hot</th>\n",
       "      <th>discrete_application_type_1_one_hot</th>\n",
       "      <th>discrete_application_type_2_one_hot</th>\n",
       "      <th>discrete_emp_length_1_one_hot</th>\n",
       "      <th>discrete_emp_length_2_one_hot</th>\n",
       "      <th>discrete_emp_length_3_one_hot</th>\n",
       "      <th>discrete_emp_length_4_one_hot</th>\n",
       "      <th>discrete_emp_length_5_one_hot</th>\n",
       "      <th>discrete_emp_length_6_one_hot</th>\n",
       "      <th>discrete_emp_length_7_one_hot</th>\n",
       "      <th>discrete_emp_length_8_one_hot</th>\n",
       "      <th>discrete_emp_length_9_one_hot</th>\n",
       "      <th>discrete_emp_length_10_one_hot</th>\n",
       "      <th>discrete_emp_length_11_one_hot</th>\n",
       "      <th>discrete_emp_length_12_one_hot</th>\n",
       "      <th>discrete_grade_1_one_hot</th>\n",
       "      <th>discrete_grade_2_one_hot</th>\n",
       "      <th>discrete_grade_3_one_hot</th>\n",
       "      <th>discrete_grade_4_one_hot</th>\n",
       "      <th>discrete_grade_5_one_hot</th>\n",
       "      <th>discrete_grade_6_one_hot</th>\n",
       "      <th>discrete_grade_7_one_hot</th>\n",
       "      <th>discrete_home_ownership_1_one_hot</th>\n",
       "      <th>discrete_home_ownership_2_one_hot</th>\n",
       "      <th>discrete_home_ownership_3_one_hot</th>\n",
       "      <th>discrete_home_ownership_4_one_hot</th>\n",
       "      <th>discrete_policy_code_1_one_hot</th>\n",
       "      <th>discrete_purpose_1_one_hot</th>\n",
       "      <th>discrete_purpose_2_one_hot</th>\n",
       "      <th>discrete_purpose_3_one_hot</th>\n",
       "      <th>discrete_purpose_4_one_hot</th>\n",
       "      <th>discrete_purpose_5_one_hot</th>\n",
       "      <th>discrete_purpose_6_one_hot</th>\n",
       "      <th>discrete_purpose_7_one_hot</th>\n",
       "      <th>discrete_purpose_8_one_hot</th>\n",
       "      <th>discrete_purpose_9_one_hot</th>\n",
       "      <th>discrete_purpose_10_one_hot</th>\n",
       "      <th>discrete_purpose_11_one_hot</th>\n",
       "      <th>discrete_purpose_12_one_hot</th>\n",
       "      <th>discrete_pymnt_plan_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_3_one_hot</th>\n",
       "      <th>discrete_sub_grade_4_one_hot</th>\n",
       "      <th>discrete_sub_grade_5_one_hot</th>\n",
       "      <th>discrete_sub_grade_6_one_hot</th>\n",
       "      <th>discrete_sub_grade_7_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot</th>\n",
       "      <th>discrete_sub_grade_9_one_hot</th>\n",
       "      <th>discrete_sub_grade_10_one_hot</th>\n",
       "      <th>discrete_sub_grade_11_one_hot</th>\n",
       "      <th>discrete_sub_grade_12_one_hot</th>\n",
       "      <th>discrete_sub_grade_13_one_hot</th>\n",
       "      <th>discrete_sub_grade_14_one_hot</th>\n",
       "      <th>discrete_sub_grade_15_one_hot</th>\n",
       "      <th>discrete_sub_grade_16_one_hot</th>\n",
       "      <th>discrete_sub_grade_17_one_hot</th>\n",
       "      <th>discrete_sub_grade_18_one_hot</th>\n",
       "      <th>discrete_sub_grade_19_one_hot</th>\n",
       "      <th>discrete_sub_grade_20_one_hot</th>\n",
       "      <th>discrete_sub_grade_21_one_hot</th>\n",
       "      <th>discrete_sub_grade_22_one_hot</th>\n",
       "      <th>discrete_sub_grade_23_one_hot</th>\n",
       "      <th>discrete_sub_grade_24_one_hot</th>\n",
       "      <th>discrete_sub_grade_25_one_hot</th>\n",
       "      <th>discrete_sub_grade_26_one_hot</th>\n",
       "      <th>discrete_sub_grade_27_one_hot</th>\n",
       "      <th>discrete_sub_grade_28_one_hot</th>\n",
       "      <th>discrete_sub_grade_29_one_hot</th>\n",
       "      <th>discrete_sub_grade_30_one_hot</th>\n",
       "      <th>discrete_sub_grade_31_one_hot</th>\n",
       "      <th>discrete_sub_grade_32_one_hot</th>\n",
       "      <th>discrete_sub_grade_33_one_hot</th>\n",
       "      <th>discrete_sub_grade_34_one_hot</th>\n",
       "      <th>discrete_sub_grade_35_one_hot</th>\n",
       "      <th>discrete_term_1_one_hot</th>\n",
       "      <th>discrete_term_2_one_hot</th>\n",
       "      <th>loan_status</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>48000.0</td>\n",
       "      <td>76000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>25.13</td>\n",
       "      <td>5.97</td>\n",
       "      <td>699.0</td>\n",
       "      <td>695.0</td>\n",
       "      <td>10000.0</td>\n",
       "      <td>10000.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>332.10</td>\n",
       "      <td>11.99</td>\n",
       "      <td>569.0</td>\n",
       "      <td>565.0</td>\n",
       "      <td>10000.0</td>\n",
       "      <td>21.0</td>\n",
       "      <td>69.0</td>\n",
       "      <td>73.0</td>\n",
       "      <td>12.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>54000.0</td>\n",
       "      <td>76000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>14.93</td>\n",
       "      <td>5.97</td>\n",
       "      <td>684.0</td>\n",
       "      <td>680.0</td>\n",
       "      <td>11000.0</td>\n",
       "      <td>11000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>255.90</td>\n",
       "      <td>13.99</td>\n",
       "      <td>639.0</td>\n",
       "      <td>635.0</td>\n",
       "      <td>11000.0</td>\n",
       "      <td>57.0</td>\n",
       "      <td>57.0</td>\n",
       "      <td>73.0</td>\n",
       "      <td>6.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>60000.0</td>\n",
       "      <td>76000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>26.18</td>\n",
       "      <td>5.97</td>\n",
       "      <td>664.0</td>\n",
       "      <td>660.0</td>\n",
       "      <td>19525.0</td>\n",
       "      <td>19525.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>528.11</td>\n",
       "      <td>20.99</td>\n",
       "      <td>504.0</td>\n",
       "      <td>500.0</td>\n",
       "      <td>19525.0</td>\n",
       "      <td>21.0</td>\n",
       "      <td>69.0</td>\n",
       "      <td>63.0</td>\n",
       "      <td>13.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>100000.0</td>\n",
       "      <td>76000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>7.52</td>\n",
       "      <td>5.97</td>\n",
       "      <td>664.0</td>\n",
       "      <td>660.0</td>\n",
       "      <td>18000.0</td>\n",
       "      <td>18000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>507.33</td>\n",
       "      <td>22.99</td>\n",
       "      <td>704.0</td>\n",
       "      <td>700.0</td>\n",
       "      <td>18000.0</td>\n",
       "      <td>21.0</td>\n",
       "      <td>69.0</td>\n",
       "      <td>52.0</td>\n",
       "      <td>5.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>100000.0</td>\n",
       "      <td>76000.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>9.30</td>\n",
       "      <td>5.97</td>\n",
       "      <td>689.0</td>\n",
       "      <td>685.0</td>\n",
       "      <td>1400.0</td>\n",
       "      <td>1400.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>45.83</td>\n",
       "      <td>10.99</td>\n",
       "      <td>639.0</td>\n",
       "      <td>635.0</td>\n",
       "      <td>1400.0</td>\n",
       "      <td>6.0</td>\n",
       "      <td>69.0</td>\n",
       "      <td>73.0</td>\n",
       "      <td>14.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   continuous_annual_inc  continuous_annual_inc_joint  continuous_delinq_2yrs  \\\n",
       "0                48000.0                      76000.0                     0.0   \n",
       "1                54000.0                      76000.0                     0.0   \n",
       "2                60000.0                      76000.0                     0.0   \n",
       "3               100000.0                      76000.0                     0.0   \n",
       "4               100000.0                      76000.0                     1.0   \n",
       "\n",
       "   continuous_dti  continuous_dti_joint  continuous_fico_range_high  \\\n",
       "0           25.13                  5.97                       699.0   \n",
       "1           14.93                  5.97                       684.0   \n",
       "2           26.18                  5.97                       664.0   \n",
       "3            7.52                  5.97                       664.0   \n",
       "4            9.30                  5.97                       689.0   \n",
       "\n",
       "   continuous_fico_range_low  continuous_funded_amnt  \\\n",
       "0                      695.0                 10000.0   \n",
       "1                      680.0                 11000.0   \n",
       "2                      660.0                 19525.0   \n",
       "3                      660.0                 18000.0   \n",
       "4                      685.0                  1400.0   \n",
       "\n",
       "   continuous_funded_amnt_inv  continuous_inq_last_6mths  \\\n",
       "0                     10000.0                        2.0   \n",
       "1                     11000.0                        0.0   \n",
       "2                     19525.0                        1.0   \n",
       "3                     18000.0                        0.0   \n",
       "4                      1400.0                        0.0   \n",
       "\n",
       "   continuous_installment  continuous_int_rate  \\\n",
       "0                  332.10                11.99   \n",
       "1                  255.90                13.99   \n",
       "2                  528.11                20.99   \n",
       "3                  507.33                22.99   \n",
       "4                   45.83                10.99   \n",
       "\n",
       "   continuous_last_fico_range_high  continuous_last_fico_range_low  \\\n",
       "0                            569.0                           565.0   \n",
       "1                            639.0                           635.0   \n",
       "2                            504.0                           500.0   \n",
       "3                            704.0                           700.0   \n",
       "4                            639.0                           635.0   \n",
       "\n",
       "   continuous_loan_amnt  continuous_mths_since_last_delinq  \\\n",
       "0               10000.0                               21.0   \n",
       "1               11000.0                               57.0   \n",
       "2               19525.0                               21.0   \n",
       "3               18000.0                               21.0   \n",
       "4                1400.0                                6.0   \n",
       "\n",
       "   continuous_mths_since_last_major_derog  continuous_mths_since_last_record  \\\n",
       "0                                    69.0                               73.0   \n",
       "1                                    57.0                               73.0   \n",
       "2                                    69.0                               63.0   \n",
       "3                                    69.0                               52.0   \n",
       "4                                    69.0                               73.0   \n",
       "\n",
       "   continuous_open_acc  continuous_pub_rec  discrete_addr_state_1_one_hot  \\\n",
       "0                 12.0                 0.0                            0.0   \n",
       "1                  6.0                 0.0                            0.0   \n",
       "2                 13.0                 1.0                            0.0   \n",
       "3                  5.0                 1.0                            0.0   \n",
       "4                 14.0                 0.0                            0.0   \n",
       "\n",
       "   discrete_addr_state_2_one_hot  discrete_addr_state_3_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_addr_state_4_one_hot  discrete_addr_state_5_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            1.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_addr_state_6_one_hot  discrete_addr_state_7_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_addr_state_8_one_hot  discrete_addr_state_9_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_addr_state_10_one_hot  discrete_addr_state_11_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             1.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_12_one_hot  discrete_addr_state_13_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             1.0   \n",
       "\n",
       "   discrete_addr_state_14_one_hot  discrete_addr_state_15_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_16_one_hot  discrete_addr_state_17_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_18_one_hot  discrete_addr_state_19_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_20_one_hot  discrete_addr_state_21_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_22_one_hot  discrete_addr_state_23_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_24_one_hot  discrete_addr_state_25_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_26_one_hot  discrete_addr_state_27_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_28_one_hot  discrete_addr_state_29_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_30_one_hot  discrete_addr_state_31_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_32_one_hot  discrete_addr_state_33_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             1.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_34_one_hot  discrete_addr_state_35_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_36_one_hot  discrete_addr_state_37_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_38_one_hot  discrete_addr_state_39_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_40_one_hot  discrete_addr_state_41_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_42_one_hot  discrete_addr_state_43_one_hot  \\\n",
       "0                             0.0                             1.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_44_one_hot  discrete_addr_state_45_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_46_one_hot  discrete_addr_state_47_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_addr_state_48_one_hot  discrete_addr_state_49_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             0.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_application_type_1_one_hot  discrete_application_type_2_one_hot  \\\n",
       "0                                  1.0                                  0.0   \n",
       "1                                  1.0                                  0.0   \n",
       "2                                  1.0                                  0.0   \n",
       "3                                  1.0                                  0.0   \n",
       "4                                  1.0                                  0.0   \n",
       "\n",
       "   discrete_emp_length_1_one_hot  discrete_emp_length_2_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            1.0   \n",
       "2                            1.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_emp_length_3_one_hot  discrete_emp_length_4_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_emp_length_5_one_hot  discrete_emp_length_6_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            1.0   \n",
       "\n",
       "   discrete_emp_length_7_one_hot  discrete_emp_length_8_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_emp_length_9_one_hot  discrete_emp_length_10_one_hot  \\\n",
       "0                            0.0                             1.0   \n",
       "1                            0.0                             0.0   \n",
       "2                            0.0                             0.0   \n",
       "3                            0.0                             0.0   \n",
       "4                            0.0                             0.0   \n",
       "\n",
       "   discrete_emp_length_11_one_hot  discrete_emp_length_12_one_hot  \\\n",
       "0                             0.0                             0.0   \n",
       "1                             0.0                             0.0   \n",
       "2                             0.0                             0.0   \n",
       "3                             1.0                             0.0   \n",
       "4                             0.0                             0.0   \n",
       "\n",
       "   discrete_grade_1_one_hot  discrete_grade_2_one_hot  \\\n",
       "0                       1.0                       0.0   \n",
       "1                       1.0                       0.0   \n",
       "2                       0.0                       0.0   \n",
       "3                       0.0                       0.0   \n",
       "4                       0.0                       1.0   \n",
       "\n",
       "   discrete_grade_3_one_hot  discrete_grade_4_one_hot  \\\n",
       "0                       0.0                       0.0   \n",
       "1                       0.0                       0.0   \n",
       "2                       0.0                       0.0   \n",
       "3                       1.0                       0.0   \n",
       "4                       0.0                       0.0   \n",
       "\n",
       "   discrete_grade_5_one_hot  discrete_grade_6_one_hot  \\\n",
       "0                       0.0                       0.0   \n",
       "1                       0.0                       0.0   \n",
       "2                       1.0                       0.0   \n",
       "3                       0.0                       0.0   \n",
       "4                       0.0                       0.0   \n",
       "\n",
       "   discrete_grade_7_one_hot  discrete_home_ownership_1_one_hot  \\\n",
       "0                       0.0                                1.0   \n",
       "1                       0.0                                1.0   \n",
       "2                       0.0                                1.0   \n",
       "3                       0.0                                1.0   \n",
       "4                       0.0                                0.0   \n",
       "\n",
       "   discrete_home_ownership_2_one_hot  discrete_home_ownership_3_one_hot  \\\n",
       "0                                0.0                                0.0   \n",
       "1                                0.0                                0.0   \n",
       "2                                0.0                                0.0   \n",
       "3                                0.0                                0.0   \n",
       "4                                0.0                                1.0   \n",
       "\n",
       "   discrete_home_ownership_4_one_hot  discrete_policy_code_1_one_hot  \\\n",
       "0                                0.0                             1.0   \n",
       "1                                0.0                             1.0   \n",
       "2                                0.0                             1.0   \n",
       "3                                0.0                             1.0   \n",
       "4                                0.0                             1.0   \n",
       "\n",
       "   discrete_purpose_1_one_hot  discrete_purpose_2_one_hot  \\\n",
       "0                         0.0                         0.0   \n",
       "1                         1.0                         0.0   \n",
       "2                         0.0                         0.0   \n",
       "3                         0.0                         0.0   \n",
       "4                         1.0                         0.0   \n",
       "\n",
       "   discrete_purpose_3_one_hot  discrete_purpose_4_one_hot  \\\n",
       "0                         0.0                         0.0   \n",
       "1                         0.0                         0.0   \n",
       "2                         0.0                         0.0   \n",
       "3                         0.0                         0.0   \n",
       "4                         0.0                         0.0   \n",
       "\n",
       "   discrete_purpose_5_one_hot  discrete_purpose_6_one_hot  \\\n",
       "0                         1.0                         0.0   \n",
       "1                         0.0                         0.0   \n",
       "2                         1.0                         0.0   \n",
       "3                         0.0                         1.0   \n",
       "4                         0.0                         0.0   \n",
       "\n",
       "   discrete_purpose_7_one_hot  discrete_purpose_8_one_hot  \\\n",
       "0                         0.0                         0.0   \n",
       "1                         0.0                         0.0   \n",
       "2                         0.0                         0.0   \n",
       "3                         0.0                         0.0   \n",
       "4                         0.0                         0.0   \n",
       "\n",
       "   discrete_purpose_9_one_hot  discrete_purpose_10_one_hot  \\\n",
       "0                         0.0                          0.0   \n",
       "1                         0.0                          0.0   \n",
       "2                         0.0                          0.0   \n",
       "3                         0.0                          0.0   \n",
       "4                         0.0                          0.0   \n",
       "\n",
       "   discrete_purpose_11_one_hot  discrete_purpose_12_one_hot  \\\n",
       "0                          0.0                          0.0   \n",
       "1                          0.0                          0.0   \n",
       "2                          0.0                          0.0   \n",
       "3                          0.0                          0.0   \n",
       "4                          0.0                          0.0   \n",
       "\n",
       "   discrete_pymnt_plan_1_one_hot  discrete_sub_grade_1_one_hot  \\\n",
       "0                            1.0                           0.0   \n",
       "1                            1.0                           1.0   \n",
       "2                            1.0                           0.0   \n",
       "3                            1.0                           0.0   \n",
       "4                            1.0                           0.0   \n",
       "\n",
       "   discrete_sub_grade_2_one_hot  discrete_sub_grade_3_one_hot  \\\n",
       "0                           1.0                           0.0   \n",
       "1                           0.0                           0.0   \n",
       "2                           0.0                           0.0   \n",
       "3                           0.0                           0.0   \n",
       "4                           0.0                           1.0   \n",
       "\n",
       "   discrete_sub_grade_4_one_hot  discrete_sub_grade_5_one_hot  \\\n",
       "0                           0.0                           0.0   \n",
       "1                           0.0                           0.0   \n",
       "2                           0.0                           0.0   \n",
       "3                           0.0                           0.0   \n",
       "4                           0.0                           0.0   \n",
       "\n",
       "   discrete_sub_grade_6_one_hot  discrete_sub_grade_7_one_hot  \\\n",
       "0                           0.0                           0.0   \n",
       "1                           0.0                           0.0   \n",
       "2                           0.0                           0.0   \n",
       "3                           0.0                           0.0   \n",
       "4                           0.0                           0.0   \n",
       "\n",
       "   discrete_sub_grade_8_one_hot  discrete_sub_grade_9_one_hot  \\\n",
       "0                           0.0                           0.0   \n",
       "1                           0.0                           0.0   \n",
       "2                           0.0                           0.0   \n",
       "3                           0.0                           0.0   \n",
       "4                           0.0                           0.0   \n",
       "\n",
       "   discrete_sub_grade_10_one_hot  discrete_sub_grade_11_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_sub_grade_12_one_hot  discrete_sub_grade_13_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_sub_grade_14_one_hot  discrete_sub_grade_15_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_sub_grade_16_one_hot  discrete_sub_grade_17_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_sub_grade_18_one_hot  discrete_sub_grade_19_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_sub_grade_20_one_hot  discrete_sub_grade_21_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_sub_grade_22_one_hot  discrete_sub_grade_23_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            1.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_sub_grade_24_one_hot  discrete_sub_grade_25_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_sub_grade_26_one_hot  discrete_sub_grade_27_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_sub_grade_28_one_hot  discrete_sub_grade_29_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            1.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_sub_grade_30_one_hot  discrete_sub_grade_31_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_sub_grade_32_one_hot  discrete_sub_grade_33_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_sub_grade_34_one_hot  discrete_sub_grade_35_one_hot  \\\n",
       "0                            0.0                            0.0   \n",
       "1                            0.0                            0.0   \n",
       "2                            0.0                            0.0   \n",
       "3                            0.0                            0.0   \n",
       "4                            0.0                            0.0   \n",
       "\n",
       "   discrete_term_1_one_hot  discrete_term_2_one_hot  loan_status  \n",
       "0                      1.0                      0.0          0.0  \n",
       "1                      0.0                      1.0          1.0  \n",
       "2                      0.0                      1.0          0.0  \n",
       "3                      0.0                      1.0          1.0  \n",
       "4                      1.0                      0.0          1.0  "
      ]
     },
     "execution_count": 216,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#首先进行缺失值处理\n",
    "from sklearn.impute import SimpleImputer\n",
    "si = SimpleImputer(missing_values=np.nan, strategy='most_frequent')\n",
    "train_data = pd.DataFrame(si.fit_transform(train))\n",
    "train_data.columns=train.columns\n",
    "test_data = pd.DataFrame(si.fit_transform(test))\n",
    "test_data.columns=test.columns\n",
    "test_data.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 217,
   "metadata": {
    "id": "pk8phvUz3Lkk"
   },
   "outputs": [],
   "source": [
    "import io\n",
    "import multiprocessing\n",
    "from contextlib import redirect_stdout\n",
    "# Imports for hyperparameter search and LightGBM training.\n",
    "# io / redirect_stdout are required by LGBFitter.train to capture LightGBM's\n",
    "# per-round eval log from stdout.\n",
    "import io\n",
    "from contextlib import redirect_stdout\n",
    "from copy import deepcopy\n",
    "from dataclasses import dataclass, asdict\n",
    "import hyperopt.pyll\n",
    "from hyperopt import fmin, tpe, hp\n",
    "import numpy as np\n",
    "import lightgbm as lgb\n",
    "from sklearn.metrics import accuracy_score\n",
    "from sklearn.metrics import f1_score\n",
    "from sklearn.metrics import precision_score\n",
    "from sklearn.metrics import recall_score\n",
    "from sklearn.metrics import roc_auc_score\n",
    "import torch\n",
    "\n",
    "import copy\n",
    "cpu_count = 4\n",
    "use_gpu = False\n",
    "\n",
    "\n",
    "@dataclass\n",
    "class LGBOpt:\n",
    "    \"\"\"Hyperopt search space for LightGBM parameters.\n",
    "\n",
    "    Each field is a hyperopt expression; ``asdict(LGBOpt())`` produces the\n",
    "    space handed to ``fmin``. Every hp label must equal its field name so the\n",
    "    best-parameter dict returned by ``fmin`` maps back onto the same keys\n",
    "    that ``LGBFitter.train`` consumes (e.g. it pops 'num_round').\n",
    "    \"\"\"\n",
    "    num_threads: any = hp.choice('num_threads', [cpu_count])\n",
    "    num_leaves: any = hp.choice('num_leaves', [64])\n",
    "    metric: any = hp.choice('metric', ['binary_error'])\n",
    "    # BUG FIX: label was 'num_rounds' (plural) while the field and the key\n",
    "    # popped in LGBFitter.train are 'num_round' -- the mismatch broke the\n",
    "    # round-trip of searched parameters.\n",
    "    num_round: any = hp.choice('num_round', [1000])\n",
    "    objective: any = hp.choice('objective', ['binary'])\n",
    "    learning_rate: any = hp.uniform('learning_rate', 0.01, 0.1)\n",
    "    feature_fraction: any = hp.uniform('feature_fraction', 0.5, 1.0)\n",
    "    bagging_fraction: any = hp.uniform('bagging_fraction', 0.8, 1.0)\n",
    "    # Typo fix: the GPU branch's label was 'device_tpye'.\n",
    "    device_type: any = hp.choice('device_type', ['gpu']) if use_gpu else hp.choice('device_type',\n",
    "                                                                                  ['cpu'])\n",
    "    boosting: any = hp.choice('boosting', ['gbdt', 'dart', 'goss'])\n",
    "    # Typo fix: label was 'extra_tress'.\n",
    "    extra_trees: any = hp.choice('extra_trees', [False, True])\n",
    "    drop_rate: any = hp.uniform('drop_rate', 0, 0.2)\n",
    "    uniform_drop: any = hp.choice('uniform_drop', [True, False])\n",
    "    lambda_l1: any = hp.uniform('lambda_l1', 0, 10)  # TODO: Check range\n",
    "    lambda_l2: any = hp.uniform('lambda_l2', 0, 10)  # TODO: Check range\n",
    "    min_gain_to_split: any = hp.uniform('min_gain_to_split', 0, 1)  # TODO: Check range\n",
    "    # BUG FIX: without a type annotation this was a plain class attribute,\n",
    "    # not a dataclass field, so dataclasses.asdict() silently dropped it\n",
    "    # from the search space and it was never tuned.\n",
    "    min_data_in_bin: any = hp.choice('min_data_in_bin', [3, 5, 10, 15, 20, 50])\n",
    "\n",
    "    @staticmethod\n",
    "    def get_common_params():\n",
    "        \"\"\"Return a fixed, reasonable parameter set for a quick baseline run.\"\"\"\n",
    "        return {'num_thread': 4, 'num_leaves': 12, 'metric': 'binary', 'objective': 'binary',\n",
    "                'num_round': 1000, 'learning_rate': 0.01, 'feature_fraction': 0.8, 'bagging_fraction': 0.8}\n",
    "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 218,
   "metadata": {},
   "outputs": [],
   "source": [
    "class FitterBase(object):\n",
    "    \"\"\"Base class holding the target label, evaluation metric and search state.\n",
    "\n",
    "    Subclasses (e.g. LGBFitter) implement training/search; this class only\n",
    "    provides the metric-to-loss conversion used as the hyperopt objective.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, label, metric, max_eval=100, opt=None):\n",
    "        self.label = label  # name of the target column in the DataFrames\n",
    "        self.metric = metric  # one of: error / precision / recall / macro_f1 / micro_f1 / auc\n",
    "        self.opt_params = dict()  # best parameters found by a search\n",
    "        self.max_eval = max_eval  # hyperopt evaluation budget\n",
    "        self.opt = opt  # hyperopt search-space object (may be None)\n",
    "\n",
    "    def get_loss(self, y, y_pred):\n",
    "        \"\"\"Convert the configured score into a loss (1 - score) for minimization.\n",
    "\n",
    "        For 'auc', y_pred should be probabilities; for the other metrics it\n",
    "        should be hard 0/1 labels.\n",
    "        \"\"\"\n",
    "        if self.metric == 'error':\n",
    "            return 1 - accuracy_score(y, y_pred)\n",
    "        elif self.metric == 'precision':\n",
    "            return 1 - precision_score(y, y_pred)\n",
    "        elif self.metric == 'recall':\n",
    "            return 1 - recall_score(y, y_pred)\n",
    "        elif self.metric == 'macro_f1':\n",
    "            return 1 - f1_score(y, y_pred, average='macro')\n",
    "        elif self.metric == 'micro_f1':\n",
    "            return 1 - f1_score(y, y_pred, average='micro')\n",
    "        elif self.metric == 'auc':  # TODO: warn if y_pred is all 0/1 -- AUC expects probabilities\n",
    "            return 1 - roc_auc_score(y, y_pred)\n",
    "        else:\n",
    "            # NotImplementedError is still an Exception subclass, so existing\n",
    "            # broad handlers keep working; the message now names the metric.\n",
    "            raise NotImplementedError(\"Metric '%s' is not implemented yet.\" % self.metric)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 219,
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "class LGBFitter(FitterBase):\n",
    "    \"\"\"LightGBM binary-classification fitter with hyperopt search and k-fold helpers.\"\"\"\n",
    "\n",
    "    def __init__(self, label='label', metric='error', opt: LGBOpt = None, max_eval=100):\n",
    "        super(LGBFitter, self).__init__(label, metric, max_eval)\n",
    "        if opt is not None:\n",
    "            self.opt = opt\n",
    "        else:\n",
    "            self.opt = LGBOpt()\n",
    "        self.best_round = None  # best iteration found on the eval set by train()\n",
    "        self.clf = None  # most recently trained Booster\n",
    "\n",
    "    def train(self, train_df, eval_df, params=None, use_best_eval=True):\n",
    "        \"\"\"Train one booster; optionally locate the best round on the eval set.\n",
    "\n",
    "        When use_best_eval is True, LightGBM's per-round log is captured from\n",
    "        stdout and parsed to find the round with the lowest eval metric, which\n",
    "        is stored in self.best_round. Returns the captured log lines.\n",
    "        \"\"\"\n",
    "        self.best_round = None\n",
    "        dtrain = lgb.Dataset(train_df.drop(columns=[self.label]), train_df[self.label])\n",
    "        deval = lgb.Dataset(eval_df.drop(columns=[self.label]), eval_df[self.label])\n",
    "        evallist = [dtrain, deval]\n",
    "        if params is None:\n",
    "            use_params = deepcopy(self.opt_params)\n",
    "        else:\n",
    "            use_params = deepcopy(params)\n",
    "\n",
    "        num_round = use_params.pop('num_round')\n",
    "        if use_best_eval:\n",
    "            with io.StringIO() as buf, redirect_stdout(buf):\n",
    "                self.clf = lgb.train(use_params, dtrain, num_round, valid_sets=evallist)\n",
    "                output = buf.getvalue().split(\"\\n\")\n",
    "            # Each metric line looks like '[round]\\ttrain ...\\teval: value';\n",
    "            # track the round whose eval value is smallest.\n",
    "            min_error = np.inf\n",
    "            min_index = 0\n",
    "            for idx in range(len(output) - 1):\n",
    "                if len(output[idx].split(\"\\t\")) == 3:\n",
    "                    temp = float(output[idx].split(\"\\t\")[2].split(\":\")[1])\n",
    "                    if min_error > temp:\n",
    "                        min_error = temp\n",
    "                        min_index = int(output[idx].split(\"\\t\")[0][1:-1])\n",
    "            print(\"The minimum is attained in round %d\" % (min_index + 1))\n",
    "            self.best_round = min_index + 1\n",
    "            return output\n",
    "        else:\n",
    "            with io.StringIO() as buf, redirect_stdout(buf):\n",
    "                self.clf = lgb.train(use_params, dtrain, num_round, valid_sets=evallist)\n",
    "                output = buf.getvalue().split(\"\\n\")\n",
    "            self.best_round = num_round\n",
    "            return output\n",
    "\n",
    "    def search(self, train_df, eval_df, use_best_eval=True):\n",
    "        \"\"\"Hyperopt search on a single train/eval split; stores the result in self.opt_params.\"\"\"\n",
    "        self.opt_params = dict()\n",
    "\n",
    "        def train_impl(params):\n",
    "            self.train(train_df, eval_df, params, use_best_eval)\n",
    "            if self.metric == 'auc':\n",
    "                y_pred = self.clf.predict(eval_df.drop(columns=[self.label]), num_iteration=self.best_round)\n",
    "            else:\n",
    "                y_pred = (self.clf.predict(eval_df.drop(columns=[self.label]),\n",
    "                                           num_iteration=self.best_round) > 0.5).astype(int)\n",
    "            return self.get_loss(eval_df[self.label], y_pred)\n",
    "\n",
    "        self.opt_params = fmin(train_impl, asdict(self.opt), algo=tpe.suggest, max_evals=self.max_eval)\n",
    "\n",
    "    def search_k_fold(self, k_fold, data, use_best_eval=True):\n",
    "        \"\"\"Hyperopt search scored by the mean loss across k folds.\"\"\"\n",
    "        self.opt_params = dict()\n",
    "\n",
    "        def train_impl_nfold(params):\n",
    "            loss = list()\n",
    "            for train_id, eval_id in k_fold.split(data):\n",
    "                train_df = data.loc[train_id]\n",
    "                eval_df = data.loc[eval_id]\n",
    "                self.train(train_df, eval_df, params, use_best_eval)\n",
    "                if self.metric == 'auc':\n",
    "                    y_pred = self.clf.predict(eval_df.drop(columns=[self.label]), num_iteration=self.best_round)\n",
    "                else:\n",
    "                    y_pred = (self.clf.predict(eval_df.drop(columns=[self.label]),\n",
    "                                               num_iteration=self.best_round) > 0.5).astype(int)\n",
    "                loss.append(self.get_loss(eval_df[self.label], y_pred))\n",
    "            return np.mean(loss)\n",
    "\n",
    "        self.opt_params = fmin(train_impl_nfold, asdict(self.opt), algo=tpe.suggest, max_evals=self.max_eval)\n",
    "\n",
    "    def train_k_fold(self, k_fold, train_data, test_data, params=None, drop_test_y=True, use_best_eval=True):\n",
    "        \"\"\"K-fold training.\n",
    "\n",
    "        Returns (out-of-fold train predictions, fold-averaged test predictions,\n",
    "        per-fold losses, list of fold models).\n",
    "        \"\"\"\n",
    "        acc_result = list()\n",
    "        # train_pred may stay np.empty: every index is overwritten exactly once\n",
    "        # because the folds partition the training rows.\n",
    "        train_pred = np.empty(train_data.shape[0])\n",
    "        # BUG FIX: was np.empty -- the uninitialized garbage was accumulated\n",
    "        # into the averaged test predictions via '+=' below (observed averaged\n",
    "        # probabilities > 1.0). Accumulators must start at zero.\n",
    "        test_pred = np.zeros(test_data.shape[0])\n",
    "        if drop_test_y:\n",
    "            dtest = test_data.drop(columns=self.label)\n",
    "        else:\n",
    "            dtest = test_data\n",
    "\n",
    "        models = list()\n",
    "        for train_id, eval_id in k_fold.split(train_data):\n",
    "            train_df = train_data.loc[train_id]\n",
    "            eval_df = train_data.loc[eval_id]\n",
    "            self.train(train_df, eval_df, params, use_best_eval)\n",
    "            models.append(copy.deepcopy(self.clf))\n",
    "            train_pred[eval_id] = self.clf.predict(eval_df.drop(columns=self.label), num_iteration=self.best_round)\n",
    "            if self.metric == 'auc':\n",
    "                y_pred = self.clf.predict(eval_df.drop(columns=[self.label]), num_iteration=self.best_round)\n",
    "            else:\n",
    "                y_pred = (self.clf.predict(eval_df.drop(columns=[self.label]),\n",
    "                                           num_iteration=self.best_round) > 0.5).astype(int)\n",
    "            acc_result.append(self.get_loss(eval_df[self.label], y_pred))\n",
    "            test_pred += self.clf.predict(dtest, num_iteration=self.best_round)\n",
    "        test_pred /= k_fold.n_splits\n",
    "        return train_pred, test_pred, acc_result, models"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 237,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "The minimum is attained in round 490\n",
      "Finished loading model, total used 2000 iterations\n",
      "The minimum is attained in round 341\n",
      "Finished loading model, total used 2000 iterations\n",
      "The minimum is attained in round 340\n",
      "Finished loading model, total used 2000 iterations\n",
      "The minimum is attained in round 329\n",
      "Finished loading model, total used 2000 iterations\n",
      "The minimum is attained in round 399\n",
      "Finished loading model, total used 2000 iterations\n"
     ]
    }
   ],
   "source": [
    "# Train: 5-fold cross-validation with a fixed parameter set\n",
    "fitter = LGBFitter(label='loan_status')\n",
    "# 2000 boosting rounds; train() trims each fold to its best eval round\n",
    "params = {'num_thread': 4, 'num_leaves': 12, 'metric': 'binary', 'objective': 'binary',\n",
    "                'num_round': 2000, 'learning_rate': 0.02, 'feature_fraction': 0.8, 'bagging_fraction': 0.8}\n",
    "from sklearn.model_selection import KFold\n",
    "kfold = KFold(n_splits=5)\n",
    "# Returns (train_pred, test_pred, per-fold losses, fold models)\n",
    "fitter_result=fitter.train_k_fold(kfold, train_data, test_data, params = params)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 274,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(array([0.47663437, 0.99706559, 0.86079002, ..., 0.9959425 , 0.97990469,\n",
       "        0.9976346 ]),\n",
       " array([0.25114901, 0.7358211 , 0.04692949, ..., 1.186431  , 1.16534993,\n",
       "        1.18808243]),\n",
       " [0.08350000000000002,\n",
       "  0.07962499999999995,\n",
       "  0.08399999999999996,\n",
       "  0.08399999999999996,\n",
       "  0.07825000000000004],\n",
       " [<lightgbm.basic.Booster at 0x7fe12719b090>,\n",
       "  <lightgbm.basic.Booster at 0x7fe127196e50>,\n",
       "  <lightgbm.basic.Booster at 0x7fe105484c90>,\n",
       "  <lightgbm.basic.Booster at 0x7fe105517090>,\n",
       "  <lightgbm.basic.Booster at 0x7fe1055b88d0>])"
      ]
     },
     "execution_count": 274,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "fitter_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 301,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n",
      "准确率\n",
      "0.9158\n",
      "f1score\n",
      "0.9458032955715757\n",
      "auc\n",
      "0.9050487635994948\n",
      "混淆矩阵\n",
      "[[1811  231]\n",
      " [ 611 7347]]\n",
      "1\n",
      "准确率\n",
      "0.9191\n",
      "f1score\n",
      "0.9489493279485076\n",
      "auc\n",
      "0.8818202394106769\n",
      "混淆矩阵\n",
      "[[1672  370]\n",
      " [ 439 7519]]\n",
      "2\n",
      "准确率\n",
      "0.9161\n",
      "f1score\n",
      "0.9462282894315195\n",
      "auc\n",
      "0.8994123531498252\n",
      "混淆矩阵\n",
      "[[1779  263]\n",
      " [ 576 7382]]\n",
      "3\n",
      "准确率\n",
      "0.9209\n",
      "f1score\n",
      "0.9498700804867228\n",
      "auc\n",
      "0.8907783862338984\n",
      "混淆矩阵\n",
      "[[1715  327]\n",
      " [ 464 7494]]\n",
      "4\n",
      "准确率\n",
      "0.9074\n",
      "f1score\n",
      "0.9397841071660815\n",
      "auc\n",
      "0.906506096280694\n",
      "混淆矩阵\n",
      "[[1848  194]\n",
      " [ 732 7226]]\n"
     ]
    }
   ],
   "source": [
    "# Evaluate each fold's model on the held-out test set\n",
    "y_test=test_data['loan_status']\n",
    "# enumerate replaces the manual 'p' counter; the unused per-iteration\n",
    "# y_train assignment (dead code) was removed.\n",
    "for p, gbm1 in enumerate(fitter_result[3]):\n",
    "    print(p)\n",
    "    # Predicted probabilities -> hard labels at the 0.5 threshold\n",
    "    y_pred0 = gbm1.predict(test_data.drop(columns='loan_status'))\n",
    "    y_pred=[1 if x >=0.5 else 0 for x in y_pred0]\n",
    "    print('准确率')\n",
    "    print(accuracy_score(y_test,y_pred))\n",
    "    print('f1score')\n",
    "    print(f1_score(y_test,y_pred)) \n",
    "    print('auc')\n",
    "    print(roc_auc_score(y_test,y_pred))\n",
    "    print('混淆矩阵')\n",
    "    print(confusion_matrix(y_test,y_pred))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 自动调参最优结果"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 320,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "准确率\n",
      "0.9122\n",
      "f1score\n",
      "0.9442468884937771\n",
      "auc\n",
      "0.8935787102034545\n",
      "混淆矩阵\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "array([[ 8435,  1339],\n",
       "       [ 3051, 37175]])"
      ]
     },
     "execution_count": 320,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# The first fold's model performed best above, so use it as the final model\n",
    "lgm_best=fitter_result[3][0]\n",
    "# Performance on the held-out test set (probabilities thresholded at 0.5)\n",
    "y_pred0=lgm_best.predict(test_final.drop(columns='loan_status'))\n",
    "y_pred=[1 if x >=0.5 else 0 for x in y_pred0]\n",
    "print('准确率')\n",
    "print(accuracy_score(test_final['loan_status'],y_pred))\n",
    "print('f1score')\n",
    "print(f1_score(test_final['loan_status'],y_pred)) \n",
    "print('auc')\n",
    "print(roc_auc_score(test_final['loan_status'],y_pred))\n",
    "print('混淆矩阵')\n",
    "confusion_matrix(test_final['loan_status'],y_pred)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 349,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<lightgbm.basic.Booster at 0x7fe12719b090>"
      ]
     },
     "execution_count": 349,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Save the model to a text file so its full parameter list can be inspected\n",
    "lgm_best.save_model('model.txt')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 353,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'\\nparameters:\\n[boosting: gbdt]\\n[objective: binary]\\n[metric: binary_logloss]\\n[tree_learner: serial]\\n[device_type: cpu]\\n[data: ]\\n[valid: ]\\n[num_iterations: 2000]\\n[learning_rate: 0.02]\\n[num_leaves: 12]\\n[num_threads: 4]\\n[deterministic: 0]\\n[force_col_wise: 0]\\n[force_row_wise: 0]\\n[histogram_pool_size: -1]\\n[max_depth: -1]\\n[min_data_in_leaf: 20]\\n[min_sum_hessian_in_leaf: 0.001]\\n[bagging_fraction: 0.8]\\n[pos_bagging_fraction: 1]\\n[neg_bagging_fraction: 1]\\n[bagging_freq: 0]\\n[bagging_seed: 3]\\n[feature_fraction: 0.8]\\n[feature_fraction_bynode: 1]\\n[feature_fraction_seed: 2]\\n[extra_trees: 0]\\n[extra_seed: 6]\\n[early_stopping_round: 0]\\n[first_metric_only: 0]\\n[max_delta_step: 0]\\n[lambda_l1: 0]\\n[lambda_l2: 0]\\n[min_gain_to_split: 0]\\n[drop_rate: 0.1]\\n[max_drop: 50]\\n[skip_drop: 0.5]\\n[xgboost_dart_mode: 0]\\n[uniform_drop: 0]\\n[drop_seed: 4]\\n[top_rate: 0.2]\\n[other_rate: 0.1]\\n[min_data_per_group: 100]\\n[max_cat_threshold: 32]\\n[cat_l2: 10]\\n[cat_smooth: 10]\\n[max_cat_to_onehot: 4]\\n[top_k: 20]\\n[monotone_constraints: ]\\n[monotone_constraints_method: basic]\\n[monotone_penalty: 0]\\n[feature_contri: ]\\n[forcedsplits_filename: ]\\n[refit_decay_rate: 0.9]\\n[cegb_tradeoff: 1]\\n[cegb_penalty_split: 0]\\n[cegb_penalty_feature_lazy: ]\\n[cegb_penalty_feature_coupled: ]\\n[path_smooth: 0]\\n[interaction_constraints: ]\\n[verbosity: 1]\\n[saved_feature_importance_type: 0]\\n[max_bin: 255]\\n[max_bin_by_feature: ]\\n[min_data_in_bin: 3]\\n[bin_construct_sample_cnt: 200000]\\n[data_random_seed: 1]\\n[is_enable_sparse: 1]\\n[enable_bundle: 1]\\n[use_missing: 1]\\n[zero_as_missing: 0]\\n[feature_pre_filter: 1]\\n[pre_partition: 0]\\n[two_round: 0]\\n[header: 0]\\n[label_column: ]\\n[weight_column: ]\\n[group_column: ]\\n[ignore_column: ]\\n[categorical_feature: ]\\n[forcedbins_filename: ]\\n[objective_seed: 5]\\n[num_class: 1]\\n[is_unbalance: 0]\\n[scale_pos_weight: 1]\\n[sigmoid: 
1]\\n[boost_from_average: 1]\\n[reg_sqrt: 0]\\n[alpha: 0.9]\\n[fair_c: 1]\\n[poisson_max_delta_step: 0.7]\\n[tweedie_variance_power: 1.5]\\n[lambdarank_truncation_level: 30]\\n[lambdarank_norm: 1]\\n[label_gain: ]\\n[eval_at: ]\\n[multi_error_top_k: 1]\\n[auc_mu_weights: ]\\n[num_machines: 1]\\n[local_listen_port: 12400]\\n[time_out: 120]\\n[machine_list_filename: ]\\n[machines: ]\\n[gpu_platform_id: -1]\\n[gpu_device_id: -1]\\n[gpu_use_dp: 0]\\n[num_gpu: 1]\\n'"
      ]
     },
     "execution_count": 353,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "\"\"\"\n",
    "parameters:\n",
    "[boosting: gbdt]\n",
    "[objective: binary]\n",
    "[metric: binary_logloss]\n",
    "[tree_learner: serial]\n",
    "[device_type: cpu]\n",
    "[data: ]\n",
    "[valid: ]\n",
    "[num_iterations: 2000]\n",
    "[learning_rate: 0.02]\n",
    "[num_leaves: 12]\n",
    "[num_threads: 4]\n",
    "[deterministic: 0]\n",
    "[force_col_wise: 0]\n",
    "[force_row_wise: 0]\n",
    "[histogram_pool_size: -1]\n",
    "[max_depth: -1]\n",
    "[min_data_in_leaf: 20]\n",
    "[min_sum_hessian_in_leaf: 0.001]\n",
    "[bagging_fraction: 0.8]\n",
    "[pos_bagging_fraction: 1]\n",
    "[neg_bagging_fraction: 1]\n",
    "[bagging_freq: 0]\n",
    "[bagging_seed: 3]\n",
    "[feature_fraction: 0.8]\n",
    "[feature_fraction_bynode: 1]\n",
    "[feature_fraction_seed: 2]\n",
    "[extra_trees: 0]\n",
    "[extra_seed: 6]\n",
    "[early_stopping_round: 0]\n",
    "[first_metric_only: 0]\n",
    "[max_delta_step: 0]\n",
    "[lambda_l1: 0]\n",
    "[lambda_l2: 0]\n",
    "[min_gain_to_split: 0]\n",
    "[drop_rate: 0.1]\n",
    "[max_drop: 50]\n",
    "[skip_drop: 0.5]\n",
    "[xgboost_dart_mode: 0]\n",
    "[uniform_drop: 0]\n",
    "[drop_seed: 4]\n",
    "[top_rate: 0.2]\n",
    "[other_rate: 0.1]\n",
    "[min_data_per_group: 100]\n",
    "[max_cat_threshold: 32]\n",
    "[cat_l2: 10]\n",
    "[cat_smooth: 10]\n",
    "[max_cat_to_onehot: 4]\n",
    "[top_k: 20]\n",
    "[monotone_constraints: ]\n",
    "[monotone_constraints_method: basic]\n",
    "[monotone_penalty: 0]\n",
    "[feature_contri: ]\n",
    "[forcedsplits_filename: ]\n",
    "[refit_decay_rate: 0.9]\n",
    "[cegb_tradeoff: 1]\n",
    "[cegb_penalty_split: 0]\n",
    "[cegb_penalty_feature_lazy: ]\n",
    "[cegb_penalty_feature_coupled: ]\n",
    "[path_smooth: 0]\n",
    "[interaction_constraints: ]\n",
    "[verbosity: 1]\n",
    "[saved_feature_importance_type: 0]\n",
    "[max_bin: 255]\n",
    "[max_bin_by_feature: ]\n",
    "[min_data_in_bin: 3]\n",
    "[bin_construct_sample_cnt: 200000]\n",
    "[data_random_seed: 1]\n",
    "[is_enable_sparse: 1]\n",
    "[enable_bundle: 1]\n",
    "[use_missing: 1]\n",
    "[zero_as_missing: 0]\n",
    "[feature_pre_filter: 1]\n",
    "[pre_partition: 0]\n",
    "[two_round: 0]\n",
    "[header: 0]\n",
    "[label_column: ]\n",
    "[weight_column: ]\n",
    "[group_column: ]\n",
    "[ignore_column: ]\n",
    "[categorical_feature: ]\n",
    "[forcedbins_filename: ]\n",
    "[objective_seed: 5]\n",
    "[num_class: 1]\n",
    "[is_unbalance: 0]\n",
    "[scale_pos_weight: 1]\n",
    "[sigmoid: 1]\n",
    "[boost_from_average: 1]\n",
    "[reg_sqrt: 0]\n",
    "[alpha: 0.9]\n",
    "[fair_c: 1]\n",
    "[poisson_max_delta_step: 0.7]\n",
    "[tweedie_variance_power: 1.5]\n",
    "[lambdarank_truncation_level: 30]\n",
    "[lambdarank_norm: 1]\n",
    "[label_gain: ]\n",
    "[eval_at: ]\n",
    "[multi_error_top_k: 1]\n",
    "[auc_mu_weights: ]\n",
    "[num_machines: 1]\n",
    "[local_listen_port: 12400]\n",
    "[time_out: 120]\n",
    "[machine_list_filename: ]\n",
    "[machines: ]\n",
    "[gpu_platform_id: -1]\n",
    "[gpu_device_id: -1]\n",
    "[gpu_use_dp: 0]\n",
    "[num_gpu: 1]\n",
    "\"\"\""
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 六.构建衍生变量与变量选择"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 6.1构建衍生变量\n",
    "使用Featuretools包完成特征交叉.  \n",
    "参考文献:\n",
    "* [featuretools](https://github.com/alteryx/featuretools)\n",
    "* [如何使用Featuretools为没有直接功能的单个表创建功能](https://cloud.tencent.com/developer/ask/208841)\n",
    "* [特征工程之自动特征生成（自动特征衍生）工具Featuretools介绍](https://blog.csdn.net/wiborgite/article/details/88761330)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "训练集和测试集样本量\n",
      "40000\n",
      "10000\n"
     ]
    }
   ],
   "source": [
    "# Load data\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "\n",
    "test_final = pd.read_csv('./test_final.csv', engine='python')\n",
    "train_final = pd.read_csv('./train_final.csv', engine='python')\n",
    "\n",
    "# Split the provided training data into train/test partitions\n",
    "# (stratified on the label; fixed random_state for reproducibility)\n",
    "from sklearn.model_selection import train_test_split\n",
    "X_train, X_test, y_train, y_test = train_test_split(train_final.drop(columns='loan_status'),train_final['loan_status'], test_size=0.2,stratify=train_final['loan_status'],random_state=100)\n",
    "train = pd.concat([X_train,y_train], axis=1)\n",
    "test = pd.concat([X_test,y_test], axis=1)\n",
    "print('训练集和测试集样本量')\n",
    "print(len(train))\n",
    "print(len(test))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Built 10585 features\n",
      "Elapsed: 00:09 | Progress:  95%|█████████▍"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/pandas/core/frame.py:4481: PerformanceWarning: DataFrame is highly fragmented.  This is usually the result of calling `frame.insert` many times, which has poor performance.  Consider using pd.concat instead.  To get a de-fragmented frame, use `newframe = frame.copy()`\n",
      "  data[k] = com.apply_if_callable(v, data)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\r",
      "Elapsed: 00:20 | Progress:  95%|█████████▌"
     ]
    }
   ],
   "source": [
    "# Generate derived features for the training set with featuretools\n",
    "import featuretools as ft\n",
    "#print(ft.list_primitives())\n",
    "# Build the entity set (single table, auto-generated index)\n",
    "df=X_train\n",
    "es = ft.EntitySet(id = 'train')\n",
    "es.entity_from_dataframe(entity_id = 'train', dataframe = df, make_index=True,index = 'index')\n",
    "\n",
    "# Deep feature synthesis: depth-1 pairwise numeric additions only\n",
    "feature_matrix, feature_defs = ft.dfs(entityset = es, target_entity = 'train',max_depth=1,verbose=1,\n",
    "                                  trans_primitives = ['add_numeric'])\n",
    "feature_matrix.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(40000, 10291)\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>continuous_annual_inc</th>\n",
       "      <th>continuous_delinq_2yrs</th>\n",
       "      <th>continuous_dti</th>\n",
       "      <th>continuous_fico_range_high</th>\n",
       "      <th>continuous_fico_range_low</th>\n",
       "      <th>continuous_funded_amnt</th>\n",
       "      <th>continuous_funded_amnt_inv</th>\n",
       "      <th>continuous_inq_last_6mths</th>\n",
       "      <th>continuous_installment</th>\n",
       "      <th>continuous_int_rate</th>\n",
       "      <th>...</th>\n",
       "      <th>discrete_sub_grade_6_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_7_one_hot + discrete_sub_grade_8_one_hot</th>\n",
       "      <th>discrete_sub_grade_7_one_hot + discrete_sub_grade_9_one_hot</th>\n",
       "      <th>discrete_sub_grade_7_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_7_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_sub_grade_9_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_9_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_9_one_hot + discrete_term_2_one_hot</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>index</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>105000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>19.15</td>\n",
       "      <td>744.0</td>\n",
       "      <td>740.0</td>\n",
       "      <td>33000.0</td>\n",
       "      <td>33000.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>1061.11</td>\n",
       "      <td>9.76</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>29000.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>35.72</td>\n",
       "      <td>674.0</td>\n",
       "      <td>670.0</td>\n",
       "      <td>9250.0</td>\n",
       "      <td>9250.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>327.72</td>\n",
       "      <td>16.55</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>78000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>14.58</td>\n",
       "      <td>684.0</td>\n",
       "      <td>680.0</td>\n",
       "      <td>2000.0</td>\n",
       "      <td>2000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>65.47</td>\n",
       "      <td>10.99</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>73320.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>17.02</td>\n",
       "      <td>664.0</td>\n",
       "      <td>660.0</td>\n",
       "      <td>14400.0</td>\n",
       "      <td>14400.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>377.36</td>\n",
       "      <td>19.48</td>\n",
       "      <td>...</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>80000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>17.94</td>\n",
       "      <td>699.0</td>\n",
       "      <td>695.0</td>\n",
       "      <td>35000.0</td>\n",
       "      <td>35000.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>1262.88</td>\n",
       "      <td>17.86</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 10291 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "       continuous_annual_inc  continuous_delinq_2yrs  continuous_dti  \\\n",
       "index                                                                  \n",
       "0                   105000.0                     0.0           19.15   \n",
       "1                    29000.0                     1.0           35.72   \n",
       "2                    78000.0                     0.0           14.58   \n",
       "3                    73320.0                     0.0           17.02   \n",
       "4                    80000.0                     0.0           17.94   \n",
       "\n",
       "       continuous_fico_range_high  continuous_fico_range_low  \\\n",
       "index                                                          \n",
       "0                           744.0                      740.0   \n",
       "1                           674.0                      670.0   \n",
       "2                           684.0                      680.0   \n",
       "3                           664.0                      660.0   \n",
       "4                           699.0                      695.0   \n",
       "\n",
       "       continuous_funded_amnt  continuous_funded_amnt_inv  \\\n",
       "index                                                       \n",
       "0                     33000.0                     33000.0   \n",
       "1                      9250.0                      9250.0   \n",
       "2                      2000.0                      2000.0   \n",
       "3                     14400.0                     14400.0   \n",
       "4                     35000.0                     35000.0   \n",
       "\n",
       "       continuous_inq_last_6mths  continuous_installment  continuous_int_rate  \\\n",
       "index                                                                           \n",
       "0                            2.0                 1061.11                 9.76   \n",
       "1                            0.0                  327.72                16.55   \n",
       "2                            0.0                   65.47                10.99   \n",
       "3                            0.0                  377.36                19.48   \n",
       "4                            2.0                 1262.88                17.86   \n",
       "\n",
       "       ...  discrete_sub_grade_6_one_hot + discrete_term_2_one_hot  \\\n",
       "index  ...                                                           \n",
       "0      ...                                                  0        \n",
       "1      ...                                                  0        \n",
       "2      ...                                                  0        \n",
       "3      ...                                                  1        \n",
       "4      ...                                                  0        \n",
       "\n",
       "       discrete_sub_grade_7_one_hot + discrete_sub_grade_8_one_hot  \\\n",
       "index                                                                \n",
       "0                                                      0             \n",
       "1                                                      0             \n",
       "2                                                      0             \n",
       "3                                                      0             \n",
       "4                                                      0             \n",
       "\n",
       "       discrete_sub_grade_7_one_hot + discrete_sub_grade_9_one_hot  \\\n",
       "index                                                                \n",
       "0                                                      0             \n",
       "1                                                      0             \n",
       "2                                                      0             \n",
       "3                                                      0             \n",
       "4                                                      0             \n",
       "\n",
       "       discrete_sub_grade_7_one_hot + discrete_term_1_one_hot  \\\n",
       "index                                                           \n",
       "0                                                      1        \n",
       "1                                                      1        \n",
       "2                                                      1        \n",
       "3                                                      0        \n",
       "4                                                      1        \n",
       "\n",
       "       discrete_sub_grade_7_one_hot + discrete_term_2_one_hot  \\\n",
       "index                                                           \n",
       "0                                                      0        \n",
       "1                                                      0        \n",
       "2                                                      0        \n",
       "3                                                      1        \n",
       "4                                                      0        \n",
       "\n",
       "       discrete_sub_grade_8_one_hot + discrete_sub_grade_9_one_hot  \\\n",
       "index                                                                \n",
       "0                                                      0             \n",
       "1                                                      0             \n",
       "2                                                      0             \n",
       "3                                                      0             \n",
       "4                                                      0             \n",
       "\n",
       "       discrete_sub_grade_8_one_hot + discrete_term_1_one_hot  \\\n",
       "index                                                           \n",
       "0                                                      1        \n",
       "1                                                      1        \n",
       "2                                                      1        \n",
       "3                                                      0        \n",
       "4                                                      1        \n",
       "\n",
       "       discrete_sub_grade_8_one_hot + discrete_term_2_one_hot  \\\n",
       "index                                                           \n",
       "0                                                      0        \n",
       "1                                                      0        \n",
       "2                                                      0        \n",
       "3                                                      1        \n",
       "4                                                      0        \n",
       "\n",
       "       discrete_sub_grade_9_one_hot + discrete_term_1_one_hot  \\\n",
       "index                                                           \n",
       "0                                                      1        \n",
       "1                                                      1        \n",
       "2                                                      1        \n",
       "3                                                      0        \n",
       "4                                                      1        \n",
       "\n",
       "       discrete_sub_grade_9_one_hot + discrete_term_2_one_hot  \n",
       "index                                                          \n",
       "0                                                      0       \n",
       "1                                                      0       \n",
       "2                                                      0       \n",
       "3                                                      1       \n",
       "4                                                      0       \n",
       "\n",
       "[5 rows x 10291 columns]"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Remove problematic engineered features produced by featuretools.\n",
     "# Operates on feature_matrix / feature_defs created in an earlier cell.\n",
     "from featuretools.selection import (remove_highly_correlated_features,remove_highly_null_features,remove_single_value_features)\n",
     "# NOTE(review): NaturalLanguage appears unused in this cell — confirm no later cell needs it before removing.\n",
     "from featuretools.primitives import NaturalLanguage\n",
     "# Drop features with a high fraction of null values\n",
     "f_matrix,f_defs=remove_highly_null_features(feature_matrix,feature_defs)\n",
     "# Drop features that are (near-)constant, i.e. dominated by a single value\n",
     "f_matrix,f_defs=remove_single_value_features(f_matrix,f_defs)\n",
     "# Drop highly correlated features — disabled because it runs very slowly\n",
     "#f_matrix,f_defs=remove_highly_correlated_features(f_matrix,f_defs)\n",
     "print(f_matrix.shape)\n",
     "f_matrix.head()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 6.2变量选择"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead tr th {\n",
       "        text-align: left;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr>\n",
       "      <th></th>\n",
       "      <th>continuous_annual_inc</th>\n",
       "      <th>continuous_delinq_2yrs</th>\n",
       "      <th>continuous_dti</th>\n",
       "      <th>continuous_fico_range_high</th>\n",
       "      <th>continuous_fico_range_low</th>\n",
       "      <th>continuous_funded_amnt</th>\n",
       "      <th>continuous_funded_amnt_inv</th>\n",
       "      <th>continuous_inq_last_6mths</th>\n",
       "      <th>continuous_installment</th>\n",
       "      <th>continuous_int_rate</th>\n",
       "      <th>...</th>\n",
       "      <th>discrete_sub_grade_6_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_7_one_hot + discrete_sub_grade_8_one_hot</th>\n",
       "      <th>discrete_sub_grade_7_one_hot + discrete_sub_grade_9_one_hot</th>\n",
       "      <th>discrete_sub_grade_7_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_7_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_sub_grade_9_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_9_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_9_one_hot + discrete_term_2_one_hot</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>105000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>19.15</td>\n",
       "      <td>744.0</td>\n",
       "      <td>740.0</td>\n",
       "      <td>33000.0</td>\n",
       "      <td>33000.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>1061.11</td>\n",
       "      <td>9.76</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>29000.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>35.72</td>\n",
       "      <td>674.0</td>\n",
       "      <td>670.0</td>\n",
       "      <td>9250.0</td>\n",
       "      <td>9250.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>327.72</td>\n",
       "      <td>16.55</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>78000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>14.58</td>\n",
       "      <td>684.0</td>\n",
       "      <td>680.0</td>\n",
       "      <td>2000.0</td>\n",
       "      <td>2000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>65.47</td>\n",
       "      <td>10.99</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>73320.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>17.02</td>\n",
       "      <td>664.0</td>\n",
       "      <td>660.0</td>\n",
       "      <td>14400.0</td>\n",
       "      <td>14400.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>377.36</td>\n",
       "      <td>19.48</td>\n",
       "      <td>...</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>80000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>17.94</td>\n",
       "      <td>699.0</td>\n",
       "      <td>695.0</td>\n",
       "      <td>35000.0</td>\n",
       "      <td>35000.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>1262.88</td>\n",
       "      <td>17.86</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 10291 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "  continuous_annual_inc continuous_delinq_2yrs continuous_dti  \\\n",
       "0              105000.0                    0.0          19.15   \n",
       "1               29000.0                    1.0          35.72   \n",
       "2               78000.0                    0.0          14.58   \n",
       "3               73320.0                    0.0          17.02   \n",
       "4               80000.0                    0.0          17.94   \n",
       "\n",
       "  continuous_fico_range_high continuous_fico_range_low continuous_funded_amnt  \\\n",
       "0                      744.0                     740.0                33000.0   \n",
       "1                      674.0                     670.0                 9250.0   \n",
       "2                      684.0                     680.0                 2000.0   \n",
       "3                      664.0                     660.0                14400.0   \n",
       "4                      699.0                     695.0                35000.0   \n",
       "\n",
       "  continuous_funded_amnt_inv continuous_inq_last_6mths continuous_installment  \\\n",
       "0                    33000.0                       2.0                1061.11   \n",
       "1                     9250.0                       0.0                 327.72   \n",
       "2                     2000.0                       0.0                  65.47   \n",
       "3                    14400.0                       0.0                 377.36   \n",
       "4                    35000.0                       2.0                1262.88   \n",
       "\n",
       "  continuous_int_rate  ...  \\\n",
       "0                9.76  ...   \n",
       "1               16.55  ...   \n",
       "2               10.99  ...   \n",
       "3               19.48  ...   \n",
       "4               17.86  ...   \n",
       "\n",
       "  discrete_sub_grade_6_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0       \n",
       "1                                                0.0       \n",
       "2                                                0.0       \n",
       "3                                                1.0       \n",
       "4                                                0.0       \n",
       "\n",
       "  discrete_sub_grade_7_one_hot + discrete_sub_grade_8_one_hot  \\\n",
       "0                                                0.0            \n",
       "1                                                0.0            \n",
       "2                                                0.0            \n",
       "3                                                0.0            \n",
       "4                                                0.0            \n",
       "\n",
       "  discrete_sub_grade_7_one_hot + discrete_sub_grade_9_one_hot  \\\n",
       "0                                                0.0            \n",
       "1                                                0.0            \n",
       "2                                                0.0            \n",
       "3                                                0.0            \n",
       "4                                                0.0            \n",
       "\n",
       "  discrete_sub_grade_7_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0       \n",
       "1                                                1.0       \n",
       "2                                                1.0       \n",
       "3                                                0.0       \n",
       "4                                                1.0       \n",
       "\n",
       "  discrete_sub_grade_7_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0       \n",
       "1                                                0.0       \n",
       "2                                                0.0       \n",
       "3                                                1.0       \n",
       "4                                                0.0       \n",
       "\n",
       "  discrete_sub_grade_8_one_hot + discrete_sub_grade_9_one_hot  \\\n",
       "0                                                0.0            \n",
       "1                                                0.0            \n",
       "2                                                0.0            \n",
       "3                                                0.0            \n",
       "4                                                0.0            \n",
       "\n",
       "  discrete_sub_grade_8_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0       \n",
       "1                                                1.0       \n",
       "2                                                1.0       \n",
       "3                                                0.0       \n",
       "4                                                1.0       \n",
       "\n",
       "  discrete_sub_grade_8_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0       \n",
       "1                                                0.0       \n",
       "2                                                0.0       \n",
       "3                                                1.0       \n",
       "4                                                0.0       \n",
       "\n",
       "  discrete_sub_grade_9_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0       \n",
       "1                                                1.0       \n",
       "2                                                1.0       \n",
       "3                                                0.0       \n",
       "4                                                1.0       \n",
       "\n",
       "  discrete_sub_grade_9_one_hot + discrete_term_2_one_hot  \n",
       "0                                                0.0      \n",
       "1                                                0.0      \n",
       "2                                                0.0      \n",
       "3                                                1.0      \n",
       "4                                                0.0      \n",
       "\n",
       "[5 rows x 10291 columns]"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#替换异常值\n",
    "from sklearn.impute import SimpleImputer\n",
    "si = SimpleImputer(missing_values=np.nan, strategy='most_frequent')\n",
    "f_df = pd.DataFrame(si.fit_transform(f_matrix))\n",
    "f_df.columns=[f_matrix.columns]\n",
    "f_df.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/sklearn/utils/validation.py:1677: FutureWarning: Feature names only support names that are all strings. Got feature names with dtypes: ['tuple']. An error will be raised in 1.2.\n",
      "  FutureWarning,\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "feature_importance: [9.79766193e-05 6.17160225e-05 1.50666839e-04 ... 1.97692217e-04\n",
      " 6.34844685e-05 3.21578738e-05]\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/sklearn/utils/validation.py:1677: FutureWarning: Feature names only support names that are all strings. Got feature names with dtypes: ['tuple']. An error will be raised in 1.2.\n",
      "  FutureWarning,\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "f_new shape: (40000, 692)\n"
     ]
    }
   ],
   "source": [
    "#使用树模型进行特征选择\n",
    "import matplotlib.pyplot as plt\n",
    "from sklearn.datasets import make_classification\n",
    "from sklearn.ensemble import ExtraTreesClassifier\n",
    "from sklearn.feature_selection import SelectFromModel\n",
    "\n",
    "clf = ExtraTreesClassifier(n_estimators=10)\n",
    "clf = clf.fit(f_df, y_train)\n",
    "print('feature_importance:', clf.feature_importances_)  \n",
    "f_model = SelectFromModel(clf, prefit=True)\n",
    "f_new = f_model.transform(f_df)\n",
    "print('f_new shape:', f_new.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>continuous_annual_inc</th>\n",
       "      <th>continuous_dti</th>\n",
       "      <th>continuous_fico_range_high</th>\n",
       "      <th>continuous_installment</th>\n",
       "      <th>continuous_last_fico_range_high</th>\n",
       "      <th>continuous_annual_inc + continuous_delinq_2yrs</th>\n",
       "      <th>continuous_annual_inc + continuous_fico_range_high</th>\n",
       "      <th>continuous_annual_inc + continuous_funded_amnt</th>\n",
       "      <th>continuous_annual_inc + continuous_funded_amnt_inv</th>\n",
       "      <th>continuous_annual_inc + continuous_inq_last_6mths</th>\n",
       "      <th>...</th>\n",
       "      <th>discrete_sub_grade_15_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_17_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_19_one_hot + discrete_sub_grade_9_one_hot</th>\n",
       "      <th>discrete_sub_grade_27_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_28_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_30_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_32_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_6_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_term_2_one_hot</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>105000.0</td>\n",
       "      <td>19.15</td>\n",
       "      <td>744.0</td>\n",
       "      <td>1061.11</td>\n",
       "      <td>774.0</td>\n",
       "      <td>105000.0</td>\n",
       "      <td>105744.0</td>\n",
       "      <td>138000.0</td>\n",
       "      <td>138000.0</td>\n",
       "      <td>105002.0</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>29000.0</td>\n",
       "      <td>35.72</td>\n",
       "      <td>674.0</td>\n",
       "      <td>327.72</td>\n",
       "      <td>679.0</td>\n",
       "      <td>29001.0</td>\n",
       "      <td>29674.0</td>\n",
       "      <td>38250.0</td>\n",
       "      <td>38250.0</td>\n",
       "      <td>29000.0</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>78000.0</td>\n",
       "      <td>14.58</td>\n",
       "      <td>684.0</td>\n",
       "      <td>65.47</td>\n",
       "      <td>739.0</td>\n",
       "      <td>78000.0</td>\n",
       "      <td>78684.0</td>\n",
       "      <td>80000.0</td>\n",
       "      <td>80000.0</td>\n",
       "      <td>78000.0</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>73320.0</td>\n",
       "      <td>17.02</td>\n",
       "      <td>664.0</td>\n",
       "      <td>377.36</td>\n",
       "      <td>569.0</td>\n",
       "      <td>73320.0</td>\n",
       "      <td>73984.0</td>\n",
       "      <td>87720.0</td>\n",
       "      <td>87720.0</td>\n",
       "      <td>73320.0</td>\n",
       "      <td>...</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>80000.0</td>\n",
       "      <td>17.94</td>\n",
       "      <td>699.0</td>\n",
       "      <td>1262.88</td>\n",
       "      <td>559.0</td>\n",
       "      <td>80000.0</td>\n",
       "      <td>80699.0</td>\n",
       "      <td>115000.0</td>\n",
       "      <td>115000.0</td>\n",
       "      <td>80002.0</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 692 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "   continuous_annual_inc  continuous_dti  continuous_fico_range_high  \\\n",
       "0               105000.0           19.15                       744.0   \n",
       "1                29000.0           35.72                       674.0   \n",
       "2                78000.0           14.58                       684.0   \n",
       "3                73320.0           17.02                       664.0   \n",
       "4                80000.0           17.94                       699.0   \n",
       "\n",
       "   continuous_installment  continuous_last_fico_range_high  \\\n",
       "0                 1061.11                            774.0   \n",
       "1                  327.72                            679.0   \n",
       "2                   65.47                            739.0   \n",
       "3                  377.36                            569.0   \n",
       "4                 1262.88                            559.0   \n",
       "\n",
       "   continuous_annual_inc + continuous_delinq_2yrs  \\\n",
       "0                                        105000.0   \n",
       "1                                         29001.0   \n",
       "2                                         78000.0   \n",
       "3                                         73320.0   \n",
       "4                                         80000.0   \n",
       "\n",
       "   continuous_annual_inc + continuous_fico_range_high  \\\n",
       "0                                           105744.0    \n",
       "1                                            29674.0    \n",
       "2                                            78684.0    \n",
       "3                                            73984.0    \n",
       "4                                            80699.0    \n",
       "\n",
       "   continuous_annual_inc + continuous_funded_amnt  \\\n",
       "0                                        138000.0   \n",
       "1                                         38250.0   \n",
       "2                                         80000.0   \n",
       "3                                         87720.0   \n",
       "4                                        115000.0   \n",
       "\n",
       "   continuous_annual_inc + continuous_funded_amnt_inv  \\\n",
       "0                                           138000.0    \n",
       "1                                            38250.0    \n",
       "2                                            80000.0    \n",
       "3                                            87720.0    \n",
       "4                                           115000.0    \n",
       "\n",
       "   continuous_annual_inc + continuous_inq_last_6mths  ...  \\\n",
       "0                                           105002.0  ...   \n",
       "1                                            29000.0  ...   \n",
       "2                                            78000.0  ...   \n",
       "3                                            73320.0  ...   \n",
       "4                                            80002.0  ...   \n",
       "\n",
       "   discrete_sub_grade_15_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0         \n",
       "1                                                0.0         \n",
       "2                                                0.0         \n",
       "3                                                1.0         \n",
       "4                                                0.0         \n",
       "\n",
       "   discrete_sub_grade_17_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0         \n",
       "1                                                1.0         \n",
       "2                                                1.0         \n",
       "3                                                0.0         \n",
       "4                                                1.0         \n",
       "\n",
       "   discrete_sub_grade_19_one_hot + discrete_sub_grade_9_one_hot  \\\n",
       "0                                                1.0              \n",
       "1                                                0.0              \n",
       "2                                                0.0              \n",
       "3                                                0.0              \n",
       "4                                                0.0              \n",
       "\n",
       "   discrete_sub_grade_27_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0         \n",
       "1                                                0.0         \n",
       "2                                                0.0         \n",
       "3                                                1.0         \n",
       "4                                                0.0         \n",
       "\n",
       "   discrete_sub_grade_28_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0         \n",
       "1                                                0.0         \n",
       "2                                                0.0         \n",
       "3                                                1.0         \n",
       "4                                                0.0         \n",
       "\n",
       "   discrete_sub_grade_30_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0         \n",
       "1                                                1.0         \n",
       "2                                                1.0         \n",
       "3                                                0.0         \n",
       "4                                                1.0         \n",
       "\n",
       "   discrete_sub_grade_32_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0         \n",
       "1                                                1.0         \n",
       "2                                                1.0         \n",
       "3                                                0.0         \n",
       "4                                                1.0         \n",
       "\n",
       "   discrete_sub_grade_6_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0        \n",
       "1                                                0.0        \n",
       "2                                                0.0        \n",
       "3                                                1.0        \n",
       "4                                                0.0        \n",
       "\n",
       "   discrete_sub_grade_8_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0        \n",
       "1                                                1.0        \n",
       "2                                                1.0        \n",
       "3                                                0.0        \n",
       "4                                                1.0        \n",
       "\n",
       "   discrete_sub_grade_8_one_hot + discrete_term_2_one_hot  \n",
       "0                                                0.0       \n",
       "1                                                0.0       \n",
       "2                                                0.0       \n",
       "3                                                1.0       \n",
       "4                                                0.0       \n",
       "\n",
       "[5 rows x 692 columns]"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#输出特征名称\n",
    "feature_idx = f_model.get_support() \n",
    "feature_name = list(f_df.columns[feature_idx])\n",
    "f_names=[]\n",
    "for i in range(len(feature_name)):\n",
    "    f_names.append(feature_name[i][0])\n",
    "#print(f_names)\n",
    "f_train_df=pd.DataFrame(f_new)\n",
    "f_train_df.columns=f_names\n",
    "f_train_df.head()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 七.进一步调参"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Built 10585 features\n",
      "Elapsed: 00:04 | Progress:  93%|█████████▎"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/pandas/core/frame.py:4481: PerformanceWarning: DataFrame is highly fragmented.  This is usually the result of calling `frame.insert` many times, which has poor performance.  Consider using pd.concat instead.  To get a de-fragmented frame, use `newframe = frame.copy()`\n",
      "  data[k] = com.apply_if_callable(v, data)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Elapsed: 00:30 | Progress: 100%|██████████\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>continuous_annual_inc</th>\n",
       "      <th>continuous_annual_inc_joint</th>\n",
       "      <th>continuous_delinq_2yrs</th>\n",
       "      <th>continuous_dti</th>\n",
       "      <th>continuous_dti_joint</th>\n",
       "      <th>continuous_fico_range_high</th>\n",
       "      <th>continuous_fico_range_low</th>\n",
       "      <th>continuous_funded_amnt</th>\n",
       "      <th>continuous_funded_amnt_inv</th>\n",
       "      <th>continuous_inq_last_6mths</th>\n",
       "      <th>...</th>\n",
       "      <th>discrete_sub_grade_7_one_hot + discrete_sub_grade_8_one_hot</th>\n",
       "      <th>discrete_sub_grade_7_one_hot + discrete_sub_grade_9_one_hot</th>\n",
       "      <th>discrete_sub_grade_7_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_7_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_sub_grade_9_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_9_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_9_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_term_1_one_hot + discrete_term_2_one_hot</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>index</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>113000.00</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.0</td>\n",
       "      <td>26.17</td>\n",
       "      <td>NaN</td>\n",
       "      <td>664.0</td>\n",
       "      <td>660.0</td>\n",
       "      <td>5000.0</td>\n",
       "      <td>5000.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>92500.00</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.0</td>\n",
       "      <td>21.93</td>\n",
       "      <td>NaN</td>\n",
       "      <td>704.0</td>\n",
       "      <td>700.0</td>\n",
       "      <td>24000.0</td>\n",
       "      <td>24000.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>85000.00</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.0</td>\n",
       "      <td>19.57</td>\n",
       "      <td>NaN</td>\n",
       "      <td>714.0</td>\n",
       "      <td>710.0</td>\n",
       "      <td>35000.0</td>\n",
       "      <td>35000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>102977.28</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.0</td>\n",
       "      <td>13.98</td>\n",
       "      <td>NaN</td>\n",
       "      <td>694.0</td>\n",
       "      <td>690.0</td>\n",
       "      <td>35000.0</td>\n",
       "      <td>35000.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>90000.00</td>\n",
       "      <td>NaN</td>\n",
       "      <td>0.0</td>\n",
       "      <td>19.83</td>\n",
       "      <td>NaN</td>\n",
       "      <td>769.0</td>\n",
       "      <td>765.0</td>\n",
       "      <td>12000.0</td>\n",
       "      <td>12000.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>...</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "      <td>2</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 10585 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "       continuous_annual_inc  continuous_annual_inc_joint  \\\n",
       "index                                                       \n",
       "0                  113000.00                          NaN   \n",
       "1                   92500.00                          NaN   \n",
       "2                   85000.00                          NaN   \n",
       "3                  102977.28                          NaN   \n",
       "4                   90000.00                          NaN   \n",
       "\n",
       "       continuous_delinq_2yrs  continuous_dti  continuous_dti_joint  \\\n",
       "index                                                                 \n",
       "0                         0.0           26.17                   NaN   \n",
       "1                         0.0           21.93                   NaN   \n",
       "2                         0.0           19.57                   NaN   \n",
       "3                         0.0           13.98                   NaN   \n",
       "4                         0.0           19.83                   NaN   \n",
       "\n",
       "       continuous_fico_range_high  continuous_fico_range_low  \\\n",
       "index                                                          \n",
       "0                           664.0                      660.0   \n",
       "1                           704.0                      700.0   \n",
       "2                           714.0                      710.0   \n",
       "3                           694.0                      690.0   \n",
       "4                           769.0                      765.0   \n",
       "\n",
       "       continuous_funded_amnt  continuous_funded_amnt_inv  \\\n",
       "index                                                       \n",
       "0                      5000.0                      5000.0   \n",
       "1                     24000.0                     24000.0   \n",
       "2                     35000.0                     35000.0   \n",
       "3                     35000.0                     35000.0   \n",
       "4                     12000.0                     12000.0   \n",
       "\n",
       "       continuous_inq_last_6mths  ...  \\\n",
       "index                             ...   \n",
       "0                            1.0  ...   \n",
       "1                            1.0  ...   \n",
       "2                            0.0  ...   \n",
       "3                            0.0  ...   \n",
       "4                            1.0  ...   \n",
       "\n",
       "       discrete_sub_grade_7_one_hot + discrete_sub_grade_8_one_hot  \\\n",
       "index                                                                \n",
       "0                                                      0             \n",
       "1                                                      0             \n",
       "2                                                      0             \n",
       "3                                                      0             \n",
       "4                                                      1             \n",
       "\n",
       "       discrete_sub_grade_7_one_hot + discrete_sub_grade_9_one_hot  \\\n",
       "index                                                                \n",
       "0                                                      0             \n",
       "1                                                      0             \n",
       "2                                                      1             \n",
       "3                                                      0             \n",
       "4                                                      1             \n",
       "\n",
       "       discrete_sub_grade_7_one_hot + discrete_term_1_one_hot  \\\n",
       "index                                                           \n",
       "0                                                      1        \n",
       "1                                                      1        \n",
       "2                                                      1        \n",
       "3                                                      1        \n",
       "4                                                      2        \n",
       "\n",
       "       discrete_sub_grade_7_one_hot + discrete_term_2_one_hot  \\\n",
       "index                                                           \n",
       "0                                                      0        \n",
       "1                                                      0        \n",
       "2                                                      0        \n",
       "3                                                      0        \n",
       "4                                                      1        \n",
       "\n",
       "       discrete_sub_grade_8_one_hot + discrete_sub_grade_9_one_hot  \\\n",
       "index                                                                \n",
       "0                                                      0             \n",
       "1                                                      0             \n",
       "2                                                      1             \n",
       "3                                                      0             \n",
       "4                                                      0             \n",
       "\n",
       "       discrete_sub_grade_8_one_hot + discrete_term_1_one_hot  \\\n",
       "index                                                           \n",
       "0                                                      1        \n",
       "1                                                      1        \n",
       "2                                                      1        \n",
       "3                                                      1        \n",
       "4                                                      1        \n",
       "\n",
       "       discrete_sub_grade_8_one_hot + discrete_term_2_one_hot  \\\n",
       "index                                                           \n",
       "0                                                      0        \n",
       "1                                                      0        \n",
       "2                                                      0        \n",
       "3                                                      0        \n",
       "4                                                      0        \n",
       "\n",
       "       discrete_sub_grade_9_one_hot + discrete_term_1_one_hot  \\\n",
       "index                                                           \n",
       "0                                                      1        \n",
       "1                                                      1        \n",
       "2                                                      2        \n",
       "3                                                      1        \n",
       "4                                                      1        \n",
       "\n",
       "       discrete_sub_grade_9_one_hot + discrete_term_2_one_hot  \\\n",
       "index                                                           \n",
       "0                                                      0        \n",
       "1                                                      0        \n",
       "2                                                      1        \n",
       "3                                                      0        \n",
       "4                                                      0        \n",
       "\n",
       "       discrete_term_1_one_hot + discrete_term_2_one_hot  \n",
       "index                                                     \n",
       "0                                                      1  \n",
       "1                                                      1  \n",
       "2                                                      1  \n",
       "3                                                      1  \n",
       "4                                                      1  \n",
       "\n",
       "[5 rows x 10585 columns]"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#首先对验证集进行预处理\n",
    "# 建立实体\n",
    "df=X_test\n",
    "es = ft.EntitySet(id = 'test')\n",
    "es.entity_from_dataframe(entity_id = 'test', dataframe = df, make_index=True,index = 'index')\n",
    "\n",
    "# 使用dfs算法构造特征\n",
    "feature_test_matrix, feature_test_defs = ft.dfs(entityset = es, target_entity = 'test',max_depth=1,verbose=1,\n",
    "                                  trans_primitives = ['add_numeric'])\n",
    "feature_test_matrix.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead tr th {\n",
       "        text-align: left;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr>\n",
       "      <th></th>\n",
       "      <th>continuous_annual_inc</th>\n",
       "      <th>continuous_dti</th>\n",
       "      <th>continuous_fico_range_high</th>\n",
       "      <th>continuous_installment</th>\n",
       "      <th>continuous_last_fico_range_high</th>\n",
       "      <th>continuous_annual_inc + continuous_delinq_2yrs</th>\n",
       "      <th>continuous_annual_inc + continuous_fico_range_high</th>\n",
       "      <th>continuous_annual_inc + continuous_funded_amnt</th>\n",
       "      <th>continuous_annual_inc + continuous_funded_amnt_inv</th>\n",
       "      <th>continuous_annual_inc + continuous_inq_last_6mths</th>\n",
       "      <th>...</th>\n",
       "      <th>discrete_sub_grade_15_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_17_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_19_one_hot + discrete_sub_grade_9_one_hot</th>\n",
       "      <th>discrete_sub_grade_27_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_28_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_30_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_32_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_6_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_term_2_one_hot</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>113000.00</td>\n",
       "      <td>26.17</td>\n",
       "      <td>664.0</td>\n",
       "      <td>160.78</td>\n",
       "      <td>534.0</td>\n",
       "      <td>113000.00</td>\n",
       "      <td>113664.00</td>\n",
       "      <td>118000.00</td>\n",
       "      <td>118000.00</td>\n",
       "      <td>113001.00</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>92500.00</td>\n",
       "      <td>21.93</td>\n",
       "      <td>704.0</td>\n",
       "      <td>781.65</td>\n",
       "      <td>719.0</td>\n",
       "      <td>92500.00</td>\n",
       "      <td>93204.00</td>\n",
       "      <td>116500.00</td>\n",
       "      <td>116500.00</td>\n",
       "      <td>92501.00</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>85000.00</td>\n",
       "      <td>19.57</td>\n",
       "      <td>714.0</td>\n",
       "      <td>1154.00</td>\n",
       "      <td>744.0</td>\n",
       "      <td>85000.00</td>\n",
       "      <td>85714.00</td>\n",
       "      <td>120000.00</td>\n",
       "      <td>120000.00</td>\n",
       "      <td>85000.00</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>102977.28</td>\n",
       "      <td>13.98</td>\n",
       "      <td>694.0</td>\n",
       "      <td>1095.00</td>\n",
       "      <td>819.0</td>\n",
       "      <td>102977.28</td>\n",
       "      <td>103671.28</td>\n",
       "      <td>137977.28</td>\n",
       "      <td>137977.28</td>\n",
       "      <td>102977.28</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>90000.00</td>\n",
       "      <td>19.83</td>\n",
       "      <td>769.0</td>\n",
       "      <td>378.76</td>\n",
       "      <td>729.0</td>\n",
       "      <td>90000.00</td>\n",
       "      <td>90769.00</td>\n",
       "      <td>102000.00</td>\n",
       "      <td>102000.00</td>\n",
       "      <td>90001.00</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 692 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "  continuous_annual_inc continuous_dti continuous_fico_range_high  \\\n",
       "0             113000.00          26.17                      664.0   \n",
       "1              92500.00          21.93                      704.0   \n",
       "2              85000.00          19.57                      714.0   \n",
       "3             102977.28          13.98                      694.0   \n",
       "4              90000.00          19.83                      769.0   \n",
       "\n",
       "  continuous_installment continuous_last_fico_range_high  \\\n",
       "0                 160.78                           534.0   \n",
       "1                 781.65                           719.0   \n",
       "2                1154.00                           744.0   \n",
       "3                1095.00                           819.0   \n",
       "4                 378.76                           729.0   \n",
       "\n",
       "  continuous_annual_inc + continuous_delinq_2yrs  \\\n",
       "0                                      113000.00   \n",
       "1                                       92500.00   \n",
       "2                                       85000.00   \n",
       "3                                      102977.28   \n",
       "4                                       90000.00   \n",
       "\n",
       "  continuous_annual_inc + continuous_fico_range_high  \\\n",
       "0                                          113664.00   \n",
       "1                                           93204.00   \n",
       "2                                           85714.00   \n",
       "3                                          103671.28   \n",
       "4                                           90769.00   \n",
       "\n",
       "  continuous_annual_inc + continuous_funded_amnt  \\\n",
       "0                                      118000.00   \n",
       "1                                      116500.00   \n",
       "2                                      120000.00   \n",
       "3                                      137977.28   \n",
       "4                                      102000.00   \n",
       "\n",
       "  continuous_annual_inc + continuous_funded_amnt_inv  \\\n",
       "0                                          118000.00   \n",
       "1                                          116500.00   \n",
       "2                                          120000.00   \n",
       "3                                          137977.28   \n",
       "4                                          102000.00   \n",
       "\n",
       "  continuous_annual_inc + continuous_inq_last_6mths  ...  \\\n",
       "0                                         113001.00  ...   \n",
       "1                                          92501.00  ...   \n",
       "2                                          85000.00  ...   \n",
       "3                                         102977.28  ...   \n",
       "4                                          90001.00  ...   \n",
       "\n",
       "  discrete_sub_grade_15_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0        \n",
       "1                                                0.0        \n",
       "2                                                0.0        \n",
       "3                                                0.0        \n",
       "4                                                0.0        \n",
       "\n",
       "  discrete_sub_grade_17_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0        \n",
       "1                                                1.0        \n",
       "2                                                1.0        \n",
       "3                                                1.0        \n",
       "4                                                1.0        \n",
       "\n",
       "  discrete_sub_grade_19_one_hot + discrete_sub_grade_9_one_hot  \\\n",
       "0                                                1.0             \n",
       "1                                                0.0             \n",
       "2                                                1.0             \n",
       "3                                                0.0             \n",
       "4                                                0.0             \n",
       "\n",
       "  discrete_sub_grade_27_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0        \n",
       "1                                                0.0        \n",
       "2                                                0.0        \n",
       "3                                                0.0        \n",
       "4                                                0.0        \n",
       "\n",
       "  discrete_sub_grade_28_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0        \n",
       "1                                                0.0        \n",
       "2                                                0.0        \n",
       "3                                                0.0        \n",
       "4                                                0.0        \n",
       "\n",
       "  discrete_sub_grade_30_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0        \n",
       "1                                                1.0        \n",
       "2                                                1.0        \n",
       "3                                                1.0        \n",
       "4                                                1.0        \n",
       "\n",
       "  discrete_sub_grade_32_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0        \n",
       "1                                                1.0        \n",
       "2                                                1.0        \n",
       "3                                                1.0        \n",
       "4                                                1.0        \n",
       "\n",
       "  discrete_sub_grade_6_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0       \n",
       "1                                                0.0       \n",
       "2                                                0.0       \n",
       "3                                                0.0       \n",
       "4                                                0.0       \n",
       "\n",
       "  discrete_sub_grade_8_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0       \n",
       "1                                                1.0       \n",
       "2                                                1.0       \n",
       "3                                                1.0       \n",
       "4                                                1.0       \n",
       "\n",
       "  discrete_sub_grade_8_one_hot + discrete_term_2_one_hot  \n",
       "0                                                0.0      \n",
       "1                                                0.0      \n",
       "2                                                0.0      \n",
       "3                                                0.0      \n",
       "4                                                0.0      \n",
       "\n",
       "[5 rows x 692 columns]"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
    "source": [
     "# Select the same feature columns as chosen for the training set,\n",
     "# impute missing values, and wrap the result back into a DataFrame.\n",
     "# NOTE(review): fit_transform re-fits the imputer on the test split;\n",
     "# presumably si.transform (reusing training statistics) was intended — confirm.\n",
     "f_test_df = pd.DataFrame(si.fit_transform(feature_test_matrix.loc[:,f_names]))\n",
     "# NOTE(review): wrapping f_names in a list creates MultiIndex columns,\n",
     "# which triggers the non-lexsorted multi-index PerformanceWarning seen in\n",
     "# later cells — confirm whether plain `f_names` was intended.\n",
     "f_test_df.columns=[f_names]\n",
     "f_test_df.head()"
    ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "#然后开始模型训练\n",
    "import io\n",
    "import multiprocessing\n",
    "from contextlib import redirect_stdout\n",
    "from copy import deepcopy\n",
    "from dataclasses import dataclass, asdict\n",
    "import hyperopt.pyll\n",
    "from hyperopt import fmin, tpe, hp\n",
    "import numpy as np\n",
    "import lightgbm as lgb\n",
    "from sklearn.metrics import accuracy_score\n",
    "from sklearn.metrics import f1_score\n",
    "from sklearn.metrics import precision_score\n",
    "from sklearn.metrics import recall_score\n",
    "from sklearn.metrics import roc_auc_score\n",
    "import torch\n",
    "\n",
    "import copy\n",
    "cpu_count = 4\n",
    "use_gpu = False\n",
    "@dataclass\n",
    "class LGBOpt:\n",
    "    num_threads: any = hp.choice('num_threads', [cpu_count])\n",
    "    num_leaves: any = hp.choice('num_leaves', [64])\n",
    "    metric: any = hp.choice('metric', ['binary_error'])\n",
    "    num_round: any = hp.choice('num_rounds', [1000])\n",
    "    objective: any = hp.choice('objective', ['binary'])\n",
    "    learning_rate: any = hp.uniform('learning_rate', 0.01, 0.1)\n",
    "    feature_fraction: any = hp.uniform('feature_fraction', 0.5, 1.0)\n",
    "    bagging_fraction: any = hp.uniform('bagging_fraction', 0.8, 1.0)\n",
    "    device_type: any = hp.choice('device_tpye', ['gpu']) if use_gpu else hp.choice('device_type',\n",
    "                                                                                   ['cpu'])\n",
    "    boosting: any = hp.choice('boosting', ['gbdt', 'dart', 'goss'])\n",
    "    extra_trees: any = hp.choice('extra_tress', [False, True])\n",
    "    drop_rate: any = hp.uniform('drop_rate', 0, 0.2)\n",
    "    uniform_drop: any = hp.choice('uniform_drop', [True, False])\n",
    "    lambda_l1: any = hp.uniform('lambda_l1', 0, 10)  # TODO: Check range\n",
    "    lambda_l2: any = hp.uniform('lambda_l2', 0, 10)  # TODO: Check range\n",
    "    min_gain_to_split: any = hp.uniform('min_gain_to_split', 0, 1)  # TODO: Check range\n",
    "    min_data_in_bin = hp.choice('min_data_in_bin', [3, 5, 10, 15, 20, 50])\n",
    "\n",
    "    @staticmethod\n",
    "    def get_common_params():\n",
    "        return {'num_thread': 4, 'num_leaves': 12, 'metric': 'binary', 'objective': 'binary',\n",
    "                'num_round': 1000, 'learning_rate': 0.01, 'feature_fraction': 0.8, 'bagging_fraction': 0.8}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "class FitterBase(object):\n",
    "    def __init__(self, label, metric, max_eval=100, opt=None):\n",
    "        self.label = label\n",
    "        self.metric = metric\n",
    "        self.opt_params = dict()\n",
    "        self.max_eval = max_eval\n",
    "        self.opt = opt\n",
    "\n",
    "    def get_loss(self, y, y_pred):\n",
    "        if self.metric == 'error':\n",
    "            return 1 - accuracy_score(y, y_pred)\n",
    "        elif self.metric == 'precision':\n",
    "            return 1 - precision_score(y, y_pred)\n",
    "        elif self.metric == 'recall':\n",
    "            return 1 - recall_score(y, y_pred)\n",
    "        elif self.metric == 'macro_f1':\n",
    "            return 1 - f1_score(y, y_pred, average='macro')\n",
    "        elif self.metric == 'micro_f1':\n",
    "            return 1 - f1_score(y, y_pred, average='micro')\n",
    "        elif self.metric == 'auc':  # TODO: Add a warning checking if y_predict is all [0, 1], it should be probability\n",
    "            return 1 - roc_auc_score(y, y_pred)\n",
    "        else:\n",
    "            raise Exception(\"Not implemented yet.\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "class LGBFitter(FitterBase):\n",
    "    def __init__(self, label='label', metric='error', opt: LGBOpt = None, max_eval=100):\n",
    "        super(LGBFitter, self).__init__(label, metric, max_eval)\n",
    "        if opt is not None:\n",
    "            self.opt = opt\n",
    "        else:\n",
    "            self.opt = LGBOpt()\n",
    "        self.best_round = None\n",
    "        self.clf = None\n",
    "\n",
    "    def train(self, train_df, eval_df, params=None, use_best_eval=True):\n",
    "        self.best_round = None\n",
    "        dtrain = lgb.Dataset(train_df.drop(columns=[self.label]), train_df[self.label])\n",
    "        deval = lgb.Dataset(eval_df.drop(columns=[self.label]), eval_df[self.label])\n",
    "        evallist = [dtrain, deval]\n",
    "        if params is None:\n",
    "            use_params = deepcopy(self.opt_params)\n",
    "        else:\n",
    "            use_params = deepcopy(params)\n",
    "\n",
    "        num_round = use_params.pop('num_round')\n",
    "        if use_best_eval:\n",
    "            with io.StringIO() as buf, redirect_stdout(buf):\n",
    "                self.clf = lgb.train(use_params, dtrain, num_round, valid_sets=evallist)\n",
    "                output = buf.getvalue().split(\"\\n\")\n",
    "            min_error = np.inf\n",
    "            min_index = 0\n",
    "            for idx in range(len(output) - 1):\n",
    "                if len(output[idx].split(\"\\t\")) == 3:\n",
    "                    temp = float(output[idx].split(\"\\t\")[2].split(\":\")[1])\n",
    "                    if min_error > temp:\n",
    "                        min_error = temp\n",
    "                        min_index = int(output[idx].split(\"\\t\")[0][1:-1])\n",
    "            print(\"The minimum is attained in round %d\" % (min_index + 1))\n",
    "            self.best_round = min_index + 1\n",
    "            return output\n",
    "        else:\n",
    "            with io.StringIO() as buf, redirect_stdout(buf):\n",
    "                self.clf = lgb.train(use_params, dtrain, num_round, valid_sets=evallist)\n",
    "                output = buf.getvalue().split(\"\\n\")\n",
    "            self.best_round = num_round\n",
    "            return output\n",
    "\n",
    "    def search(self, train_df, eval_df, use_best_eval=True):\n",
    "        self.opt_params = dict()\n",
    "\n",
    "        def train_impl(params):\n",
    "            self.train(train_df, eval_df, params, use_best_eval)\n",
    "            if self.metric == 'auc':\n",
    "                y_pred = self.clf.predict(eval_df.drop(columns=[self.label]), num_iteration=self.best_round)\n",
    "            else:\n",
    "                y_pred = (self.clf.predict(eval_df.drop(columns=[self.label]),\n",
    "                                           num_iteration=self.best_round) > 0.5).astype(int)\n",
    "            return self.get_loss(eval_df[self.label], y_pred)\n",
    "\n",
    "        self.opt_params = fmin(train_impl, asdict(self.opt), algo=tpe.suggest, max_evals=self.max_eval)\n",
    "\n",
    "    def search_k_fold(self, k_fold, data, use_best_eval=True):\n",
    "        self.opt_params = dict()\n",
    "\n",
    "        def train_impl_nfold(params):\n",
    "            loss = list()\n",
    "            for train_id, eval_id in k_fold.split(data):\n",
    "                train_df = data.loc[train_id]\n",
    "                eval_df = data.loc[eval_id]\n",
    "                self.train(train_df, eval_df, params, use_best_eval)\n",
    "                if self.metric == 'auc':\n",
    "                    y_pred = self.clf.predict(eval_df.drop(columns=[self.label]), num_iteration=self.best_round)\n",
    "                else:\n",
    "                    y_pred = (self.clf.predict(eval_df.drop(columns=[self.label]),\n",
    "                                               num_iteration=self.best_round) > 0.5).astype(int)\n",
    "                loss.append(self.get_loss(eval_df[self.label], y_pred))\n",
    "            return np.mean(loss)\n",
    "\n",
    "        self.opt_params = fmin(train_impl_nfold, asdict(self.opt), algo=tpe.suggest, max_evals=self.max_eval)\n",
    "\n",
    "    def train_k_fold(self, k_fold, train_data, test_data, params=None, drop_test_y=True, use_best_eval=True):\n",
    "        acc_result = list()\n",
    "        train_pred = np.empty(train_data.shape[0])\n",
    "        test_pred = np.empty(test_data.shape[0])\n",
    "        if drop_test_y:\n",
    "            dtest = test_data.drop(columns=self.label)\n",
    "        else:\n",
    "            dtest = test_data\n",
    "\n",
    "        models = list()\n",
    "        for train_id, eval_id in k_fold.split(train_data):\n",
    "            train_df = train_data.loc[train_id]\n",
    "            eval_df = train_data.loc[eval_id]\n",
    "            self.train(train_df, eval_df, params, use_best_eval)\n",
    "            models.append(copy.deepcopy(self.clf))\n",
    "            train_pred[eval_id] = self.clf.predict(eval_df.drop(columns=self.label), num_iteration=self.best_round)\n",
    "            if self.metric == 'auc':\n",
    "                y_pred = self.clf.predict(eval_df.drop(columns=[self.label]), num_iteration=self.best_round)\n",
    "            else:\n",
    "                y_pred = (self.clf.predict(eval_df.drop(columns=[self.label]),\n",
    "                                           num_iteration=self.best_round) > 0.5).astype(int)\n",
    "            acc_result.append(self.get_loss(eval_df[self.label], y_pred))\n",
    "            test_pred += self.clf.predict(dtest, num_iteration=self.best_round)\n",
    "        test_pred /= k_fold.n_splits\n",
    "        return train_pred, test_pred, acc_result, models"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/pandas/core/generic.py:4147: PerformanceWarning: dropping on a non-lexsorted multi-index without a level parameter may impact performance.\n",
      "  obj = obj._drop_axis(labels, axis, level=level, errors=errors)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "The minimum is attained in round 372\n",
      "Finished loading model, total used 2000 iterations\n",
      "The minimum is attained in round 444\n",
      "Finished loading model, total used 2000 iterations\n",
      "The minimum is attained in round 305\n",
      "Finished loading model, total used 2000 iterations\n",
      "The minimum is attained in round 416\n",
      "Finished loading model, total used 2000 iterations\n",
      "The minimum is attained in round 353\n",
      "Finished loading model, total used 2000 iterations\n"
     ]
    }
   ],
   "source": [
    "#训练\n",
    "train_data = f_train_df.loc[:,:]\n",
    "train_data['loan_status']=list(y_train)\n",
    "test_data = f_test_df.loc[:,:]\n",
    "test_data['loan_status']=list(y_test)\n",
    "fitter = LGBFitter(label='loan_status')\n",
    "params = {'num_thread': 4, 'num_leaves': 12, 'metric': 'binary', 'objective': 'binary',\n",
    "                'num_round': 2000, 'learning_rate': 0.02, 'feature_fraction': 0.8, 'bagging_fraction': 0.8}\n",
    "from sklearn.model_selection import KFold\n",
    "kfold = KFold(n_splits=5)\n",
    "fitter_result=fitter.train_k_fold(kfold, train_data, test_data, params = params)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(array([0.99522488, 0.96265258, 0.99509   , ..., 0.9710991 , 0.98292765,\n",
       "        0.77866755]),\n",
       " array([0.24821202, 0.99336799, 0.99296283, ..., 0.99175926, 0.27199719,\n",
       "        0.99536096]),\n",
       " [0.08150000000000002,\n",
       "  0.08025000000000004,\n",
       "  0.08299999999999996,\n",
       "  0.07799999999999996,\n",
       "  0.08399999999999996],\n",
       " [<lightgbm.basic.Booster at 0x7fc91e640f10>,\n",
       "  <lightgbm.basic.Booster at 0x7fc91e62add0>,\n",
       "  <lightgbm.basic.Booster at 0x7fc91e6dcd10>,\n",
       "  <lightgbm.basic.Booster at 0x7fc91e6e2a90>,\n",
       "  <lightgbm.basic.Booster at 0x7fc91e6dcd50>])"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "fitter_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/pandas/core/generic.py:4147: PerformanceWarning: dropping on a non-lexsorted multi-index without a level parameter may impact performance.\n",
      "  obj = obj._drop_axis(labels, axis, level=level, errors=errors)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "准确率\n",
      "0.9218\n",
      "f1score\n",
      "0.9507804632426988\n",
      "auc\n",
      "0.8822424486635149\n",
      "混淆矩阵\n",
      "[[1665  377]\n",
      " [ 405 7553]]\n",
      "1\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/pandas/core/generic.py:4147: PerformanceWarning: dropping on a non-lexsorted multi-index without a level parameter may impact performance.\n",
      "  obj = obj._drop_axis(labels, axis, level=level, errors=errors)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "准确率\n",
      "0.9201\n",
      "f1score\n",
      "0.9496629496629495\n",
      "auc\n",
      "0.8809923129731778\n",
      "混淆矩阵\n",
      "[[1664  378]\n",
      " [ 421 7537]]\n",
      "2\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/pandas/core/generic.py:4147: PerformanceWarning: dropping on a non-lexsorted multi-index without a level parameter may impact performance.\n",
      "  obj = obj._drop_axis(labels, axis, level=level, errors=errors)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "准确率\n",
      "0.921\n",
      "f1score\n",
      "0.9504764292878636\n",
      "auc\n",
      "0.8751867972871287\n",
      "混淆矩阵\n",
      "[[1629  413]\n",
      " [ 377 7581]]\n",
      "3\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/pandas/core/generic.py:4147: PerformanceWarning: dropping on a non-lexsorted multi-index without a level parameter may impact performance.\n",
      "  obj = obj._drop_axis(labels, axis, level=level, errors=errors)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "准确率\n",
      "0.9187\n",
      "f1score\n",
      "0.9489032744642072\n",
      "auc\n",
      "0.875379963712527\n",
      "混淆矩阵\n",
      "[[1638  404]\n",
      " [ 409 7549]]\n",
      "4\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/pandas/core/generic.py:4147: PerformanceWarning: dropping on a non-lexsorted multi-index without a level parameter may impact performance.\n",
      "  obj = obj._drop_axis(labels, axis, level=level, errors=errors)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "准确率\n",
      "0.9204\n",
      "f1score\n",
      "0.9500752634219769\n",
      "auc\n",
      "0.8749918462722635\n",
      "混淆矩阵\n",
      "[[1630  412]\n",
      " [ 384 7574]]\n"
     ]
    }
   ],
   "source": [
    "#查看验证集在各个选出的模型的效果\n",
    "from sklearn import metrics\n",
    "from sklearn.metrics import accuracy_score,roc_auc_score\n",
    "from sklearn.metrics import f1_score \n",
    "from sklearn.metrics import confusion_matrix\n",
    "p=0\n",
    "y_test=test_data['loan_status']\n",
    "for gbm1 in fitter_result[3]:\n",
    "    print(p)\n",
    "    y_pred0 = gbm1.predict(test_data.drop(columns='loan_status'))\n",
    "    y_pred=[1 if x >=0.5 else 0 for x in y_pred0]\n",
    "    y_train=train_data['loan_status']\n",
    "    print('准确率')\n",
    "    print(accuracy_score(y_test,y_pred))\n",
    "    print('f1score')\n",
    "    print(f1_score(y_test,y_pred)) \n",
    "    print('auc')\n",
    "    print(roc_auc_score(y_test,y_pred))\n",
    "    print('混淆矩阵')\n",
    "    print(confusion_matrix(y_test,y_pred))\n",
    "    p=p+1"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 八.lightgbm自动调参最优结果"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Built 10585 features\n",
      "Elapsed: 00:10 | Progress:  95%|█████████▍"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/pandas/core/frame.py:4481: PerformanceWarning: DataFrame is highly fragmented.  This is usually the result of calling `frame.insert` many times, which has poor performance.  Consider using pd.concat instead.  To get a de-fragmented frame, use `newframe = frame.copy()`\n",
      "  data[k] = com.apply_if_callable(v, data)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Elapsed: 02:20 | Progress: 100%|██████████\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead tr th {\n",
       "        text-align: left;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr>\n",
       "      <th></th>\n",
       "      <th>continuous_annual_inc</th>\n",
       "      <th>continuous_dti</th>\n",
       "      <th>continuous_fico_range_high</th>\n",
       "      <th>continuous_installment</th>\n",
       "      <th>continuous_last_fico_range_high</th>\n",
       "      <th>continuous_annual_inc + continuous_delinq_2yrs</th>\n",
       "      <th>continuous_annual_inc + continuous_fico_range_high</th>\n",
       "      <th>continuous_annual_inc + continuous_funded_amnt</th>\n",
       "      <th>continuous_annual_inc + continuous_funded_amnt_inv</th>\n",
       "      <th>continuous_annual_inc + continuous_inq_last_6mths</th>\n",
       "      <th>...</th>\n",
       "      <th>discrete_sub_grade_15_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_17_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_19_one_hot + discrete_sub_grade_9_one_hot</th>\n",
       "      <th>discrete_sub_grade_27_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_28_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_30_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_32_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_6_one_hot + discrete_term_2_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_term_1_one_hot</th>\n",
       "      <th>discrete_sub_grade_8_one_hot + discrete_term_2_one_hot</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>35000.0</td>\n",
       "      <td>25.65</td>\n",
       "      <td>664.0</td>\n",
       "      <td>428.63</td>\n",
       "      <td>659.0</td>\n",
       "      <td>35001.0</td>\n",
       "      <td>35664.0</td>\n",
       "      <td>48000.0</td>\n",
       "      <td>48000.0</td>\n",
       "      <td>35001.0</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>50000.0</td>\n",
       "      <td>22.01</td>\n",
       "      <td>729.0</td>\n",
       "      <td>339.10</td>\n",
       "      <td>769.0</td>\n",
       "      <td>50000.0</td>\n",
       "      <td>50729.0</td>\n",
       "      <td>61000.0</td>\n",
       "      <td>61000.0</td>\n",
       "      <td>50000.0</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>100000.0</td>\n",
       "      <td>26.39</td>\n",
       "      <td>764.0</td>\n",
       "      <td>301.15</td>\n",
       "      <td>684.0</td>\n",
       "      <td>100000.0</td>\n",
       "      <td>100764.0</td>\n",
       "      <td>110000.0</td>\n",
       "      <td>110000.0</td>\n",
       "      <td>100001.0</td>\n",
       "      <td>...</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>55000.0</td>\n",
       "      <td>12.26</td>\n",
       "      <td>664.0</td>\n",
       "      <td>318.75</td>\n",
       "      <td>724.0</td>\n",
       "      <td>55000.0</td>\n",
       "      <td>55664.0</td>\n",
       "      <td>68800.0</td>\n",
       "      <td>68800.0</td>\n",
       "      <td>55000.0</td>\n",
       "      <td>...</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>50000.0</td>\n",
       "      <td>17.55</td>\n",
       "      <td>674.0</td>\n",
       "      <td>491.23</td>\n",
       "      <td>574.0</td>\n",
       "      <td>50000.0</td>\n",
       "      <td>50674.0</td>\n",
       "      <td>63550.0</td>\n",
       "      <td>63550.0</td>\n",
       "      <td>50003.0</td>\n",
       "      <td>...</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "      <td>1.0</td>\n",
       "      <td>0.0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 692 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "  continuous_annual_inc continuous_dti continuous_fico_range_high  \\\n",
       "0               35000.0          25.65                      664.0   \n",
       "1               50000.0          22.01                      729.0   \n",
       "2              100000.0          26.39                      764.0   \n",
       "3               55000.0          12.26                      664.0   \n",
       "4               50000.0          17.55                      674.0   \n",
       "\n",
       "  continuous_installment continuous_last_fico_range_high  \\\n",
       "0                 428.63                           659.0   \n",
       "1                 339.10                           769.0   \n",
       "2                 301.15                           684.0   \n",
       "3                 318.75                           724.0   \n",
       "4                 491.23                           574.0   \n",
       "\n",
       "  continuous_annual_inc + continuous_delinq_2yrs  \\\n",
       "0                                        35001.0   \n",
       "1                                        50000.0   \n",
       "2                                       100000.0   \n",
       "3                                        55000.0   \n",
       "4                                        50000.0   \n",
       "\n",
       "  continuous_annual_inc + continuous_fico_range_high  \\\n",
       "0                                            35664.0   \n",
       "1                                            50729.0   \n",
       "2                                           100764.0   \n",
       "3                                            55664.0   \n",
       "4                                            50674.0   \n",
       "\n",
       "  continuous_annual_inc + continuous_funded_amnt  \\\n",
       "0                                        48000.0   \n",
       "1                                        61000.0   \n",
       "2                                       110000.0   \n",
       "3                                        68800.0   \n",
       "4                                        63550.0   \n",
       "\n",
       "  continuous_annual_inc + continuous_funded_amnt_inv  \\\n",
       "0                                            48000.0   \n",
       "1                                            61000.0   \n",
       "2                                           110000.0   \n",
       "3                                            68800.0   \n",
       "4                                            63550.0   \n",
       "\n",
       "  continuous_annual_inc + continuous_inq_last_6mths  ...  \\\n",
       "0                                           35001.0  ...   \n",
       "1                                           50000.0  ...   \n",
       "2                                          100001.0  ...   \n",
       "3                                           55000.0  ...   \n",
       "4                                           50003.0  ...   \n",
       "\n",
       "  discrete_sub_grade_15_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0        \n",
       "1                                                0.0        \n",
       "2                                                1.0        \n",
       "3                                                1.0        \n",
       "4                                                0.0        \n",
       "\n",
       "  discrete_sub_grade_17_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0        \n",
       "1                                                1.0        \n",
       "2                                                1.0        \n",
       "3                                                0.0        \n",
       "4                                                1.0        \n",
       "\n",
       "  discrete_sub_grade_19_one_hot + discrete_sub_grade_9_one_hot  \\\n",
       "0                                                1.0             \n",
       "1                                                0.0             \n",
       "2                                                0.0             \n",
       "3                                                0.0             \n",
       "4                                                0.0             \n",
       "\n",
       "  discrete_sub_grade_27_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0        \n",
       "1                                                0.0        \n",
       "2                                                0.0        \n",
       "3                                                1.0        \n",
       "4                                                0.0        \n",
       "\n",
       "  discrete_sub_grade_28_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0        \n",
       "1                                                0.0        \n",
       "2                                                0.0        \n",
       "3                                                1.0        \n",
       "4                                                0.0        \n",
       "\n",
       "  discrete_sub_grade_30_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0        \n",
       "1                                                1.0        \n",
       "2                                                1.0        \n",
       "3                                                0.0        \n",
       "4                                                1.0        \n",
       "\n",
       "  discrete_sub_grade_32_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0        \n",
       "1                                                1.0        \n",
       "2                                                1.0        \n",
       "3                                                0.0        \n",
       "4                                                1.0        \n",
       "\n",
       "  discrete_sub_grade_6_one_hot + discrete_term_2_one_hot  \\\n",
       "0                                                0.0       \n",
       "1                                                0.0       \n",
       "2                                                0.0       \n",
       "3                                                1.0       \n",
       "4                                                0.0       \n",
       "\n",
       "  discrete_sub_grade_8_one_hot + discrete_term_1_one_hot  \\\n",
       "0                                                1.0       \n",
       "1                                                1.0       \n",
       "2                                                1.0       \n",
       "3                                                0.0       \n",
       "4                                                1.0       \n",
       "\n",
       "  discrete_sub_grade_8_one_hot + discrete_term_2_one_hot  \n",
       "0                                                0.0      \n",
       "1                                                0.0      \n",
       "2                                                0.0      \n",
       "3                                                1.0      \n",
       "4                                                0.0      \n",
       "\n",
       "[5 rows x 692 columns]"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#测试集准备\n",
    "## 建立实体\n",
    "df=test_final.drop(columns='loan_status')\n",
    "es = ft.EntitySet(id = 'final')\n",
    "es.entity_from_dataframe(entity_id = 'final', dataframe = df, make_index=True,index = 'index')\n",
    "\n",
    "## 使用dfs算法构造特征\n",
    "feature_final_matrix, feature_final_defs = ft.dfs(entityset = es, target_entity = 'final',max_depth=1,verbose=1,\n",
    "                                  trans_primitives = ['add_numeric'])\n",
    "f_final_df = pd.DataFrame(si.fit_transform(feature_final_matrix.loc[:,f_names]))\n",
    "f_final_df.columns=[f_names]\n",
    "f_final_df.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "准确率\n",
      "0.91492\n",
      "f1score\n",
      "0.9467251095804633\n",
      "auc\n",
      "0.8764094234467015\n",
      "混淆矩阵\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "array([[ 7948,  1826],\n",
       "       [ 2428, 37798]])"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#从验证集结果看 ，首个模型效果较好，所以使用首个模型参数 \n",
    "lgm_best=fitter_result[3][0]\n",
    "#对应测试集效果\n",
    "y_pred0=lgm_best.predict(f_final_df)\n",
    "y_pred=[1 if x >=0.5 else 0 for x in y_pred0]\n",
    "print('准确率')\n",
    "print(accuracy_score(test_final['loan_status'],y_pred))\n",
    "print('f1score')\n",
    "print(f1_score(test_final['loan_status'],y_pred)) \n",
    "print('auc')\n",
    "print(roc_auc_score(test_final['loan_status'],y_pred))\n",
    "print('混淆矩阵')\n",
    "confusion_matrix(test_final['loan_status'],y_pred)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "14141"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#清空多余的大变量\n",
    "del es,df,feature_matrix,f_df,f_matrix,f_defs,f_new,feature_test_matrix, feature_test_defs,feature_final_matrix, feature_final_defs\n",
    "import gc\n",
    "gc.collect()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 九.Stacking模型集成\n",
    "参考文献：\n",
    "* [mlxtend](https://github.com/rasbt/mlxtend)\n",
    "* [集成学习之3.Stacking](https://blog.csdn.net/webzhuce/article/details/108683626)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/sklearn/svm/_base.py:1201: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n",
      "  ConvergenceWarning,\n",
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/sklearn/svm/_base.py:1201: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n",
      "  ConvergenceWarning,\n",
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/sklearn/svm/_base.py:1201: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n",
      "  ConvergenceWarning,\n",
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/sklearn/svm/_base.py:1201: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n",
      "  ConvergenceWarning,\n",
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/sklearn/svm/_base.py:1201: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n",
      "  ConvergenceWarning,\n",
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/sklearn/svm/_base.py:1201: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.\n",
      "  ConvergenceWarning,\n",
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/sklearn/utils/validation.py:1677: FutureWarning: Feature names only support names that are all strings. Got feature names with dtypes: ['tuple']. An error will be raised in 1.2.\n",
      "  FutureWarning,\n",
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/sklearn/base.py:442: UserWarning: X does not have valid feature names, but LinearSVC was fitted with feature names\n",
      "  \"X does not have valid feature names, but\"\n",
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/sklearn/utils/validation.py:1677: FutureWarning: Feature names only support names that are all strings. Got feature names with dtypes: ['tuple']. An error will be raised in 1.2.\n",
      "  FutureWarning,\n",
      "/Applications/anaconda3/envs/mypy37/lib/python3.7/site-packages/sklearn/base.py:442: UserWarning: X does not have valid feature names, but GradientBoostingClassifier was fitted with feature names\n",
      "  \"X does not have valid feature names, but\"\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "准确率\n",
      "0.91562\n",
      "f1score\n",
      "0.9472473336084124\n",
      "auc\n",
      "0.8750630530504491\n",
      "混淆矩阵\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "array([[ 7902,  1872],\n",
       "       [ 2347, 37879]])"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn import svm\n",
    "from sklearn.tree import DecisionTreeClassifier\n",
    "from sklearn.ensemble import GradientBoostingClassifier\n",
    "from sklearn.linear_model import LogisticRegression\n",
    "from lightgbm import LGBMClassifier\n",
    "from mlxtend.classifier import StackingCVClassifier\n",
    "\n",
    "svclf = svm.LinearSVC()\n",
    "lgblf=LGBMClassifier()\n",
    "gbdtclf = GradientBoostingClassifier(learning_rate=0.7)\n",
    "lrclf = LogisticRegression()\n",
    "scclf = StackingCVClassifier(classifiers=[svclf,gbdtclf,lgblf], meta_classifier=lrclf, cv=5)\n",
    "scclf.fit(train_data.drop(columns='loan_status'), y_train)\n",
    "\n",
    "#结果\n",
    "y_pred0 = scclf.predict(f_final_df)\n",
    "y_pred=[1 if x >=0.5 else 0 for x in y_pred0]\n",
    "print('准确率')\n",
    "print(accuracy_score(test_final['loan_status'],y_pred))\n",
    "print('f1score')\n",
    "print(f1_score(test_final['loan_status'],y_pred)) \n",
    "print('auc')\n",
    "print(roc_auc_score(test_final['loan_status'],y_pred))\n",
    "print('混淆矩阵')\n",
    "confusion_matrix(test_final['loan_status'],y_pred)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "colab": {
   "collapsed_sections": [],
   "machine_shape": "hm",
   "name": "chap06.ipynb",
   "provenance": []
  },
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.10"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
