{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "6455b906",
   "metadata": {},
   "source": [
    "## Import required packages"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "38a3e5db",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-09-10T11:06:28.258343Z",
     "start_time": "2021-09-10T11:06:27.871049Z"
    },
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\u001b[33mcommit 26eb6bea33b8e8428ee1afcd4403ccae2948724e\u001b[m\u001b[33m (\u001b[m\u001b[1;36mHEAD -> \u001b[m\u001b[1;32mmaster\u001b[m\u001b[33m, \u001b[m\u001b[1;31morigin/master\u001b[m\u001b[33m, \u001b[m\u001b[1;31morigin/HEAD\u001b[m\u001b[33m)\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Sep 10 19:10:12 2021 +0800\r\n",
      "\r\n",
      "    debug: task_type\r\n",
      "\r\n",
      "\u001b[33mcommit 58820caad3acc6d2b1fae7a81e051d3fb30f13d3\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Sep 10 14:35:18 2021 +0800\r\n",
      "\r\n",
      "    优化log\r\n",
      "\r\n",
      "\u001b[33mcommit 51704abfd80114578eab318356cc77b1ef46e18b\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Sep 10 14:30:35 2021 +0800\r\n",
      "\r\n",
      "    1. 增加case: kaggle springleaf;\r\n",
      "    2. 优化autox get_submit逻辑\r\n",
      "\r\n",
      "\u001b[33mcommit 6455e62326d344b33a37f100b4fecf2dcb637c8a\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Sep 2 16:50:56 2021 +0800\r\n",
      "\r\n",
      "    增加ieee结果和pipeline demo.\r\n",
      "\r\n",
      "\u001b[33mcommit 74d679c47ae2e0639d02b994e6cf1f6f84dfe560\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Sep 2 15:44:15 2021 +0800\r\n",
      "\r\n",
      "    debug for feature_filter.\r\n",
      "\r\n",
      "\u001b[33mcommit 75c9510e049cfdbaa57f07b3f4306f1a161fccea\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Sep 2 14:48:42 2021 +0800\r\n",
      "\r\n",
      "    优化groupby key筛选条件.\r\n",
      "\r\n",
      "\u001b[33mcommit ff2bb3fb04a5b84feca94b26de7ac6048cc36c7b\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Wed Sep 1 17:37:03 2021 +0800\r\n",
      "\r\n",
      "    debug: fe_rank\r\n",
      "\r\n",
      "\u001b[33mcommit 1fe1f1732606a5dbf007270c2dbae1711b5a72b6\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Wed Sep 1 16:27:16 2021 +0800\r\n",
      "\r\n",
      "    debug: 拼接1-1简单表.\r\n",
      "\r\n",
      "\u001b[33mcommit c7b7964fb2713118d6e85d0ef22a384f924143be\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Wed Sep 1 16:04:23 2021 +0800\r\n",
      "\r\n",
      "    增加功能，拼接1-1简单表;\r\n",
      "    kaggle_ieee, demo;\r\n",
      "    modify README.md.\r\n",
      "\r\n",
      "\u001b[33mcommit 21457fafb8d01644cfc668d0aab8d463a8cda3e7\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Tue Aug 31 15:45:24 2021 +0800\r\n",
      "\r\n",
      "    modify README\r\n",
      "\r\n",
      "\u001b[33mcommit 2c2cf54574a8a9c6c21f5452ed5f5bcf4b3ae7ef\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Tue Aug 31 15:25:09 2021 +0800\r\n",
      "\r\n",
      "    modify README_EN.md\r\n",
      "\r\n",
      "\u001b[33mcommit 3da3ba229d78d81d844226c0d584d1da6572109a\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Aug 30 20:34:13 2021 +0800\r\n",
      "\r\n",
      "    init Fe_rank.\r\n",
      "\r\n",
      "\u001b[33mcommit 7f2e3717b84ebdd223037ce2ac63740d46571a9a\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Aug 30 17:32:12 2021 +0800\r\n",
      "\r\n",
      "    add rank feature.\r\n",
      "\r\n",
      "\u001b[33mcommit b3fa6719c0052b964f0d74a6bf9a8941c488d915\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Aug 30 10:43:40 2021 +0800\r\n",
      "\r\n",
      "    add demo: kaggle house price.\r\n",
      "\r\n",
      "\u001b[33mcommit 59b7d261d059f84a291fda6013f7eeffdcae9987\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Sun Aug 29 08:03:49 2021 +0800\r\n",
      "\r\n",
      "    modify README_EN.md, 跳转链接.\r\n",
      "\r\n",
      "\u001b[33mcommit f2581a891bc919b3c0f11fb4c7cf700ecedc2f73\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Sun Aug 29 07:57:44 2021 +0800\r\n",
      "\r\n",
      "    modify README_EN.md\r\n",
      "\r\n",
      "\u001b[33mcommit 50b186979fa431cdfaed38a508f1a134d2e7e0f1\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Aug 27 17:44:44 2021 +0800\r\n",
      "\r\n",
      "    modify README.md, 新增kaggle house price数据集.\r\n",
      "\r\n",
      "\u001b[33mcommit 08e2dc8e069ffdd5f5dea5870af971d7b2cbe1df\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Aug 27 16:39:01 2021 +0800\r\n",
      "\r\n",
      "    install_requires, 忽略tabnet.\r\n",
      "\r\n",
      "\u001b[33mcommit 89611b6d10d4492cde5b6f390d2c4077977b66f1\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Aug 27 15:52:55 2021 +0800\r\n",
      "\r\n",
      "    xgb打印轮次设置为100\r\n",
      "\r\n",
      "\u001b[33mcommit 984d81a49150edd80d177137ceb748591db1a04d\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Aug 27 15:42:14 2021 +0800\r\n",
      "\r\n",
      "    回归模型调参,修改验证集切分方式.\r\n",
      "\r\n",
      "\u001b[33mcommit 7fb12ebf09d04b0a617896ab1a63474fc8bb55a5\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Aug 27 15:09:13 2021 +0800\r\n",
      "\r\n",
      "    优化特征类型识别.\r\n",
      "\r\n",
      "\u001b[33mcommit 39f94e1e4cd82c6d148556d80bb47146bfc8d539\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Aug 27 14:58:53 2021 +0800\r\n",
      "\r\n",
      "    优化特征类型识别.\r\n",
      "\r\n",
      "\u001b[33mcommit 9f78099656c32f335a9134ea6358f131891699f0\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Aug 26 17:02:52 2021 +0800\r\n",
      "\r\n",
      "    modify readme.\r\n",
      "\r\n",
      "\u001b[33mcommit 656c91218b289fe61cf21855e16b6895acec2c78\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Tue Aug 24 15:21:30 2021 +0800\r\n",
      "\r\n",
      "    优化readme,增加zhidemai比赛上分点总结\r\n",
      "\r\n",
      "\u001b[33mcommit 0aa3748f2a06d3639f6afeb94e33bca7d0bdeea8\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Aug 20 14:33:54 2021 +0800\r\n",
      "\r\n",
      "    modify README.md\r\n",
      "\r\n",
      "\u001b[33mcommit 0cfd6d5cf86fee9b1a02b79a94ffd97c0b8a166a\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Aug 19 20:14:02 2021 +0800\r\n",
      "\r\n",
      "    setup安装包增加tabnet.\r\n",
      "\r\n",
      "\u001b[33mcommit 84af1a14acd1e121bfcae1afde06f00b80df614d\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Wed Aug 18 11:52:08 2021 +0800\r\n",
      "\r\n",
      "    debug: tabnet的调参参数配置\r\n",
      "\r\n",
      "\u001b[33mcommit d185a546260127b5faffab105d3a2c0eaafb69bc\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Tue Aug 17 21:19:57 2021 +0800\r\n",
      "\r\n",
      "    tabnet, reshape y\r\n",
      "\r\n",
      "\u001b[33mcommit 388c00372b8f4af10bed4212ccc1bdf6e3f54275\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Tue Aug 17 21:01:59 2021 +0800\r\n",
      "\r\n",
      "    debug, tabnet.\r\n",
      "\r\n",
      "\u001b[33mcommit 18bc69af5802750f5ec23312e7ab649ddc25cfa8\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Tue Aug 17 20:25:31 2021 +0800\r\n",
      "\r\n",
      "    tabnet: 缺失值用中位数填充.\r\n",
      "\r\n",
      "\u001b[33mcommit 5a88c0ff98338dddb8d1406eb1bfd0d2a72f2121\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Tue Aug 17 19:37:39 2021 +0800\r\n",
      "\r\n",
      "    优化tabnet\r\n",
      "\r\n",
      "\u001b[33mcommit 1e07509db81c4c6e3222ae6697ef5c119a2eef31\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Tue Aug 17 16:08:43 2021 +0800\r\n",
      "\r\n",
      "    bagging中增加tabnet模型\r\n",
      "\r\n",
      "\u001b[33mcommit fbed7f9fb73e4a9ac143398902fd042a6fa54247\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Tue Aug 17 16:05:36 2021 +0800\r\n",
      "\r\n",
      "    tabnet regressor\r\n",
      "\r\n",
      "\u001b[33mcommit 8e89749c53b5ebddc86cb9b8ace762f7b3854841\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Tue Aug 17 15:21:32 2021 +0800\r\n",
      "\r\n",
      "    debug模式下缩短调参时间。\r\n",
      "\r\n",
      "\u001b[33mcommit 9c70574e38194766cbecca0822b2cdd48867144b\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Tue Aug 17 15:13:04 2021 +0800\r\n",
      "\r\n",
      "    debug模型打印日志.\r\n",
      "\r\n",
      "\u001b[33mcommit 7b18599f132699f056470761701968931da3f7a9\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Tue Aug 17 15:04:15 2021 +0800\r\n",
      "\r\n",
      "    增加debug模式，方便快速调试.\r\n",
      "\r\n",
      "\u001b[33mcommit d2d332b0bb5432e0ef49df01455c81a2644e7271\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Aug 16 08:00:11 2021 +0800\r\n",
      "\r\n",
      "    auto_label_encoder,设置silence_cols\r\n",
      "\r\n",
      "\u001b[33mcommit 1b8ced5337d1826ff6dadec235c8cc5a00cb4e89\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Sun Aug 15 08:42:35 2021 +0800\r\n",
      "\r\n",
      "    内存优化.\r\n",
      "\r\n",
      "\u001b[33mcommit fab33ba59407c96ac146c4ad6865a32f06b8fa34\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Wed Aug 11 10:53:16 2021 +0800\r\n",
      "\r\n",
      "    增加二分类模型.\r\n",
      "\r\n",
      "\u001b[33mcommit 373c58eb950fbc364581d75b493ecaa1735079ed\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Aug 9 15:28:00 2021 +0800\r\n",
      "\r\n",
      "    识别任务类型\r\n",
      "\r\n",
      "\u001b[33mcommit 44755fa33a1a6f59239786ab80de9d521c72b68c\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Aug 6 16:19:49 2021 +0800\r\n",
      "\r\n",
      "    lgb, Verbose = 100\r\n",
      "\r\n",
      "\u001b[33mcommit e18d2dd86b63d4e253b3ad67017aeb82546cead3\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Aug 6 13:21:03 2021 +0800\r\n",
      "\r\n",
      "    优化CrossXgbRegression.\r\n",
      "\r\n",
      "\u001b[33mcommit 6264775e9faec6d832cdb59819bca6534ced7401\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Aug 6 11:41:03 2021 +0800\r\n",
      "\r\n",
      "    优化CrossXgbRegression: X进行StandardScaler, debug.\r\n",
      "\r\n",
      "\u001b[33mcommit 53135e94a4a0a0b079bf83df3db8e687e5ce0dc5\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Aug 6 11:20:31 2021 +0800\r\n",
      "\r\n",
      "    优化CrossXgbRegression: X进行StandardScaler\r\n",
      "\r\n",
      "\u001b[33mcommit f0ac9242246a87dcee3e660c971cc33c5246f0bb\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Fri Aug 6 10:38:23 2021 +0800\r\n",
      "\r\n",
      "    优化CrossXgbRegression\r\n",
      "\r\n",
      "\u001b[33mcommit 75156d600497bb0910a8026aa87b2e7dc964ba79\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Aug 5 22:34:31 2021 +0800\r\n",
      "\r\n",
      "    xgb model: tree_method='gpu_hist'\r\n",
      "\r\n",
      "\u001b[33mcommit afb229aa554f632badee1a171c28c991968eb331\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Aug 5 20:58:11 2021 +0800\r\n",
      "\r\n",
      "    模型部分使用xgb和lgb融合\r\n",
      "\r\n",
      "\u001b[33mcommit 338ee3069d797cc10fe6d0f8a38afdc308cfdc71\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Aug 5 20:29:23 2021 +0800\r\n",
      "\r\n",
      "    del temp.py\r\n",
      "\r\n",
      "\u001b[33mcommit c0dbe0887b53f7e8f02a6a3e9bcde803059ae973\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Aug 5 19:53:05 2021 +0800\r\n",
      "\r\n",
      "    debug: X.iloc\r\n",
      "\r\n",
      "\u001b[33mcommit 2b806aedc6a2086268b1dadac4111ef1b1b1d83b\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Aug 5 19:23:17 2021 +0800\r\n",
      "\r\n",
      "    debug: xgb regressor\r\n",
      "\r\n",
      "\u001b[33mcommit 5afbd56bdfb29e33d52d908a03c85508ad4e3d08\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Aug 5 17:13:38 2021 +0800\r\n",
      "\r\n",
      "    xgboost不使用gpu_hist\r\n",
      "\r\n",
      "\u001b[33mcommit 1c56af83e7f90ec4fcc594fd87dcd0f6b9abaf8c\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Aug 5 17:10:11 2021 +0800\r\n",
      "\r\n",
      "    xgboost不使用gpu\r\n",
      "\r\n",
      "\u001b[33mcommit b25c37c96115d5845841df1781eabbf4345af621\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Aug 5 16:55:16 2021 +0800\r\n",
      "\r\n",
      "    增加xgb模型.\r\n",
      "\r\n",
      "\u001b[33mcommit 9af12e41e9a0a3295e63a0fe17988261e63050ee\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Aug 5 11:02:14 2021 +0800\r\n",
      "\r\n",
      "    get_submit, 优化模型训练部分\r\n",
      "\r\n",
      "\u001b[33mcommit d127c4e4a9b17f0244ec14af11482db60c261d21\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Wed Aug 4 16:29:34 2021 +0800\r\n",
      "\r\n",
      "    debug: log输出.\r\n",
      "\r\n",
      "\u001b[33mcommit 3178f49c0ae81f5d9ba084d5dad1563804f457a4\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Wed Aug 4 15:24:57 2021 +0800\r\n",
      "\r\n",
      "    增加模型调参功能.\r\n",
      "\r\n",
      "\u001b[33mcommit eb97b2420853e8e0fddd55343c6029ee6da8b4b3\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Wed Aug 4 15:02:27 2021 +0800\r\n",
      "\r\n",
      "    debug: concat_train_test操作在自动特征类型识别之后.\r\n",
      "\r\n",
      "\u001b[33mcommit 3452d8831ebe8d33f717ad16452109680aa8ef1f\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Aug 2 20:00:16 2021 +0800\r\n",
      "\r\n",
      "    调整target encoding的阈值.\r\n",
      "\r\n",
      "\u001b[33mcommit 0d66de573edd6485725d603620c1344bf41e6222\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Aug 2 19:48:30 2021 +0800\r\n",
      "\r\n",
      "    debug:del_targetencoding_cols去重.\r\n",
      "\r\n",
      "\u001b[33mcommit dc4df8ef3a70a0532578ee6e043223e1f219b60d\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Aug 2 19:45:35 2021 +0800\r\n",
      "\r\n",
      "    debug: del_targetencoding_cols去重.\r\n",
      "\r\n",
      "\u001b[33mcommit 4ecb985b6d6b46145743338c2ed3bd28e3f7977f\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Aug 2 19:34:08 2021 +0800\r\n",
      "\r\n",
      "    debug.\r\n",
      "\r\n",
      "\u001b[33mcommit a156ca854d341072d23df29623f82c819e5c5b81\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Aug 2 19:31:32 2021 +0800\r\n",
      "\r\n",
      "    target encoding特征筛选：test做了target encoding之后，有值的部分要大于90%\r\n",
      "\r\n",
      "\u001b[33mcommit ba93d457a017bb65bf3dc8d4676cea232a48c88c\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Jul 26 17:36:55 2021 +0800\r\n",
      "\r\n",
      "    内存优化, 优化log.\r\n",
      "\r\n",
      "\u001b[33mcommit 5cda17ae252012184192ba10ba941b7eabd1946d\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Jul 26 17:31:49 2021 +0800\r\n",
      "\r\n",
      "    内存优化.\r\n",
      "\r\n",
      "\u001b[33mcommit 0714e370615f919b92995b96eeea79dc475f1064\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Jul 26 14:52:50 2021 +0800\r\n",
      "\r\n",
      "    target encoding feature: 默认使用统计信息进行特征筛选\r\n",
      "\r\n",
      "\u001b[33mcommit cb928b678996a700a8e2c37ae725d5baab573558\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Jul 26 14:45:42 2021 +0800\r\n",
      "\r\n",
      "    target encoding feature: 优化统计信息筛选阈值\r\n",
      "\r\n",
      "\u001b[33mcommit e2a3e989e4b8b911663d871decfdcff05c818f45\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Mon Jul 26 14:39:50 2021 +0800\r\n",
      "\r\n",
      "    debug target encoding feature.\r\n",
      "\r\n",
      "\u001b[33mcommit 855c6c962ac0e9a716c9bc35441fee35bb89bf65\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Sat Jul 24 10:25:33 2021 +0800\r\n",
      "\r\n",
      "    add license file\r\n",
      "\r\n",
      "\u001b[33mcommit b5297ac2334e9b6d008d0e2d1c7a7e6b7dd61b78\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Jul 22 15:20:24 2021 +0800\r\n",
      "\r\n",
      "    modify README.md;\r\n",
      "    增加zhidemai_automl.ipynb.\r\n",
      "\r\n",
      "\u001b[33mcommit 8fb15db690010c060a11e3d28d5a9fdaa268113a\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Jul 22 14:36:09 2021 +0800\r\n",
      "\r\n",
      "    del sub files.\r\n",
      "\r\n",
      "\u001b[33mcommit 74ef3c0664934bb0033c178e50df9fee0986df55\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Jul 22 14:29:32 2021 +0800\r\n",
      "\r\n",
      "    first commit\r\n",
      "\r\n",
      "\u001b[33mcommit 4d75036cbf5db2927ba3233a9cdda4a32c022d85\u001b[m\r\n",
      "Author: poteman <946691288@qq.com>\r\n",
      "Date:   Thu Jul 22 14:26:45 2021 +0800\r\n",
      "\r\n",
      "    first commit\r\n"
     ]
    }
   ],
   "source": [
    "!git log"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "9185f791",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-09-10T11:06:31.070656Z",
     "start_time": "2021-09-10T11:06:28.262027Z"
    }
   },
   "outputs": [],
   "source": [
    "from autox import AutoX\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import os\n",
    "from tqdm import tqdm"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "fa24e429",
   "metadata": {},
   "source": [
    "## Configure dataset information"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "c152c7f9",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-09-10T11:06:31.082396Z",
     "start_time": "2021-09-10T11:06:31.075469Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['zhidemai_1month',\n",
       " 'titanic',\n",
       " '.ipynb_checkpoints',\n",
       " '__init__.py',\n",
       " 'homecredit',\n",
       " 'zhidemai_train_test',\n",
       " 'kaggle_house_price',\n",
       " 'zhidemai',\n",
       " 'kaggle_springleaf',\n",
       " 'kaggle_tabular_aug_2021',\n",
       " 'kaggle_ieee',\n",
       " 'zhidemai2',\n",
       " 'zhidemai_competition_0715']"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "os.listdir('data')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "b722824a",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-09-10T11:31:17.501560Z",
     "start_time": "2021-09-10T11:06:31.084042Z"
    },
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "   INFO ->  [+] read sample_submission.csv\n",
      "   INFO ->  Memory usage of dataframe is 2.22 MB\n",
      "   INFO ->  Memory usage after optimization is: 0.69 MB\n",
      "   INFO ->  Decreased by 68.7%\n",
      "   INFO ->  table = sample_submission.csv, shape = (145232, 2)\n",
      "   INFO ->  [+] read train.csv\n",
      "   INFO ->  Memory usage of dataframe is 2142.92 MB\n",
      "   INFO ->  Memory usage after optimization is: 586.16 MB\n",
      "   INFO ->  Decreased by 72.6%\n",
      "   INFO ->  table = train.csv, shape = (145231, 1934)\n",
      "   INFO ->  [+] read test.csv\n",
      "   INFO ->  Memory usage of dataframe is 2141.83 MB\n",
      "   INFO ->  Memory usage after optimization is: 585.46 MB\n",
      "   INFO ->  Decreased by 72.7%\n",
      "   INFO ->  table = test.csv, shape = (145232, 1933)\n"
     ]
    }
   ],
   "source": [
    "# Select the dataset\n",
    "data_name = 'kaggle_springleaf'\n",
    "path = f'./data/{data_name}'\n",
    "\n",
    "# Read the data\n",
    "# Concatenate train and test\n",
    "# Detect the type of each column in the tables\n",
    "\n",
    "autox = AutoX(target = 'target', train_name = 'train.csv', test_name = 'test.csv', \n",
    "               id = ['ID'], path = path\n",
    "#               , feature_type = feature_type\n",
    "             )"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "3215c062",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-09-10T11:31:17.508173Z",
     "start_time": "2021-09-10T11:31:17.503659Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'binary'"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "autox.info_['task_type']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "fc6898fc",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-09-10T15:05:26.426985Z",
     "start_time": "2021-09-10T11:31:17.509564Z"
    },
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "   INFO ->  start feature engineer\n",
      "   INFO ->  feature engineer: Stat\n",
      "   INFO ->  ignore featureStat\n",
      "   INFO ->  feature engineer: Count\n",
      "100%|██████████| 51/51 [00:03<00:00, 15.30it/s]\n",
      "   INFO ->  featureCount ops: [['VAR_0001'], ['VAR_0005'], ['VAR_0008'], ['VAR_0009'], ['VAR_0010'], ['VAR_0011'], ['VAR_0012'], ['VAR_0043'], ['VAR_0044'], ['VAR_0073'], ['VAR_0075'], ['VAR_0156'], ['VAR_0157'], ['VAR_0158'], ['VAR_0159'], ['VAR_0166'], ['VAR_0167'], ['VAR_0168'], ['VAR_0169'], ['VAR_0176'], ['VAR_0177'], ['VAR_0178'], ['VAR_0179'], ['VAR_0196'], ['VAR_0200'], ['VAR_0202'], ['VAR_0204'], ['VAR_0214'], ['VAR_0216'], ['VAR_0217'], ['VAR_0222'], ['VAR_0226'], ['VAR_0229'], ['VAR_0230'], ['VAR_0232'], ['VAR_0236'], ['VAR_0237'], ['VAR_0239'], ['VAR_0274'], ['VAR_0283'], ['VAR_0305'], ['VAR_0325'], ['VAR_0342'], ['VAR_0352'], ['VAR_0353'], ['VAR_0354'], ['VAR_0404'], ['VAR_0466'], ['VAR_0467'], ['VAR_0493'], ['VAR_1934']]\n",
      "   INFO ->  feature engineer: Rank\n",
      "   INFO ->  ignore featureRank\n",
      "100%|██████████| 1934/1934 [00:05<00:00, 371.10it/s]\n",
      "   INFO ->  label_encoder_list: ['VAR_0001', 'VAR_0005', 'VAR_0008', 'VAR_0009', 'VAR_0010', 'VAR_0011', 'VAR_0012', 'VAR_0043', 'VAR_0044', 'VAR_0073', 'VAR_0075', 'VAR_0156', 'VAR_0157', 'VAR_0158', 'VAR_0159', 'VAR_0166', 'VAR_0167', 'VAR_0168', 'VAR_0169', 'VAR_0176', 'VAR_0177', 'VAR_0178', 'VAR_0179', 'VAR_0196', 'VAR_0200', 'VAR_0202', 'VAR_0204', 'VAR_0214', 'VAR_0216', 'VAR_0217', 'VAR_0222', 'VAR_0226', 'VAR_0229', 'VAR_0230', 'VAR_0232', 'VAR_0236', 'VAR_0237', 'VAR_0239', 'VAR_0274', 'VAR_0283', 'VAR_0305', 'VAR_0325', 'VAR_0342', 'VAR_0352', 'VAR_0353', 'VAR_0354', 'VAR_0404', 'VAR_0466', 'VAR_0467', 'VAR_0493', 'VAR_1934']\n",
      "   INFO ->  feature combination\n",
      "   INFO ->  shape of FE_all: (290463, 1985), shape of train: (145231, 1985), shape of test: (145232, 1985)\n",
      "   INFO ->  feature filter\n",
      "100%|██████████| 1985/1985 [01:01<00:00, 32.14it/s] \n",
      "   INFO ->  filtered features: ['ID', 'target', 'VAR_0207', 'VAR_0213', 'VAR_0840']\n",
      "   INFO ->  used_features: ['VAR_0001', 'VAR_0002', 'VAR_0003', 'VAR_0004', 'VAR_0005', 'VAR_0006', 'VAR_0007', 'VAR_0008', 'VAR_0009', 'VAR_0010', 'VAR_0011', 'VAR_0012', 'VAR_0013', 'VAR_0014', 'VAR_0015', 'VAR_0016', 'VAR_0017', 'VAR_0018', 'VAR_0019', 'VAR_0020', 'VAR_0021', 'VAR_0022', 'VAR_0023', 'VAR_0024', 'VAR_0025', 'VAR_0026', 'VAR_0027', 'VAR_0028', 'VAR_0029', 'VAR_0030', 'VAR_0031', 'VAR_0032', 'VAR_0033', 'VAR_0034', 'VAR_0035', 'VAR_0036', 'VAR_0037', 'VAR_0038', 'VAR_0039', 'VAR_0040', 'VAR_0041', 'VAR_0042', 'VAR_0043', 'VAR_0044', 'VAR_0045', 'VAR_0046', 'VAR_0047', 'VAR_0048', 'VAR_0049', 'VAR_0050', 'VAR_0051', 'VAR_0052', 'VAR_0053', 'VAR_0054', 'VAR_0055', 'VAR_0056', 'VAR_0057', 'VAR_0058', 'VAR_0059', 'VAR_0060', 'VAR_0061', 'VAR_0062', 'VAR_0063', 'VAR_0064', 'VAR_0065', 'VAR_0066', 'VAR_0067', 'VAR_0068', 'VAR_0069', 'VAR_0070', 'VAR_0071', 'VAR_0072', 'VAR_0073', 'VAR_0074', 'VAR_0075', 'VAR_0076', 'VAR_0077', 'VAR_0078', 'VAR_0079', 'VAR_0080', 'VAR_0081', 'VAR_0082', 'VAR_0083', 'VAR_0084', 'VAR_0085', 'VAR_0086', 'VAR_0087', 'VAR_0088', 'VAR_0089', 'VAR_0090', 'VAR_0091', 'VAR_0092', 'VAR_0093', 'VAR_0094', 'VAR_0095', 'VAR_0096', 'VAR_0097', 'VAR_0098', 'VAR_0099', 'VAR_0100', 'VAR_0101', 'VAR_0102', 'VAR_0103', 'VAR_0104', 'VAR_0105', 'VAR_0106', 'VAR_0107', 'VAR_0108', 'VAR_0109', 'VAR_0110', 'VAR_0111', 'VAR_0112', 'VAR_0113', 'VAR_0114', 'VAR_0115', 'VAR_0116', 'VAR_0117', 'VAR_0118', 'VAR_0119', 'VAR_0120', 'VAR_0121', 'VAR_0122', 'VAR_0123', 'VAR_0124', 'VAR_0125', 'VAR_0126', 'VAR_0127', 'VAR_0128', 'VAR_0129', 'VAR_0130', 'VAR_0131', 'VAR_0132', 'VAR_0133', 'VAR_0134', 'VAR_0135', 'VAR_0136', 'VAR_0137', 'VAR_0138', 'VAR_0139', 'VAR_0140', 'VAR_0141', 'VAR_0142', 'VAR_0143', 'VAR_0144', 'VAR_0145', 'VAR_0146', 'VAR_0147', 'VAR_0148', 'VAR_0149', 'VAR_0150', 'VAR_0151', 'VAR_0152', 'VAR_0153', 'VAR_0154', 'VAR_0155', 'VAR_0156', 'VAR_0157', 'VAR_0158', 'VAR_0159', 'VAR_0160', 'VAR_0161', 'VAR_0162', 'VAR_0163', 
'VAR_0164', 'VAR_0165', 'VAR_0166', 'VAR_0167', 'VAR_0168', 'VAR_0169', 'VAR_0170', 'VAR_0171', 'VAR_0172', 'VAR_0173', 'VAR_0174', 'VAR_0175', 'VAR_0176', 'VAR_0177', 'VAR_0178', 'VAR_0179', 'VAR_0180', 'VAR_0181', 'VAR_0182', 'VAR_0183', 'VAR_0184', 'VAR_0185', 'VAR_0186', 'VAR_0187', 'VAR_0188', 'VAR_0189', 'VAR_0190', 'VAR_0191', 'VAR_0192', 'VAR_0193', 'VAR_0194', 'VAR_0195', 'VAR_0196', 'VAR_0197', 'VAR_0198', 'VAR_0199', 'VAR_0200', 'VAR_0201', 'VAR_0202', 'VAR_0203', 'VAR_0204', 'VAR_0205', 'VAR_0206', 'VAR_0208', 'VAR_0209', 'VAR_0210', 'VAR_0211', 'VAR_0212', 'VAR_0214', 'VAR_0215', 'VAR_0216', 'VAR_0217', 'VAR_0219', 'VAR_0220', 'VAR_0221', 'VAR_0222', 'VAR_0223', 'VAR_0224', 'VAR_0225', 'VAR_0226', 'VAR_0227', 'VAR_0228', 'VAR_0229', 'VAR_0230', 'VAR_0231', 'VAR_0232', 'VAR_0233', 'VAR_0234', 'VAR_0235', 'VAR_0236', 'VAR_0237', 'VAR_0238', 'VAR_0239', 'VAR_0241', 'VAR_0242', 'VAR_0243', 'VAR_0244', 'VAR_0245', 'VAR_0246', 'VAR_0247', 'VAR_0248', 'VAR_0249', 'VAR_0250', 'VAR_0251', 'VAR_0252', 'VAR_0253', 'VAR_0254', 'VAR_0255', 'VAR_0256', 'VAR_0257', 'VAR_0258', 'VAR_0259', 'VAR_0260', 'VAR_0261', 'VAR_0262', 'VAR_0263', 'VAR_0264', 'VAR_0265', 'VAR_0266', 'VAR_0267', 'VAR_0268', 'VAR_0269', 'VAR_0270', 'VAR_0271', 'VAR_0272', 'VAR_0273', 'VAR_0274', 'VAR_0275', 'VAR_0276', 'VAR_0277', 'VAR_0278', 'VAR_0279', 'VAR_0280', 'VAR_0281', 'VAR_0282', 'VAR_0283', 'VAR_0284', 'VAR_0285', 'VAR_0286', 'VAR_0287', 'VAR_0288', 'VAR_0289', 'VAR_0290', 'VAR_0291', 'VAR_0292', 'VAR_0293', 'VAR_0294', 'VAR_0295', 'VAR_0296', 'VAR_0297', 'VAR_0298', 'VAR_0299', 'VAR_0300', 'VAR_0301', 'VAR_0302', 'VAR_0303', 'VAR_0304', 'VAR_0305', 'VAR_0306', 'VAR_0307', 'VAR_0308', 'VAR_0309', 'VAR_0310', 'VAR_0311', 'VAR_0312', 'VAR_0313', 'VAR_0314', 'VAR_0315', 'VAR_0316', 'VAR_0317', 'VAR_0318', 'VAR_0319', 'VAR_0320', 'VAR_0321', 'VAR_0322', 'VAR_0323', 'VAR_0324', 'VAR_0325', 'VAR_0326', 'VAR_0327', 'VAR_0328', 'VAR_0329', 'VAR_0330', 'VAR_0331', 'VAR_0332', 'VAR_0333', 
'VAR_0334', 'VAR_0335', 'VAR_0336', 'VAR_0337', 'VAR_0338', 'VAR_0339', 'VAR_0340', 'VAR_0341', 'VAR_0342', 'VAR_0343', 'VAR_0344', 'VAR_0345', 'VAR_0346', 'VAR_0347', 'VAR_0348', 'VAR_0349', 'VAR_0350', 'VAR_0351', 'VAR_0352', 'VAR_0353', 'VAR_0354', 'VAR_0355', 'VAR_0356', 'VAR_0357', 'VAR_0358', 'VAR_0359', 'VAR_0360', 'VAR_0361', 'VAR_0362', 'VAR_0363', 'VAR_0364', 'VAR_0365', 'VAR_0366', 'VAR_0367', 'VAR_0368', 'VAR_0369', 'VAR_0370', 'VAR_0371', 'VAR_0372', 'VAR_0373', 'VAR_0374', 'VAR_0375', 'VAR_0376', 'VAR_0377', 'VAR_0378', 'VAR_0379', 'VAR_0380', 'VAR_0381', 'VAR_0382', 'VAR_0383', 'VAR_0384', 'VAR_0385', 'VAR_0386', 'VAR_0387', 'VAR_0388', 'VAR_0389', 'VAR_0390', 'VAR_0391', 'VAR_0392', 'VAR_0393', 'VAR_0394', 'VAR_0395', 'VAR_0396', 'VAR_0397', 'VAR_0398', 'VAR_0399', 'VAR_0400', 'VAR_0401', 'VAR_0402', 'VAR_0403', 'VAR_0404', 'VAR_0405', 'VAR_0406', 'VAR_0407', 'VAR_0408', 'VAR_0409', 'VAR_0410', 'VAR_0411', 'VAR_0412', 'VAR_0413', 'VAR_0414', 'VAR_0415', 'VAR_0416', 'VAR_0417', 'VAR_0418', 'VAR_0419', 'VAR_0420', 'VAR_0421', 'VAR_0422', 'VAR_0423', 'VAR_0424', 'VAR_0425', 'VAR_0426', 'VAR_0427', 'VAR_0428', 'VAR_0429', 'VAR_0430', 'VAR_0431', 'VAR_0432', 'VAR_0433', 'VAR_0434', 'VAR_0435', 'VAR_0436', 'VAR_0437', 'VAR_0438', 'VAR_0439', 'VAR_0440', 'VAR_0441', 'VAR_0442', 'VAR_0443', 'VAR_0444', 'VAR_0445', 'VAR_0446', 'VAR_0447', 'VAR_0448', 'VAR_0449', 'VAR_0450', 'VAR_0451', 'VAR_0452', 'VAR_0453', 'VAR_0454', 'VAR_0455', 'VAR_0456', 'VAR_0457', 'VAR_0458', 'VAR_0459', 'VAR_0460', 'VAR_0461', 'VAR_0462', 'VAR_0463', 'VAR_0464', 'VAR_0465', 'VAR_0466', 'VAR_0467', 'VAR_0468', 'VAR_0469', 'VAR_0470', 'VAR_0471', 'VAR_0472', 'VAR_0473', 'VAR_0474', 'VAR_0475', 'VAR_0476', 'VAR_0477', 'VAR_0478', 'VAR_0479', 'VAR_0480', 'VAR_0481', 'VAR_0482', 'VAR_0483', 'VAR_0484', 'VAR_0485', 'VAR_0486', 'VAR_0487', 'VAR_0488', 'VAR_0489', 'VAR_0490', 'VAR_0491', 'VAR_0492', 'VAR_0493', 'VAR_0494', 'VAR_0495', 'VAR_0496', 'VAR_0497', 'VAR_0498', 'VAR_0499', 
'VAR_0500', 'VAR_0501', 'VAR_0502', 'VAR_0503', 'VAR_0504', 'VAR_0505', 'VAR_0506', 'VAR_0507', 'VAR_0508', 'VAR_0509', 'VAR_0510', 'VAR_0511', 'VAR_0512', 'VAR_0513', 'VAR_0514', 'VAR_0515', 'VAR_0516', 'VAR_0517', 'VAR_0518', 'VAR_0519', 'VAR_0520', 'VAR_0521', 'VAR_0522', 'VAR_0523', 'VAR_0524', 'VAR_0525', 'VAR_0526', 'VAR_0527', 'VAR_0528', 'VAR_0529', 'VAR_0530', 'VAR_0531', 'VAR_0532', 'VAR_0533', 'VAR_0534', 'VAR_0535', 'VAR_0536', 'VAR_0537', 'VAR_0538', 'VAR_0539', 'VAR_0540', 'VAR_0541', 'VAR_0542', 'VAR_0543', 'VAR_0544', 'VAR_0545', 'VAR_0546', 'VAR_0547', 'VAR_0548', 'VAR_0549', 'VAR_0550', 'VAR_0551', 'VAR_0552', 'VAR_0553', 'VAR_0554', 'VAR_0555', 'VAR_0556', 'VAR_0557', 'VAR_0558', 'VAR_0559', 'VAR_0560', 'VAR_0561', 'VAR_0562', 'VAR_0563', 'VAR_0564', 'VAR_0565', 'VAR_0566', 'VAR_0567', 'VAR_0568', 'VAR_0569', 'VAR_0570', 'VAR_0571', 'VAR_0572', 'VAR_0573', 'VAR_0574', 'VAR_0575', 'VAR_0576', 'VAR_0577', 'VAR_0578', 'VAR_0579', 'VAR_0580', 'VAR_0581', 'VAR_0582', 'VAR_0583', 'VAR_0584', 'VAR_0585', 'VAR_0586', 'VAR_0587', 'VAR_0588', 'VAR_0589', 'VAR_0590', 'VAR_0591', 'VAR_0592', 'VAR_0593', 'VAR_0594', 'VAR_0595', 'VAR_0596', 'VAR_0597', 'VAR_0598', 'VAR_0599', 'VAR_0600', 'VAR_0601', 'VAR_0602', 'VAR_0603', 'VAR_0604', 'VAR_0605', 'VAR_0606', 'VAR_0607', 'VAR_0608', 'VAR_0609', 'VAR_0610', 'VAR_0611', 'VAR_0612', 'VAR_0613', 'VAR_0614', 'VAR_0615', 'VAR_0616', 'VAR_0617', 'VAR_0618', 'VAR_0619', 'VAR_0620', 'VAR_0621', 'VAR_0622', 'VAR_0623', 'VAR_0624', 'VAR_0625', 'VAR_0626', 'VAR_0627', 'VAR_0628', 'VAR_0629', 'VAR_0630', 'VAR_0631', 'VAR_0632', 'VAR_0633', 'VAR_0634', 'VAR_0635', 'VAR_0636', 'VAR_0637', 'VAR_0638', 'VAR_0639', 'VAR_0640', 'VAR_0641', 'VAR_0642', 'VAR_0643', 'VAR_0644', 'VAR_0645', 'VAR_0646', 'VAR_0647', 'VAR_0648', 'VAR_0649', 'VAR_0650', 'VAR_0651', 'VAR_0652', 'VAR_0653', 'VAR_0654', 'VAR_0655', 'VAR_0656', 'VAR_0657', 'VAR_0658', 'VAR_0659', 'VAR_0660', 'VAR_0661', 'VAR_0662', 'VAR_0663', 'VAR_0664', 'VAR_0665', 
'VAR_0666', 'VAR_0667', 'VAR_0668', 'VAR_0669', 'VAR_0670', 'VAR_0671', 'VAR_0672', 'VAR_0673', 'VAR_0674', 'VAR_0675', 'VAR_0676', 'VAR_0677', 'VAR_0678', 'VAR_0679', 'VAR_0680', 'VAR_0681', 'VAR_0682', 'VAR_0683', 'VAR_0684', 'VAR_0685', 'VAR_0686', 'VAR_0687', 'VAR_0688', 'VAR_0689', 'VAR_0690', 'VAR_0691', 'VAR_0692', 'VAR_0693', 'VAR_0694', 'VAR_0695', 'VAR_0696', 'VAR_0697', 'VAR_0698', 'VAR_0699', 'VAR_0700', 'VAR_0701', 'VAR_0702', 'VAR_0703', 'VAR_0704', 'VAR_0705', 'VAR_0706', 'VAR_0707', 'VAR_0708', 'VAR_0709', 'VAR_0710', 'VAR_0711', 'VAR_0712', 'VAR_0713', 'VAR_0714', 'VAR_0715', 'VAR_0716', 'VAR_0717', 'VAR_0718', 'VAR_0719', 'VAR_0720', 'VAR_0721', 'VAR_0722', 'VAR_0723', 'VAR_0724', 'VAR_0725', 'VAR_0726', 'VAR_0727', 'VAR_0728', 'VAR_0729', 'VAR_0730', 'VAR_0731', 'VAR_0732', 'VAR_0733', 'VAR_0734', 'VAR_0735', 'VAR_0736', 'VAR_0737', 'VAR_0738', 'VAR_0739', 'VAR_0740', 'VAR_0741', 'VAR_0742', 'VAR_0743', 'VAR_0744', 'VAR_0745', 'VAR_0746', 'VAR_0747', 'VAR_0748', 'VAR_0749', 'VAR_0750', 'VAR_0751', 'VAR_0752', 'VAR_0753', 'VAR_0754', 'VAR_0755', 'VAR_0756', 'VAR_0757', 'VAR_0758', 'VAR_0759', 'VAR_0760', 'VAR_0761', 'VAR_0762', 'VAR_0763', 'VAR_0764', 'VAR_0765', 'VAR_0766', 'VAR_0767', 'VAR_0768', 'VAR_0769', 'VAR_0770', 'VAR_0771', 'VAR_0772', 'VAR_0773', 'VAR_0774', 'VAR_0775', 'VAR_0776', 'VAR_0777', 'VAR_0778', 'VAR_0779', 'VAR_0780', 'VAR_0781', 'VAR_0782', 'VAR_0783', 'VAR_0784', 'VAR_0785', 'VAR_0786', 'VAR_0787', 'VAR_0788', 'VAR_0789', 'VAR_0790', 'VAR_0791', 'VAR_0792', 'VAR_0793', 'VAR_0794', 'VAR_0795', 'VAR_0796', 'VAR_0797', 'VAR_0798', 'VAR_0799', 'VAR_0800', 'VAR_0801', 'VAR_0802', 'VAR_0803', 'VAR_0804', 'VAR_0805', 'VAR_0806', 'VAR_0807', 'VAR_0808', 'VAR_0809', 'VAR_0810', 'VAR_0811', 'VAR_0812', 'VAR_0813', 'VAR_0814', 'VAR_0815', 'VAR_0816', 'VAR_0817', 'VAR_0818', 'VAR_0819', 'VAR_0820', 'VAR_0821', 'VAR_0822', 'VAR_0823', 'VAR_0824', 'VAR_0825', 'VAR_0826', 'VAR_0827', 'VAR_0828', 'VAR_0829', 'VAR_0830', 'VAR_0831', 
'VAR_0832', 'VAR_0833', 'VAR_0834', 'VAR_0835', 'VAR_0836', 'VAR_0837', 'VAR_0838', 'VAR_0839', 'VAR_0841', 'VAR_0842', 'VAR_0843', 'VAR_0844', 'VAR_0845', 'VAR_0846', 'VAR_0847', 'VAR_0848', 'VAR_0849', 'VAR_0850', 'VAR_0851', 'VAR_0852', 'VAR_0853', 'VAR_0854', 'VAR_0855', 'VAR_0856', 'VAR_0857', 'VAR_0858', 'VAR_0859', 'VAR_0860', 'VAR_0861', 'VAR_0862', 'VAR_0863', 'VAR_0864', 'VAR_0865', 'VAR_0866', 'VAR_0867', 'VAR_0868', 'VAR_0869', 'VAR_0870', 'VAR_0871', 'VAR_0872', 'VAR_0873', 'VAR_0874', 'VAR_0875', 'VAR_0876', 'VAR_0877', 'VAR_0878', 'VAR_0879', 'VAR_0880', 'VAR_0881', 'VAR_0882', 'VAR_0883', 'VAR_0884', 'VAR_0885', 'VAR_0886', 'VAR_0887', 'VAR_0888', 'VAR_0889', 'VAR_0890', 'VAR_0891', 'VAR_0892', 'VAR_0893', 'VAR_0894', 'VAR_0895', 'VAR_0896', 'VAR_0897', 'VAR_0898', 'VAR_0899', 'VAR_0900', 'VAR_0901', 'VAR_0902', 'VAR_0903', 'VAR_0904', 'VAR_0905', 'VAR_0906', 'VAR_0907', 'VAR_0908', 'VAR_0909', 'VAR_0910', 'VAR_0911', 'VAR_0912', 'VAR_0913', 'VAR_0914', 'VAR_0915', 'VAR_0916', 'VAR_0917', 'VAR_0918', 'VAR_0919', 'VAR_0920', 'VAR_0921', 'VAR_0922', 'VAR_0923', 'VAR_0924', 'VAR_0925', 'VAR_0926', 'VAR_0927', 'VAR_0928', 'VAR_0929', 'VAR_0930', 'VAR_0931', 'VAR_0932', 'VAR_0933', 'VAR_0934', 'VAR_0935', 'VAR_0936', 'VAR_0937', 'VAR_0938', 'VAR_0939', 'VAR_0940', 'VAR_0941', 'VAR_0942', 'VAR_0943', 'VAR_0944', 'VAR_0945', 'VAR_0946', 'VAR_0947', 'VAR_0948', 'VAR_0949', 'VAR_0950', 'VAR_0951', 'VAR_0952', 'VAR_0953', 'VAR_0954', 'VAR_0955', 'VAR_0956', 'VAR_0957', 'VAR_0958', 'VAR_0959', 'VAR_0960', 'VAR_0961', 'VAR_0962', 'VAR_0963', 'VAR_0964', 'VAR_0965', 'VAR_0966', 'VAR_0967', 'VAR_0968', 'VAR_0969', 'VAR_0970', 'VAR_0971', 'VAR_0972', 'VAR_0973', 'VAR_0974', 'VAR_0975', 'VAR_0976', 'VAR_0977', 'VAR_0978', 'VAR_0979', 'VAR_0980', 'VAR_0981', 'VAR_0982', 'VAR_0983', 'VAR_0984', 'VAR_0985', 'VAR_0986', 'VAR_0987', 'VAR_0988', 'VAR_0989', 'VAR_0990', 'VAR_0991', 'VAR_0992', 'VAR_0993', 'VAR_0994', 'VAR_0995', 'VAR_0996', 'VAR_0997', 'VAR_0998', 
'VAR_0999', 'VAR_1000', 'VAR_1001', 'VAR_1002', 'VAR_1003', 'VAR_1004', 'VAR_1005', 'VAR_1006', 'VAR_1007', 'VAR_1008', 'VAR_1009', 'VAR_1010', 'VAR_1011', 'VAR_1012', 'VAR_1013', 'VAR_1014', 'VAR_1015', 'VAR_1016', 'VAR_1017', 'VAR_1018', 'VAR_1019', 'VAR_1020', 'VAR_1021', 'VAR_1022', 'VAR_1023', 'VAR_1024', 'VAR_1025', 'VAR_1026', 'VAR_1027', 'VAR_1028', 'VAR_1029', 'VAR_1030', 'VAR_1031', 'VAR_1032', 'VAR_1033', 'VAR_1034', 'VAR_1035', 'VAR_1036', 'VAR_1037', 'VAR_1038', 'VAR_1039', 'VAR_1040', 'VAR_1041', 'VAR_1042', 'VAR_1043', 'VAR_1044', 'VAR_1045', 'VAR_1046', 'VAR_1047', 'VAR_1048', 'VAR_1049', 'VAR_1050', 'VAR_1051', 'VAR_1052', 'VAR_1053', 'VAR_1054', 'VAR_1055', 'VAR_1056', 'VAR_1057', 'VAR_1058', 'VAR_1059', 'VAR_1060', 'VAR_1061', 'VAR_1062', 'VAR_1063', 'VAR_1064', 'VAR_1065', 'VAR_1066', 'VAR_1067', 'VAR_1068', 'VAR_1069', 'VAR_1070', 'VAR_1071', 'VAR_1072', 'VAR_1073', 'VAR_1074', 'VAR_1075', 'VAR_1076', 'VAR_1077', 'VAR_1078', 'VAR_1079', 'VAR_1080', 'VAR_1081', 'VAR_1082', 'VAR_1083', 'VAR_1084', 'VAR_1085', 'VAR_1086', 'VAR_1087', 'VAR_1088', 'VAR_1089', 'VAR_1090', 'VAR_1091', 'VAR_1092', 'VAR_1093', 'VAR_1094', 'VAR_1095', 'VAR_1096', 'VAR_1097', 'VAR_1098', 'VAR_1099', 'VAR_1100', 'VAR_1101', 'VAR_1102', 'VAR_1103', 'VAR_1104', 'VAR_1105', 'VAR_1106', 'VAR_1107', 'VAR_1108', 'VAR_1109', 'VAR_1110', 'VAR_1111', 'VAR_1112', 'VAR_1113', 'VAR_1114', 'VAR_1115', 'VAR_1116', 'VAR_1117', 'VAR_1118', 'VAR_1119', 'VAR_1120', 'VAR_1121', 'VAR_1122', 'VAR_1123', 'VAR_1124', 'VAR_1125', 'VAR_1126', 'VAR_1127', 'VAR_1128', 'VAR_1129', 'VAR_1130', 'VAR_1131', 'VAR_1132', 'VAR_1133', 'VAR_1134', 'VAR_1135', 'VAR_1136', 'VAR_1137', 'VAR_1138', 'VAR_1139', 'VAR_1140', 'VAR_1141', 'VAR_1142', 'VAR_1143', 'VAR_1144', 'VAR_1145', 'VAR_1146', 'VAR_1147', 'VAR_1148', 'VAR_1149', 'VAR_1150', 'VAR_1151', 'VAR_1152', 'VAR_1153', 'VAR_1154', 'VAR_1155', 'VAR_1156', 'VAR_1157', 'VAR_1158', 'VAR_1159', 'VAR_1160', 'VAR_1161', 'VAR_1162', 'VAR_1163', 'VAR_1164', 
'VAR_1165', 'VAR_1166', 'VAR_1167', 'VAR_1168', 'VAR_1169', 'VAR_1170', 'VAR_1171', 'VAR_1172', 'VAR_1173', 'VAR_1174', 'VAR_1175', 'VAR_1176', 'VAR_1177', 'VAR_1178', 'VAR_1179', 'VAR_1180', 'VAR_1181', 'VAR_1182', 'VAR_1183', 'VAR_1184', 'VAR_1185', 'VAR_1186', 'VAR_1187', 'VAR_1188', 'VAR_1189', 'VAR_1190', 'VAR_1191', 'VAR_1192', 'VAR_1193', 'VAR_1194', 'VAR_1195', 'VAR_1196', 'VAR_1197', 'VAR_1198', 'VAR_1199', 'VAR_1200', 'VAR_1201', 'VAR_1202', 'VAR_1203', 'VAR_1204', 'VAR_1205', 'VAR_1206', 'VAR_1207', 'VAR_1208', 'VAR_1209', 'VAR_1210', 'VAR_1211', 'VAR_1212', 'VAR_1213', 'VAR_1214', 'VAR_1215', 'VAR_1216', 'VAR_1217', 'VAR_1218', 'VAR_1219', 'VAR_1220', 'VAR_1221', 'VAR_1222', 'VAR_1223', 'VAR_1224', 'VAR_1225', 'VAR_1226', 'VAR_1227', 'VAR_1228', 'VAR_1229', 'VAR_1230', 'VAR_1231', 'VAR_1232', 'VAR_1233', 'VAR_1234', 'VAR_1235', 'VAR_1236', 'VAR_1237', 'VAR_1238', 'VAR_1239', 'VAR_1240', 'VAR_1241', 'VAR_1242', 'VAR_1243', 'VAR_1244', 'VAR_1245', 'VAR_1246', 'VAR_1247', 'VAR_1248', 'VAR_1249', 'VAR_1250', 'VAR_1251', 'VAR_1252', 'VAR_1253', 'VAR_1254', 'VAR_1255', 'VAR_1256', 'VAR_1257', 'VAR_1258', 'VAR_1259', 'VAR_1260', 'VAR_1261', 'VAR_1262', 'VAR_1263', 'VAR_1264', 'VAR_1265', 'VAR_1266', 'VAR_1267', 'VAR_1268', 'VAR_1269', 'VAR_1270', 'VAR_1271', 'VAR_1272', 'VAR_1273', 'VAR_1274', 'VAR_1275', 'VAR_1276', 'VAR_1277', 'VAR_1278', 'VAR_1279', 'VAR_1280', 'VAR_1281', 'VAR_1282', 'VAR_1283', 'VAR_1284', 'VAR_1285', 'VAR_1286', 'VAR_1287', 'VAR_1288', 'VAR_1289', 'VAR_1290', 'VAR_1291', 'VAR_1292', 'VAR_1293', 'VAR_1294', 'VAR_1295', 'VAR_1296', 'VAR_1297', 'VAR_1298', 'VAR_1299', 'VAR_1300', 'VAR_1301', 'VAR_1302', 'VAR_1303', 'VAR_1304', 'VAR_1305', 'VAR_1306', 'VAR_1307', 'VAR_1308', 'VAR_1309', 'VAR_1310', 'VAR_1311', 'VAR_1312', 'VAR_1313', 'VAR_1314', 'VAR_1315', 'VAR_1316', 'VAR_1317', 'VAR_1318', 'VAR_1319', 'VAR_1320', 'VAR_1321', 'VAR_1322', 'VAR_1323', 'VAR_1324', 'VAR_1325', 'VAR_1326', 'VAR_1327', 'VAR_1328', 'VAR_1329', 'VAR_1330', 
'VAR_1331', 'VAR_1332', 'VAR_1333', 'VAR_1334', 'VAR_1335', 'VAR_1336', 'VAR_1337', 'VAR_1338', 'VAR_1339', 'VAR_1340', 'VAR_1341', 'VAR_1342', 'VAR_1343', 'VAR_1344', 'VAR_1345', 'VAR_1346', 'VAR_1347', 'VAR_1348', 'VAR_1349', 'VAR_1350', 'VAR_1351', 'VAR_1352', 'VAR_1353', 'VAR_1354', 'VAR_1355', 'VAR_1356', 'VAR_1357', 'VAR_1358', 'VAR_1359', 'VAR_1360', 'VAR_1361', 'VAR_1362', 'VAR_1363', 'VAR_1364', 'VAR_1365', 'VAR_1366', 'VAR_1367', 'VAR_1368', 'VAR_1369', 'VAR_1370', 'VAR_1371', 'VAR_1372', 'VAR_1373', 'VAR_1374', 'VAR_1375', 'VAR_1376', 'VAR_1377', 'VAR_1378', 'VAR_1379', 'VAR_1380', 'VAR_1381', 'VAR_1382', 'VAR_1383', 'VAR_1384', 'VAR_1385', 'VAR_1386', 'VAR_1387', 'VAR_1388', 'VAR_1389', 'VAR_1390', 'VAR_1391', 'VAR_1392', 'VAR_1393', 'VAR_1394', 'VAR_1395', 'VAR_1396', 'VAR_1397', 'VAR_1398', 'VAR_1399', 'VAR_1400', 'VAR_1401', 'VAR_1402', 'VAR_1403', 'VAR_1404', 'VAR_1405', 'VAR_1406', 'VAR_1407', 'VAR_1408', 'VAR_1409', 'VAR_1410', 'VAR_1411', 'VAR_1412', 'VAR_1413', 'VAR_1414', 'VAR_1415', 'VAR_1416', 'VAR_1417', 'VAR_1418', 'VAR_1419', 'VAR_1420', 'VAR_1421', 'VAR_1422', 'VAR_1423', 'VAR_1424', 'VAR_1425', 'VAR_1426', 'VAR_1427', 'VAR_1428', 'VAR_1429', 'VAR_1430', 'VAR_1431', 'VAR_1432', 'VAR_1433', 'VAR_1434', 'VAR_1435', 'VAR_1436', 'VAR_1437', 'VAR_1438', 'VAR_1439', 'VAR_1440', 'VAR_1441', 'VAR_1442', 'VAR_1443', 'VAR_1444', 'VAR_1445', 'VAR_1446', 'VAR_1447', 'VAR_1448', 'VAR_1449', 'VAR_1450', 'VAR_1451', 'VAR_1452', 'VAR_1453', 'VAR_1454', 'VAR_1455', 'VAR_1456', 'VAR_1457', 'VAR_1458', 'VAR_1459', 'VAR_1460', 'VAR_1461', 'VAR_1462', 'VAR_1463', 'VAR_1464', 'VAR_1465', 'VAR_1466', 'VAR_1467', 'VAR_1468', 'VAR_1469', 'VAR_1470', 'VAR_1471', 'VAR_1472', 'VAR_1473', 'VAR_1474', 'VAR_1475', 'VAR_1476', 'VAR_1477', 'VAR_1478', 'VAR_1479', 'VAR_1480', 'VAR_1481', 'VAR_1482', 'VAR_1483', 'VAR_1484', 'VAR_1485', 'VAR_1486', 'VAR_1487', 'VAR_1488', 'VAR_1489', 'VAR_1490', 'VAR_1491', 'VAR_1492', 'VAR_1493', 'VAR_1494', 'VAR_1495', 'VAR_1496', 
'VAR_1497', 'VAR_1498', 'VAR_1499', 'VAR_1500', 'VAR_1501', 'VAR_1502', 'VAR_1503', 'VAR_1504', 'VAR_1505', 'VAR_1506', 'VAR_1507', 'VAR_1508', 'VAR_1509', 'VAR_1510', 'VAR_1511', 'VAR_1512', 'VAR_1513', 'VAR_1514', 'VAR_1515', 'VAR_1516', 'VAR_1517', 'VAR_1518', 'VAR_1519', 'VAR_1520', 'VAR_1521', 'VAR_1522', 'VAR_1523', 'VAR_1524', 'VAR_1525', 'VAR_1526', 'VAR_1527', 'VAR_1528', 'VAR_1529', 'VAR_1530', 'VAR_1531', 'VAR_1532', 'VAR_1533', 'VAR_1534', 'VAR_1535', 'VAR_1536', 'VAR_1537', 'VAR_1538', 'VAR_1539', 'VAR_1540', 'VAR_1541', 'VAR_1542', 'VAR_1543', 'VAR_1544', 'VAR_1545', 'VAR_1546', 'VAR_1547', 'VAR_1548', 'VAR_1549', 'VAR_1550', 'VAR_1551', 'VAR_1552', 'VAR_1553', 'VAR_1554', 'VAR_1555', 'VAR_1556', 'VAR_1557', 'VAR_1558', 'VAR_1559', 'VAR_1560', 'VAR_1561', 'VAR_1562', 'VAR_1563', 'VAR_1564', 'VAR_1565', 'VAR_1566', 'VAR_1567', 'VAR_1568', 'VAR_1569', 'VAR_1570', 'VAR_1571', 'VAR_1572', 'VAR_1573', 'VAR_1574', 'VAR_1575', 'VAR_1576', 'VAR_1577', 'VAR_1578', 'VAR_1579', 'VAR_1580', 'VAR_1581', 'VAR_1582', 'VAR_1583', 'VAR_1584', 'VAR_1585', 'VAR_1586', 'VAR_1587', 'VAR_1588', 'VAR_1589', 'VAR_1590', 'VAR_1591', 'VAR_1592', 'VAR_1593', 'VAR_1594', 'VAR_1595', 'VAR_1596', 'VAR_1597', 'VAR_1598', 'VAR_1599', 'VAR_1600', 'VAR_1601', 'VAR_1602', 'VAR_1603', 'VAR_1604', 'VAR_1605', 'VAR_1606', 'VAR_1607', 'VAR_1608', 'VAR_1609', 'VAR_1610', 'VAR_1611', 'VAR_1612', 'VAR_1613', 'VAR_1614', 'VAR_1615', 'VAR_1616', 'VAR_1617', 'VAR_1618', 'VAR_1619', 'VAR_1620', 'VAR_1621', 'VAR_1622', 'VAR_1623', 'VAR_1624', 'VAR_1625', 'VAR_1626', 'VAR_1627', 'VAR_1628', 'VAR_1629', 'VAR_1630', 'VAR_1631', 'VAR_1632', 'VAR_1633', 'VAR_1634', 'VAR_1635', 'VAR_1636', 'VAR_1637', 'VAR_1638', 'VAR_1639', 'VAR_1640', 'VAR_1641', 'VAR_1642', 'VAR_1643', 'VAR_1644', 'VAR_1645', 'VAR_1646', 'VAR_1647', 'VAR_1648', 'VAR_1649', 'VAR_1650', 'VAR_1651', 'VAR_1652', 'VAR_1653', 'VAR_1654', 'VAR_1655', 'VAR_1656', 'VAR_1657', 'VAR_1658', 'VAR_1659', 'VAR_1660', 'VAR_1661', 'VAR_1662', 
'VAR_1663', 'VAR_1664', 'VAR_1665', 'VAR_1666', 'VAR_1667', 'VAR_1668', 'VAR_1669', 'VAR_1670', 'VAR_1671', 'VAR_1672', 'VAR_1673', 'VAR_1674', 'VAR_1675', 'VAR_1676', 'VAR_1677', 'VAR_1678', 'VAR_1679', 'VAR_1680', 'VAR_1681', 'VAR_1682', 'VAR_1683', 'VAR_1684', 'VAR_1685', 'VAR_1686', 'VAR_1687', 'VAR_1688', 'VAR_1689', 'VAR_1690', 'VAR_1691', 'VAR_1692', 'VAR_1693', 'VAR_1694', 'VAR_1695', 'VAR_1696', 'VAR_1697', 'VAR_1698', 'VAR_1699', 'VAR_1700', 'VAR_1701', 'VAR_1702', 'VAR_1703', 'VAR_1704', 'VAR_1705', 'VAR_1706', 'VAR_1707', 'VAR_1708', 'VAR_1709', 'VAR_1710', 'VAR_1711', 'VAR_1712', 'VAR_1713', 'VAR_1714', 'VAR_1715', 'VAR_1716', 'VAR_1717', 'VAR_1718', 'VAR_1719', 'VAR_1720', 'VAR_1721', 'VAR_1722', 'VAR_1723', 'VAR_1724', 'VAR_1725', 'VAR_1726', 'VAR_1727', 'VAR_1728', 'VAR_1729', 'VAR_1730', 'VAR_1731', 'VAR_1732', 'VAR_1733', 'VAR_1734', 'VAR_1735', 'VAR_1736', 'VAR_1737', 'VAR_1738', 'VAR_1739', 'VAR_1740', 'VAR_1741', 'VAR_1742', 'VAR_1743', 'VAR_1744', 'VAR_1745', 'VAR_1746', 'VAR_1747', 'VAR_1748', 'VAR_1749', 'VAR_1750', 'VAR_1751', 'VAR_1752', 'VAR_1753', 'VAR_1754', 'VAR_1755', 'VAR_1756', 'VAR_1757', 'VAR_1758', 'VAR_1759', 'VAR_1760', 'VAR_1761', 'VAR_1762', 'VAR_1763', 'VAR_1764', 'VAR_1765', 'VAR_1766', 'VAR_1767', 'VAR_1768', 'VAR_1769', 'VAR_1770', 'VAR_1771', 'VAR_1772', 'VAR_1773', 'VAR_1774', 'VAR_1775', 'VAR_1776', 'VAR_1777', 'VAR_1778', 'VAR_1779', 'VAR_1780', 'VAR_1781', 'VAR_1782', 'VAR_1783', 'VAR_1784', 'VAR_1785', 'VAR_1786', 'VAR_1787', 'VAR_1788', 'VAR_1789', 'VAR_1790', 'VAR_1791', 'VAR_1792', 'VAR_1793', 'VAR_1794', 'VAR_1795', 'VAR_1796', 'VAR_1797', 'VAR_1798', 'VAR_1799', 'VAR_1800', 'VAR_1801', 'VAR_1802', 'VAR_1803', 'VAR_1804', 'VAR_1805', 'VAR_1806', 'VAR_1807', 'VAR_1808', 'VAR_1809', 'VAR_1810', 'VAR_1811', 'VAR_1812', 'VAR_1813', 'VAR_1814', 'VAR_1815', 'VAR_1816', 'VAR_1817', 'VAR_1818', 'VAR_1819', 'VAR_1820', 'VAR_1821', 'VAR_1822', 'VAR_1823', 'VAR_1824', 'VAR_1825', 'VAR_1826', 'VAR_1827', 'VAR_1828', 
'VAR_1829', 'VAR_1830', 'VAR_1831', 'VAR_1832', 'VAR_1833', 'VAR_1834', 'VAR_1835', 'VAR_1836', 'VAR_1837', 'VAR_1838', 'VAR_1839', 'VAR_1840', 'VAR_1841', 'VAR_1842', 'VAR_1843', 'VAR_1844', 'VAR_1845', 'VAR_1846', 'VAR_1847', 'VAR_1848', 'VAR_1849', 'VAR_1850', 'VAR_1851', 'VAR_1852', 'VAR_1853', 'VAR_1854', 'VAR_1855', 'VAR_1856', 'VAR_1857', 'VAR_1858', 'VAR_1859', 'VAR_1860', 'VAR_1861', 'VAR_1862', 'VAR_1863', 'VAR_1864', 'VAR_1865', 'VAR_1866', 'VAR_1867', 'VAR_1868', 'VAR_1869', 'VAR_1870', 'VAR_1871', 'VAR_1872', 'VAR_1873', 'VAR_1874', 'VAR_1875', 'VAR_1876', 'VAR_1877', 'VAR_1878', 'VAR_1879', 'VAR_1880', 'VAR_1881', 'VAR_1882', 'VAR_1883', 'VAR_1884', 'VAR_1885', 'VAR_1886', 'VAR_1887', 'VAR_1888', 'VAR_1889', 'VAR_1890', 'VAR_1891', 'VAR_1892', 'VAR_1893', 'VAR_1894', 'VAR_1895', 'VAR_1896', 'VAR_1897', 'VAR_1898', 'VAR_1899', 'VAR_1900', 'VAR_1901', 'VAR_1902', 'VAR_1903', 'VAR_1904', 'VAR_1905', 'VAR_1906', 'VAR_1907', 'VAR_1908', 'VAR_1909', 'VAR_1910', 'VAR_1911', 'VAR_1912', 'VAR_1913', 'VAR_1914', 'VAR_1915', 'VAR_1916', 'VAR_1917', 'VAR_1918', 'VAR_1919', 'VAR_1920', 'VAR_1921', 'VAR_1922', 'VAR_1923', 'VAR_1924', 'VAR_1925', 'VAR_1926', 'VAR_1927', 'VAR_1928', 'VAR_1929', 'VAR_1930', 'VAR_1931', 'VAR_1932', 'VAR_1933', 'VAR_1934', 'COUNT_VAR_0001', 'COUNT_VAR_0005', 'COUNT_VAR_0008', 'COUNT_VAR_0009', 'COUNT_VAR_0010', 'COUNT_VAR_0011', 'COUNT_VAR_0012', 'COUNT_VAR_0043', 'COUNT_VAR_0044', 'COUNT_VAR_0073', 'COUNT_VAR_0075', 'COUNT_VAR_0156', 'COUNT_VAR_0157', 'COUNT_VAR_0158', 'COUNT_VAR_0159', 'COUNT_VAR_0166', 'COUNT_VAR_0167', 'COUNT_VAR_0168', 'COUNT_VAR_0169', 'COUNT_VAR_0176', 'COUNT_VAR_0177', 'COUNT_VAR_0178', 'COUNT_VAR_0179', 'COUNT_VAR_0196', 'COUNT_VAR_0200', 'COUNT_VAR_0202', 'COUNT_VAR_0204', 'COUNT_VAR_0214', 'COUNT_VAR_0216', 'COUNT_VAR_0217', 'COUNT_VAR_0222', 'COUNT_VAR_0226', 'COUNT_VAR_0229', 'COUNT_VAR_0230', 'COUNT_VAR_0232', 'COUNT_VAR_0236', 'COUNT_VAR_0237', 'COUNT_VAR_0239', 'COUNT_VAR_0274', 'COUNT_VAR_0283', 
'COUNT_VAR_0305', 'COUNT_VAR_0325', 'COUNT_VAR_0342', 'COUNT_VAR_0352', 'COUNT_VAR_0353', 'COUNT_VAR_0354', 'COUNT_VAR_0404', 'COUNT_VAR_0466', 'COUNT_VAR_0467', 'COUNT_VAR_0493', 'COUNT_VAR_1934']\n",
      "   INFO ->  start training model\n",
      "   INFO ->  (145231, 1980)\n",
      "   INFO ->  [+]tuning params\n",
      "\u001b[32m[I 2021-09-10 19:33:13,884]\u001b[0m A new study created in memory with name: LgbClassifier\u001b[0m\n",
      "\u001b[32m[I 2021-09-10 19:42:21,851]\u001b[0m Trial 0 finished with value: 0.7867785929383808 and parameters: {'num_leaves': 511, 'num_boost_round': 938, 'max_depth': 5}. Best is trial 0 with value: 0.7867785929383808.\u001b[0m\n",
      "\u001b[32m[I 2021-09-10 19:53:12,955]\u001b[0m Trial 1 finished with value: 0.7944042951616013 and parameters: {'num_leaves': 134, 'num_boost_round': 7159, 'max_depth': 5}. Best is trial 1 with value: 0.7944042951616013.\u001b[0m\n",
      "   INFO ->  Number of finished trials: 2\n",
      "   INFO ->  Best trial:\n",
      "   INFO ->  \tValue: 0.7944042951616013\n",
      "   INFO ->  \tParams: \n",
      "   INFO ->  \t\tnum_leaves: 134\n",
      "   INFO ->  \t\tnum_boost_round: 7159\n",
      "   INFO ->  \t\tmax_depth: 5\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Training on fold 1\n",
      "Training until validation scores don't improve for 150 rounds\n",
      "[100]\ttraining's auc: 0.759986\tvalid_1's auc: 0.749593\n",
      "[200]\ttraining's auc: 0.775247\tvalid_1's auc: 0.759798\n",
      "[300]\ttraining's auc: 0.785524\tvalid_1's auc: 0.766028\n",
      "[400]\ttraining's auc: 0.793402\tvalid_1's auc: 0.770163\n",
      "[500]\ttraining's auc: 0.799751\tvalid_1's auc: 0.772608\n",
      "[600]\ttraining's auc: 0.805304\tvalid_1's auc: 0.774614\n",
      "[700]\ttraining's auc: 0.810117\tvalid_1's auc: 0.776145\n",
      "[800]\ttraining's auc: 0.814528\tvalid_1's auc: 0.777348\n",
      "[900]\ttraining's auc: 0.818599\tvalid_1's auc: 0.778348\n",
      "[1000]\ttraining's auc: 0.822354\tvalid_1's auc: 0.779186\n",
      "[1100]\ttraining's auc: 0.825763\tvalid_1's auc: 0.779883\n",
      "[1200]\ttraining's auc: 0.828909\tvalid_1's auc: 0.780302\n",
      "[1300]\ttraining's auc: 0.831959\tvalid_1's auc: 0.780811\n",
      "[1400]\ttraining's auc: 0.834864\tvalid_1's auc: 0.781158\n",
      "[1500]\ttraining's auc: 0.83762\tvalid_1's auc: 0.781588\n",
      "[1600]\ttraining's auc: 0.840275\tvalid_1's auc: 0.781888\n",
      "[1700]\ttraining's auc: 0.842843\tvalid_1's auc: 0.782204\n",
      "[1800]\ttraining's auc: 0.845472\tvalid_1's auc: 0.782371\n",
      "[1900]\ttraining's auc: 0.84801\tvalid_1's auc: 0.782618\n",
      "[2000]\ttraining's auc: 0.85032\tvalid_1's auc: 0.782875\n",
      "[2100]\ttraining's auc: 0.852668\tvalid_1's auc: 0.783157\n",
      "[2200]\ttraining's auc: 0.854926\tvalid_1's auc: 0.7834\n",
      "[2300]\ttraining's auc: 0.857089\tvalid_1's auc: 0.783557\n",
      "[2400]\ttraining's auc: 0.859179\tvalid_1's auc: 0.783778\n",
      "[2500]\ttraining's auc: 0.861347\tvalid_1's auc: 0.783939\n",
      "[2600]\ttraining's auc: 0.863341\tvalid_1's auc: 0.784033\n",
      "[2700]\ttraining's auc: 0.86532\tvalid_1's auc: 0.784186\n",
      "[2800]\ttraining's auc: 0.867371\tvalid_1's auc: 0.784321\n",
      "[2900]\ttraining's auc: 0.869438\tvalid_1's auc: 0.784386\n",
      "[3000]\ttraining's auc: 0.871358\tvalid_1's auc: 0.78447\n",
      "[3100]\ttraining's auc: 0.873252\tvalid_1's auc: 0.784586\n",
      "[3200]\ttraining's auc: 0.875157\tvalid_1's auc: 0.784705\n",
      "[3300]\ttraining's auc: 0.877014\tvalid_1's auc: 0.784772\n",
      "[3400]\ttraining's auc: 0.87891\tvalid_1's auc: 0.784887\n",
      "[3500]\ttraining's auc: 0.880785\tvalid_1's auc: 0.784964\n",
      "[3600]\ttraining's auc: 0.882524\tvalid_1's auc: 0.785075\n",
      "[3700]\ttraining's auc: 0.884278\tvalid_1's auc: 0.785155\n",
      "[3800]\ttraining's auc: 0.886088\tvalid_1's auc: 0.785245\n",
      "[3900]\ttraining's auc: 0.887731\tvalid_1's auc: 0.785282\n",
      "[4000]\ttraining's auc: 0.889556\tvalid_1's auc: 0.785307\n",
      "[4100]\ttraining's auc: 0.891227\tvalid_1's auc: 0.785397\n",
      "[4200]\ttraining's auc: 0.89288\tvalid_1's auc: 0.785486\n",
      "[4300]\ttraining's auc: 0.894429\tvalid_1's auc: 0.785542\n",
      "[4400]\ttraining's auc: 0.896074\tvalid_1's auc: 0.785621\n",
      "[4500]\ttraining's auc: 0.897641\tvalid_1's auc: 0.785679\n",
      "[4600]\ttraining's auc: 0.899222\tvalid_1's auc: 0.785784\n",
      "[4700]\ttraining's auc: 0.900895\tvalid_1's auc: 0.785812\n",
      "[4800]\ttraining's auc: 0.902376\tvalid_1's auc: 0.785845\n",
      "[4900]\ttraining's auc: 0.903846\tvalid_1's auc: 0.785931\n",
      "[5000]\ttraining's auc: 0.905235\tvalid_1's auc: 0.785988\n",
      "[5100]\ttraining's auc: 0.906713\tvalid_1's auc: 0.785974\n",
      "[5200]\ttraining's auc: 0.908113\tvalid_1's auc: 0.78605\n",
      "[5300]\ttraining's auc: 0.909556\tvalid_1's auc: 0.786078\n",
      "[5400]\ttraining's auc: 0.911033\tvalid_1's auc: 0.786134\n",
      "[5500]\ttraining's auc: 0.91242\tvalid_1's auc: 0.786127\n",
      "[5600]\ttraining's auc: 0.913822\tvalid_1's auc: 0.786146\n",
      "[5700]\ttraining's auc: 0.915195\tvalid_1's auc: 0.786093\n",
      "Early stopping, best iteration is:\n",
      "[5589]\ttraining's auc: 0.91367\tvalid_1's auc: 0.786158\n",
      "AUC: 0.7861581050211489\n",
      "Fold 1 finished in 0:04:28.643291\n",
      "Training on fold 2\n",
      "Training until validation scores don't improve for 150 rounds\n",
      "[100]\ttraining's auc: 0.760203\tvalid_1's auc: 0.745483\n",
      "[200]\ttraining's auc: 0.774482\tvalid_1's auc: 0.757095\n",
      "[300]\ttraining's auc: 0.784902\tvalid_1's auc: 0.764177\n",
      "[400]\ttraining's auc: 0.792806\tvalid_1's auc: 0.768583\n",
      "[500]\ttraining's auc: 0.79909\tvalid_1's auc: 0.771583\n",
      "[600]\ttraining's auc: 0.804412\tvalid_1's auc: 0.77371\n",
      "[700]\ttraining's auc: 0.809161\tvalid_1's auc: 0.775421\n",
      "[800]\ttraining's auc: 0.813524\tvalid_1's auc: 0.776908\n",
      "[900]\ttraining's auc: 0.817617\tvalid_1's auc: 0.778068\n",
      "[1000]\ttraining's auc: 0.821445\tvalid_1's auc: 0.779125\n",
      "[1100]\ttraining's auc: 0.824902\tvalid_1's auc: 0.779876\n",
      "[1200]\ttraining's auc: 0.828319\tvalid_1's auc: 0.780599\n",
      "[1300]\ttraining's auc: 0.831433\tvalid_1's auc: 0.781127\n",
      "[1400]\ttraining's auc: 0.83443\tvalid_1's auc: 0.781748\n",
      "[1500]\ttraining's auc: 0.83727\tvalid_1's auc: 0.782184\n",
      "[1600]\ttraining's auc: 0.839935\tvalid_1's auc: 0.782499\n",
      "[1700]\ttraining's auc: 0.842719\tvalid_1's auc: 0.782859\n",
      "[1800]\ttraining's auc: 0.845284\tvalid_1's auc: 0.783148\n",
      "[1900]\ttraining's auc: 0.847734\tvalid_1's auc: 0.783463\n",
      "[2000]\ttraining's auc: 0.850072\tvalid_1's auc: 0.783652\n",
      "[2100]\ttraining's auc: 0.852489\tvalid_1's auc: 0.78388\n",
      "[2200]\ttraining's auc: 0.854848\tvalid_1's auc: 0.784101\n",
      "[2300]\ttraining's auc: 0.857132\tvalid_1's auc: 0.784321\n",
      "[2400]\ttraining's auc: 0.859424\tvalid_1's auc: 0.784533\n",
      "[2500]\ttraining's auc: 0.861434\tvalid_1's auc: 0.784676\n",
      "[2600]\ttraining's auc: 0.863617\tvalid_1's auc: 0.784866\n",
      "[2700]\ttraining's auc: 0.865634\tvalid_1's auc: 0.785047\n",
      "[2800]\ttraining's auc: 0.867801\tvalid_1's auc: 0.785301\n",
      "[2900]\ttraining's auc: 0.86977\tvalid_1's auc: 0.785502\n",
      "[3000]\ttraining's auc: 0.871624\tvalid_1's auc: 0.78561\n",
      "[3100]\ttraining's auc: 0.873643\tvalid_1's auc: 0.785687\n",
      "[3200]\ttraining's auc: 0.875482\tvalid_1's auc: 0.785789\n",
      "[3300]\ttraining's auc: 0.877406\tvalid_1's auc: 0.785928\n",
      "[3400]\ttraining's auc: 0.879086\tvalid_1's auc: 0.786014\n",
      "[3500]\ttraining's auc: 0.880881\tvalid_1's auc: 0.786113\n",
      "[3600]\ttraining's auc: 0.882794\tvalid_1's auc: 0.786304\n",
      "[3700]\ttraining's auc: 0.884501\tvalid_1's auc: 0.786409\n",
      "[3800]\ttraining's auc: 0.886208\tvalid_1's auc: 0.786506\n",
      "[3900]\ttraining's auc: 0.887761\tvalid_1's auc: 0.786589\n",
      "[4000]\ttraining's auc: 0.889376\tvalid_1's auc: 0.786667\n",
      "[4100]\ttraining's auc: 0.891007\tvalid_1's auc: 0.786785\n",
      "[4200]\ttraining's auc: 0.892761\tvalid_1's auc: 0.786917\n",
      "[4300]\ttraining's auc: 0.894418\tvalid_1's auc: 0.786955\n",
      "[4400]\ttraining's auc: 0.895987\tvalid_1's auc: 0.787037\n",
      "[4500]\ttraining's auc: 0.897466\tvalid_1's auc: 0.787114\n",
      "[4600]\ttraining's auc: 0.89892\tvalid_1's auc: 0.787105\n",
      "[4700]\ttraining's auc: 0.900479\tvalid_1's auc: 0.787172\n",
      "[4800]\ttraining's auc: 0.901936\tvalid_1's auc: 0.787226\n",
      "[4900]\ttraining's auc: 0.903497\tvalid_1's auc: 0.787271\n",
      "[5000]\ttraining's auc: 0.905\tvalid_1's auc: 0.787387\n",
      "[5100]\ttraining's auc: 0.906559\tvalid_1's auc: 0.787445\n",
      "[5200]\ttraining's auc: 0.90789\tvalid_1's auc: 0.787524\n",
      "[5300]\ttraining's auc: 0.909243\tvalid_1's auc: 0.787615\n",
      "[5400]\ttraining's auc: 0.910595\tvalid_1's auc: 0.787651\n",
      "[5500]\ttraining's auc: 0.911863\tvalid_1's auc: 0.787675\n",
      "[5600]\ttraining's auc: 0.913147\tvalid_1's auc: 0.78774\n",
      "[5700]\ttraining's auc: 0.914483\tvalid_1's auc: 0.787783\n",
      "[5800]\ttraining's auc: 0.915721\tvalid_1's auc: 0.787822\n",
      "[5900]\ttraining's auc: 0.917057\tvalid_1's auc: 0.787834\n",
      "[6000]\ttraining's auc: 0.918357\tvalid_1's auc: 0.787908\n",
      "[6100]\ttraining's auc: 0.919613\tvalid_1's auc: 0.787991\n",
      "[6200]\ttraining's auc: 0.92087\tvalid_1's auc: 0.788096\n",
      "[6300]\ttraining's auc: 0.922004\tvalid_1's auc: 0.788132\n",
      "[6400]\ttraining's auc: 0.923226\tvalid_1's auc: 0.788166\n",
      "[6500]\ttraining's auc: 0.924347\tvalid_1's auc: 0.788185\n",
      "[6600]\ttraining's auc: 0.92551\tvalid_1's auc: 0.788241\n",
      "[6700]\ttraining's auc: 0.926664\tvalid_1's auc: 0.788291\n",
      "[6800]\ttraining's auc: 0.927935\tvalid_1's auc: 0.788342\n",
      "[6900]\ttraining's auc: 0.929078\tvalid_1's auc: 0.788357\n",
      "[7000]\ttraining's auc: 0.930194\tvalid_1's auc: 0.788394\n",
      "[7100]\ttraining's auc: 0.931351\tvalid_1's auc: 0.788508\n",
      "Did not meet early stopping. Best iteration is:\n",
      "[7159]\ttraining's auc: 0.932056\tvalid_1's auc: 0.788531\n",
      "AUC: 0.7885306006028793\n",
      "Fold 2 finished in 0:05:26.456440\n",
      "Training on fold 3\n",
      "Training until validation scores don't improve for 150 rounds\n",
      "[100]\ttraining's auc: 0.760125\tvalid_1's auc: 0.747351\n",
      "[200]\ttraining's auc: 0.774848\tvalid_1's auc: 0.758629\n",
      "[300]\ttraining's auc: 0.785316\tvalid_1's auc: 0.765341\n",
      "[400]\ttraining's auc: 0.793148\tvalid_1's auc: 0.769514\n",
      "[500]\ttraining's auc: 0.799603\tvalid_1's auc: 0.77223\n",
      "[600]\ttraining's auc: 0.804973\tvalid_1's auc: 0.774281\n",
      "[700]\ttraining's auc: 0.809857\tvalid_1's auc: 0.775642\n",
      "[800]\ttraining's auc: 0.814226\tvalid_1's auc: 0.776711\n",
      "[900]\ttraining's auc: 0.818442\tvalid_1's auc: 0.777627\n",
      "[1000]\ttraining's auc: 0.822153\tvalid_1's auc: 0.778319\n",
      "[1100]\ttraining's auc: 0.825607\tvalid_1's auc: 0.778941\n",
      "[1200]\ttraining's auc: 0.829026\tvalid_1's auc: 0.779489\n",
      "[1300]\ttraining's auc: 0.832062\tvalid_1's auc: 0.779906\n",
      "[1400]\ttraining's auc: 0.83507\tvalid_1's auc: 0.780306\n",
      "[1500]\ttraining's auc: 0.837942\tvalid_1's auc: 0.780618\n",
      "[1600]\ttraining's auc: 0.840708\tvalid_1's auc: 0.780989\n",
      "[1700]\ttraining's auc: 0.843242\tvalid_1's auc: 0.781267\n",
      "[1800]\ttraining's auc: 0.845634\tvalid_1's auc: 0.78153\n",
      "[1900]\ttraining's auc: 0.847967\tvalid_1's auc: 0.781759\n",
      "[2000]\ttraining's auc: 0.850346\tvalid_1's auc: 0.781994\n",
      "[2100]\ttraining's auc: 0.852526\tvalid_1's auc: 0.782181\n",
      "[2200]\ttraining's auc: 0.854722\tvalid_1's auc: 0.782371\n",
      "[2300]\ttraining's auc: 0.856801\tvalid_1's auc: 0.782505\n",
      "[2400]\ttraining's auc: 0.85896\tvalid_1's auc: 0.782663\n",
      "[2500]\ttraining's auc: 0.861058\tvalid_1's auc: 0.782806\n",
      "[2600]\ttraining's auc: 0.86315\tvalid_1's auc: 0.782927\n",
      "[2700]\ttraining's auc: 0.865163\tvalid_1's auc: 0.783043\n",
      "[2800]\ttraining's auc: 0.867159\tvalid_1's auc: 0.783182\n",
      "[2900]\ttraining's auc: 0.869138\tvalid_1's auc: 0.783313\n",
      "[3000]\ttraining's auc: 0.870983\tvalid_1's auc: 0.783455\n",
      "[3100]\ttraining's auc: 0.872785\tvalid_1's auc: 0.783595\n",
      "[3200]\ttraining's auc: 0.874551\tvalid_1's auc: 0.783699\n",
      "[3300]\ttraining's auc: 0.876162\tvalid_1's auc: 0.783856\n",
      "[3400]\ttraining's auc: 0.877953\tvalid_1's auc: 0.783927\n",
      "[3500]\ttraining's auc: 0.879589\tvalid_1's auc: 0.784009\n",
      "[3600]\ttraining's auc: 0.881363\tvalid_1's auc: 0.7841\n",
      "[3700]\ttraining's auc: 0.882965\tvalid_1's auc: 0.784224\n",
      "[3800]\ttraining's auc: 0.884759\tvalid_1's auc: 0.784293\n",
      "[3900]\ttraining's auc: 0.886405\tvalid_1's auc: 0.784342\n",
      "[4000]\ttraining's auc: 0.888027\tvalid_1's auc: 0.784466\n",
      "[4100]\ttraining's auc: 0.889706\tvalid_1's auc: 0.784588\n",
      "[4200]\ttraining's auc: 0.891338\tvalid_1's auc: 0.784671\n",
      "[4300]\ttraining's auc: 0.892931\tvalid_1's auc: 0.784793\n",
      "[4400]\ttraining's auc: 0.894669\tvalid_1's auc: 0.784801\n",
      "[4500]\ttraining's auc: 0.896199\tvalid_1's auc: 0.784869\n",
      "[4600]\ttraining's auc: 0.897726\tvalid_1's auc: 0.784914\n",
      "[4700]\ttraining's auc: 0.899184\tvalid_1's auc: 0.784991\n",
      "[4800]\ttraining's auc: 0.900712\tvalid_1's auc: 0.78505\n",
      "[4900]\ttraining's auc: 0.902276\tvalid_1's auc: 0.785142\n",
      "[5000]\ttraining's auc: 0.903841\tvalid_1's auc: 0.785247\n",
      "[5100]\ttraining's auc: 0.905289\tvalid_1's auc: 0.785349\n",
      "[5200]\ttraining's auc: 0.906626\tvalid_1's auc: 0.785398\n",
      "[5300]\ttraining's auc: 0.908145\tvalid_1's auc: 0.785454\n",
      "[5400]\ttraining's auc: 0.909475\tvalid_1's auc: 0.785505\n",
      "[5500]\ttraining's auc: 0.910823\tvalid_1's auc: 0.785567\n",
      "[5600]\ttraining's auc: 0.912174\tvalid_1's auc: 0.785601\n",
      "[5700]\ttraining's auc: 0.913487\tvalid_1's auc: 0.785664\n",
      "[5800]\ttraining's auc: 0.914839\tvalid_1's auc: 0.785699\n",
      "[5900]\ttraining's auc: 0.916303\tvalid_1's auc: 0.785734\n",
      "[6000]\ttraining's auc: 0.917651\tvalid_1's auc: 0.785731\n",
      "[6100]\ttraining's auc: 0.918842\tvalid_1's auc: 0.785732\n",
      "[6200]\ttraining's auc: 0.920074\tvalid_1's auc: 0.785733\n",
      "Early stopping, best iteration is:\n",
      "[6065]\ttraining's auc: 0.918445\tvalid_1's auc: 0.785776\n",
      "AUC: 0.7857764379446359\n",
      "Fold 3 finished in 0:05:09.575831\n",
      "Training on fold 4\n",
      "Training until validation scores don't improve for 150 rounds\n",
      "[100]\ttraining's auc: 0.758964\tvalid_1's auc: 0.752958\n",
      "[200]\ttraining's auc: 0.773583\tvalid_1's auc: 0.764403\n",
      "[300]\ttraining's auc: 0.78375\tvalid_1's auc: 0.770976\n",
      "[400]\ttraining's auc: 0.791597\tvalid_1's auc: 0.775188\n",
      "[500]\ttraining's auc: 0.798077\tvalid_1's auc: 0.778082\n",
      "[600]\ttraining's auc: 0.803647\tvalid_1's auc: 0.780156\n",
      "[700]\ttraining's auc: 0.808605\tvalid_1's auc: 0.781762\n",
      "[800]\ttraining's auc: 0.812973\tvalid_1's auc: 0.782973\n",
      "[900]\ttraining's auc: 0.816916\tvalid_1's auc: 0.784049\n",
      "[1000]\ttraining's auc: 0.820494\tvalid_1's auc: 0.78483\n",
      "[1100]\ttraining's auc: 0.823983\tvalid_1's auc: 0.785438\n",
      "[1200]\ttraining's auc: 0.827131\tvalid_1's auc: 0.785936\n",
      "[1300]\ttraining's auc: 0.830106\tvalid_1's auc: 0.786308\n",
      "[1400]\ttraining's auc: 0.8331\tvalid_1's auc: 0.78672\n",
      "[1500]\ttraining's auc: 0.835825\tvalid_1's auc: 0.787053\n",
      "[1600]\ttraining's auc: 0.838672\tvalid_1's auc: 0.787384\n",
      "[1700]\ttraining's auc: 0.841317\tvalid_1's auc: 0.787749\n",
      "[1800]\ttraining's auc: 0.843828\tvalid_1's auc: 0.788069\n",
      "[1900]\ttraining's auc: 0.846227\tvalid_1's auc: 0.788364\n",
      "[2000]\ttraining's auc: 0.848552\tvalid_1's auc: 0.788543\n",
      "[2100]\ttraining's auc: 0.850826\tvalid_1's auc: 0.788742\n",
      "[2200]\ttraining's auc: 0.853188\tvalid_1's auc: 0.788915\n",
      "[2300]\ttraining's auc: 0.855415\tvalid_1's auc: 0.789072\n",
      "[2400]\ttraining's auc: 0.857507\tvalid_1's auc: 0.789218\n",
      "[2500]\ttraining's auc: 0.859647\tvalid_1's auc: 0.789337\n",
      "[2600]\ttraining's auc: 0.861818\tvalid_1's auc: 0.789575\n",
      "[2700]\ttraining's auc: 0.863789\tvalid_1's auc: 0.789703\n",
      "[2800]\ttraining's auc: 0.86586\tvalid_1's auc: 0.789752\n",
      "[2900]\ttraining's auc: 0.867987\tvalid_1's auc: 0.789903\n",
      "[3000]\ttraining's auc: 0.87\tvalid_1's auc: 0.790007\n",
      "[3100]\ttraining's auc: 0.871903\tvalid_1's auc: 0.790127\n",
      "[3200]\ttraining's auc: 0.873894\tvalid_1's auc: 0.790214\n",
      "[3300]\ttraining's auc: 0.875918\tvalid_1's auc: 0.790298\n",
      "[3400]\ttraining's auc: 0.877752\tvalid_1's auc: 0.790418\n",
      "[3500]\ttraining's auc: 0.879657\tvalid_1's auc: 0.790536\n",
      "[3600]\ttraining's auc: 0.881495\tvalid_1's auc: 0.790592\n",
      "[3700]\ttraining's auc: 0.883188\tvalid_1's auc: 0.790662\n",
      "[3800]\ttraining's auc: 0.885056\tvalid_1's auc: 0.790692\n",
      "[3900]\ttraining's auc: 0.886768\tvalid_1's auc: 0.790748\n",
      "[4000]\ttraining's auc: 0.888653\tvalid_1's auc: 0.790815\n",
      "[4100]\ttraining's auc: 0.890423\tvalid_1's auc: 0.79087\n",
      "[4200]\ttraining's auc: 0.892204\tvalid_1's auc: 0.790903\n",
      "[4300]\ttraining's auc: 0.89389\tvalid_1's auc: 0.791002\n",
      "[4400]\ttraining's auc: 0.895599\tvalid_1's auc: 0.791049\n",
      "[4500]\ttraining's auc: 0.897168\tvalid_1's auc: 0.791099\n",
      "[4600]\ttraining's auc: 0.898877\tvalid_1's auc: 0.791136\n",
      "[4700]\ttraining's auc: 0.900409\tvalid_1's auc: 0.791133\n",
      "[4800]\ttraining's auc: 0.901946\tvalid_1's auc: 0.791204\n",
      "[4900]\ttraining's auc: 0.903376\tvalid_1's auc: 0.791219\n",
      "[5000]\ttraining's auc: 0.904903\tvalid_1's auc: 0.791297\n",
      "[5100]\ttraining's auc: 0.906359\tvalid_1's auc: 0.791251\n",
      "[5200]\ttraining's auc: 0.907783\tvalid_1's auc: 0.791335\n",
      "[5300]\ttraining's auc: 0.909228\tvalid_1's auc: 0.791353\n",
      "Early stopping, best iteration is:\n",
      "[5231]\ttraining's auc: 0.908308\tvalid_1's auc: 0.791378\n",
      "AUC: 0.7913778071950501\n",
      "Fold 4 finished in 0:04:52.701584\n",
      "Training on fold 5\n",
      "Training until validation scores don't improve for 150 rounds\n",
      "[100]\ttraining's auc: 0.758187\tvalid_1's auc: 0.751794\n",
      "[200]\ttraining's auc: 0.773279\tvalid_1's auc: 0.763306\n",
      "[300]\ttraining's auc: 0.783595\tvalid_1's auc: 0.770214\n",
      "[400]\ttraining's auc: 0.791595\tvalid_1's auc: 0.774878\n",
      "[500]\ttraining's auc: 0.798101\tvalid_1's auc: 0.777761\n",
      "[600]\ttraining's auc: 0.803706\tvalid_1's auc: 0.779867\n",
      "[700]\ttraining's auc: 0.808619\tvalid_1's auc: 0.781336\n",
      "[800]\ttraining's auc: 0.813183\tvalid_1's auc: 0.782571\n",
      "[900]\ttraining's auc: 0.817203\tvalid_1's auc: 0.783585\n",
      "[1000]\ttraining's auc: 0.821063\tvalid_1's auc: 0.784464\n",
      "[1100]\ttraining's auc: 0.824568\tvalid_1's auc: 0.785158\n",
      "[1200]\ttraining's auc: 0.827807\tvalid_1's auc: 0.785738\n",
      "[1300]\ttraining's auc: 0.830945\tvalid_1's auc: 0.786163\n",
      "[1400]\ttraining's auc: 0.833881\tvalid_1's auc: 0.786587\n",
      "[1500]\ttraining's auc: 0.836694\tvalid_1's auc: 0.787008\n",
      "[1600]\ttraining's auc: 0.839453\tvalid_1's auc: 0.787363\n",
      "[1700]\ttraining's auc: 0.842197\tvalid_1's auc: 0.787661\n",
      "[1800]\ttraining's auc: 0.844704\tvalid_1's auc: 0.787887\n",
      "[1900]\ttraining's auc: 0.847341\tvalid_1's auc: 0.788147\n",
      "[2000]\ttraining's auc: 0.849895\tvalid_1's auc: 0.788399\n",
      "[2100]\ttraining's auc: 0.85227\tvalid_1's auc: 0.788627\n",
      "[2200]\ttraining's auc: 0.854511\tvalid_1's auc: 0.788761\n",
      "[2300]\ttraining's auc: 0.856924\tvalid_1's auc: 0.788943\n",
      "[2400]\ttraining's auc: 0.85895\tvalid_1's auc: 0.789091\n",
      "[2500]\ttraining's auc: 0.861194\tvalid_1's auc: 0.789268\n",
      "[2600]\ttraining's auc: 0.863238\tvalid_1's auc: 0.789452\n",
      "[2700]\ttraining's auc: 0.865293\tvalid_1's auc: 0.789571\n",
      "[2800]\ttraining's auc: 0.86734\tvalid_1's auc: 0.789637\n",
      "[2900]\ttraining's auc: 0.869237\tvalid_1's auc: 0.789766\n",
      "[3000]\ttraining's auc: 0.871174\tvalid_1's auc: 0.789862\n",
      "[3100]\ttraining's auc: 0.873105\tvalid_1's auc: 0.789968\n",
      "[3200]\ttraining's auc: 0.875114\tvalid_1's auc: 0.790007\n",
      "[3300]\ttraining's auc: 0.877042\tvalid_1's auc: 0.790069\n",
      "[3400]\ttraining's auc: 0.878875\tvalid_1's auc: 0.790129\n",
      "[3500]\ttraining's auc: 0.880606\tvalid_1's auc: 0.790207\n",
      "[3600]\ttraining's auc: 0.882473\tvalid_1's auc: 0.790298\n",
      "[3700]\ttraining's auc: 0.884215\tvalid_1's auc: 0.790374\n",
      "[3800]\ttraining's auc: 0.886025\tvalid_1's auc: 0.790439\n",
      "[3900]\ttraining's auc: 0.887709\tvalid_1's auc: 0.790471\n",
      "[4000]\ttraining's auc: 0.889468\tvalid_1's auc: 0.790563\n",
      "[4100]\ttraining's auc: 0.891236\tvalid_1's auc: 0.790568\n",
      "[4200]\ttraining's auc: 0.892959\tvalid_1's auc: 0.790655\n",
      "[4300]\ttraining's auc: 0.894532\tvalid_1's auc: 0.790714\n",
      "[4400]\ttraining's auc: 0.896123\tvalid_1's auc: 0.79079\n",
      "[4500]\ttraining's auc: 0.8978\tvalid_1's auc: 0.790873\n",
      "[4600]\ttraining's auc: 0.899345\tvalid_1's auc: 0.790926\n",
      "[4700]\ttraining's auc: 0.900896\tvalid_1's auc: 0.791022\n",
      "[4800]\ttraining's auc: 0.902404\tvalid_1's auc: 0.791033\n",
      "[4900]\ttraining's auc: 0.903872\tvalid_1's auc: 0.791055\n",
      "[5000]\ttraining's auc: 0.90535\tvalid_1's auc: 0.791105\n",
      "[5100]\ttraining's auc: 0.906881\tvalid_1's auc: 0.791224\n",
      "[5200]\ttraining's auc: 0.908331\tvalid_1's auc: 0.791279\n",
      "[5300]\ttraining's auc: 0.909825\tvalid_1's auc: 0.791338\n",
      "[5400]\ttraining's auc: 0.911196\tvalid_1's auc: 0.791388\n",
      "[5500]\ttraining's auc: 0.912599\tvalid_1's auc: 0.791431\n",
      "[5600]\ttraining's auc: 0.914046\tvalid_1's auc: 0.791475\n",
      "[5700]\ttraining's auc: 0.915536\tvalid_1's auc: 0.791485\n",
      "[5800]\ttraining's auc: 0.916863\tvalid_1's auc: 0.79154\n",
      "[5900]\ttraining's auc: 0.918167\tvalid_1's auc: 0.791583\n",
      "[6000]\ttraining's auc: 0.919517\tvalid_1's auc: 0.791666\n",
      "[6100]\ttraining's auc: 0.920837\tvalid_1's auc: 0.791693\n",
      "[6200]\ttraining's auc: 0.922019\tvalid_1's auc: 0.791693\n",
      "[6300]\ttraining's auc: 0.923282\tvalid_1's auc: 0.791735\n",
      "[6400]\ttraining's auc: 0.924469\tvalid_1's auc: 0.791773\n",
      "[6500]\ttraining's auc: 0.925719\tvalid_1's auc: 0.791807\n",
      "[6600]\ttraining's auc: 0.926967\tvalid_1's auc: 0.791805\n",
      "[6700]\ttraining's auc: 0.928082\tvalid_1's auc: 0.791804\n",
      "[6800]\ttraining's auc: 0.9293\tvalid_1's auc: 0.791855\n",
      "[6900]\ttraining's auc: 0.930417\tvalid_1's auc: 0.791849\n",
      "Early stopping, best iteration is:\n",
      "[6844]\ttraining's auc: 0.929779\tvalid_1's auc: 0.791883\n",
      "AUC: 0.7918826906680527\n",
      "Fold 5 finished in 0:11:55.867708\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "   INFO ->  (145231, 1980)\n",
      "   INFO ->  [+]tuning params\n",
      "\u001b[32m[I 2021-09-10 20:25:27,806]\u001b[0m A new study created in memory with name: XGBClassifier\u001b[0m\n",
      "\u001b[32m[I 2021-09-10 20:37:05,587]\u001b[0m Trial 0 finished with value: 0.6263200655205812 and parameters: {'max_depth': 10, 'subsample': 0.7, 'n_estimators': 1900, 'reg_alpha': 17, 'reg_lambda': 80, 'min_child_weight': 15}. Best is trial 0 with value: 0.6263200655205812.\u001b[0m\n",
      "   INFO ->  Number of finished trials: 1\n",
      "   INFO ->  Best trial:\n",
      "   INFO ->  \tValue: 0.6263200655205812\n",
      "   INFO ->  \tParams: \n",
      "   INFO ->  \t\tmax_depth: 10\n",
      "   INFO ->  \t\tsubsample: 0.7\n",
      "   INFO ->  \t\tn_estimators: 1900\n",
      "   INFO ->  \t\treg_alpha: 17\n",
      "   INFO ->  \t\treg_lambda: 80\n",
      "   INFO ->  \t\tmin_child_weight: 15\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Training on fold 1\n",
      "AUC: 0.634809142077796\n",
      "Fold 1 finished in 0:14:51.756243\n",
      "Training on fold 2\n",
      "AUC: 0.6267281596847132\n",
      "Fold 2 finished in 0:14:56.875634\n",
      "Training on fold 3\n",
      "AUC: 0.6367668404858736\n",
      "Fold 3 finished in 0:14:48.296170\n",
      "Training on fold 4\n",
      "AUC: 0.6371067556737713\n",
      "Fold 4 finished in 0:14:50.299808\n",
      "Training on fold 5\n",
      "AUC: 0.6282573036752905\n",
      "Fold 5 finished in 0:14:33.144116\n",
      "Training on fold 6\n",
      "AUC: 0.6347908403921502\n",
      "Fold 6 finished in 0:14:38.552515\n",
      "Training on fold 7\n",
      "AUC: 0.6306746883449069\n",
      "Fold 7 finished in 0:14:37.093673\n",
      "Training on fold 8\n",
      "AUC: 0.6300445090984811\n",
      "Fold 8 finished in 0:14:40.429062\n",
      "Training on fold 9\n",
      "AUC: 0.6344819711905562\n",
      "Fold 9 finished in 0:14:36.076913\n",
      "Training on fold 10\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "   INFO ->  Average KFold AUC: 0.6318350391213036\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "AUC: 0.6246901805894965\n",
      "Fold 10 finished in 0:14:38.158710\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "   INFO ->  feature importance\n",
      "   INFO ->         feature  fold_1  fold_2  fold_3  fold_4  fold_5  average\n",
      "0     VAR_0004     946    1099    1041     998    1149   1046.6\n",
      "1     VAR_0003     664     742     769     655     816    729.2\n",
      "2     VAR_0212     529     829     624     497     876    671.0\n",
      "3     VAR_0241     613     621     537     517     644    586.4\n",
      "4     VAR_0968     517     584     583     581     642    581.4\n",
      "...        ...     ...     ...     ...     ...     ...      ...\n",
      "1975  VAR_0438       0       0       0       0       0      0.0\n",
      "1976  VAR_0130       0       0       0       0       0      0.0\n",
      "1977  VAR_0138       0       0       0       0       0      0.0\n",
      "1978  VAR_0411       0       0       0       0       0      0.0\n",
      "1979  VAR_0847       0       0       0       0       0      0.0\n",
      "\n",
      "[1980 rows x 7 columns]\n"
     ]
    }
   ],
   "source": [
    "sub = autox.get_submit()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "acc2fce8",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-09-10T15:05:26.433343Z",
     "start_time": "2021-09-10T15:05:26.430112Z"
    }
   },
   "outputs": [],
   "source": [
    "target = 'target'\n",
    "id_ = ['ID']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "0daecefe",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-09-10T15:05:26.446882Z",
     "start_time": "2021-09-10T15:05:26.434971Z"
    },
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>ID</th>\n",
       "      <th>target</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1</td>\n",
       "      <td>0.174472</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>3</td>\n",
       "      <td>0.152863</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>6</td>\n",
       "      <td>0.144397</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>9</td>\n",
       "      <td>0.104494</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>10</td>\n",
       "      <td>0.788660</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>145227</th>\n",
       "      <td>290451</td>\n",
       "      <td>0.012320</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>145228</th>\n",
       "      <td>290455</td>\n",
       "      <td>0.148331</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>145229</th>\n",
       "      <td>290456</td>\n",
       "      <td>0.044773</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>145230</th>\n",
       "      <td>290460</td>\n",
       "      <td>0.109702</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>145231</th>\n",
       "      <td>290462</td>\n",
       "      <td>0.035168</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>145232 rows × 2 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "            ID    target\n",
       "0            1  0.174472\n",
       "1            3  0.152863\n",
       "2            6  0.144397\n",
       "3            9  0.104494\n",
       "4           10  0.788660\n",
       "...        ...       ...\n",
       "145227  290451  0.012320\n",
       "145228  290455  0.148331\n",
       "145229  290456  0.044773\n",
       "145230  290460  0.109702\n",
       "145231  290462  0.035168\n",
       "\n",
       "[145232 rows x 2 columns]"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "sub"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "82900122",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-09-10T15:05:26.769656Z",
     "start_time": "2021-09-10T15:05:26.448512Z"
    }
   },
   "outputs": [],
   "source": [
    "sub.to_csv(\"./sub/autox_kaggle_springleaf_0910.csv\", index = False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2e729948",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.10"
  },
  "toc": {
   "base_numbering": 1,
   "nav_menu": {},
   "number_sections": true,
   "sideBar": true,
   "skip_h1_title": false,
   "title_cell": "Table of Contents",
   "title_sidebar": "Contents",
   "toc_cell": false,
   "toc_position": {},
   "toc_section_display": true,
   "toc_window_display": true
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
