{"metadata":{"kernelspec":{"language":"python","display_name":"Python 3","name":"python3"},"language_info":{"name":"python","version":"3.11.13","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"},"kaggle":{"accelerator":"gpu","dataSources":[{"sourceId":10385,"databundleVersionId":298493,"sourceType":"competition"}],"dockerImageVersionId":31089,"isInternetEnabled":true,"language":"python","sourceType":"notebook","isGpuEnabled":true}},"nbformat_minor":4,"nbformat":4,"cells":[{"cell_type":"markdown","source":"# 导入库","metadata":{}},{"cell_type":"code","source":"import pandas as pd\nfrom tqdm import tqdm\nimport numpy as np\nimport scipy.ndimage\nfrom sklearn.preprocessing import StandardScaler\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nimport os\nimport lightgbm as lgb\nfrom sklearn.model_selection import StratifiedKFold \nimport joblib\nimport optuna\nfrom sklearn.model_selection import train_test_split\nfrom lightgbm import create_tree_digraph\nfrom sklearn.metrics import accuracy_score, roc_auc_score, classification_report, confusion_matrix, log_loss\nimport warnings\nwarnings.filterwarnings('ignore')","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:48:36.870455Z","iopub.execute_input":"2025-09-19T13:48:36.871216Z","iopub.status.idle":"2025-09-19T13:48:39.144623Z","shell.execute_reply.started":"2025-09-19T13:48:36.871181Z","shell.execute_reply":"2025-09-19T13:48:39.143868Z"}},"outputs":[],"execution_count":1},{"cell_type":"markdown","source":"# 导入数据","metadata":{}},{"cell_type":"code","source":"train=pd.read_csv(\"/kaggle/input/santander-customer-transaction-prediction/train.csv\").drop(['ID_code'],axis=1)\ntest=pd.read_csv(\"/kaggle/input/santander-customer-transaction-prediction/test.csv\").drop(['ID_code'],axis=1)","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:48:39.465660Z","iopub.execute_input":"2025-09-19T13:48:39.466255Z","iopub.status.idle":"2025-09-19T13:48:50.881295Z","shell.execute_reply.started":"2025-09-19T13:48:39.466226Z","shell.execute_reply":"2025-09-19T13:48:50.880525Z"}},"outputs":[],"execution_count":2},{"cell_type":"markdown","source":"# 找到伪造test数据集","metadata":{}},{"cell_type":"markdown","source":"我们发现在test中存在部分由原来数据合成的伪造test数据，通过统计每个数在该列中出现的次数，如果一整行数据都是非唯一值，那么说明该数据是由其他列拼凑起来的，默认其结果为0","metadata":{}},{"cell_type":"code","source":"te_=test.values\n#如果一行数据中的每一列的数据在其他行中都有出现过说明这行数据是伪造数据\nunique_samples = []\nunique_count = np.zeros_like(te_)\nfor feature in tqdm(range(te_.shape[1])):\n    _, index_, count_ = np.unique(te_[:, feature], return_counts=True, return_index=True)\n    unique_count[index_[count_ == 1], feature] += 1\n\n# Samples which have unique values are real the others are fake\nreal_samples_indexes = np.argwhere(np.sum(unique_count, axis=1) > 0)[:, 0]\nsynthetic_samples_indexes = np.argwhere(np.sum(unique_count, axis=1) == 0)[:, 0]","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:48:50.882668Z","iopub.execute_input":"2025-09-19T13:48:50.882957Z","iopub.status.idle":"2025-09-19T13:48:56.559443Z","shell.execute_reply.started":"2025-09-19T13:48:50.882934Z","shell.execute_reply":"2025-09-19T13:48:56.558865Z"}},"outputs":[{"name":"stderr","text":"100%|██████████| 200/200 [00:05<00:00, 36.27it/s]\n","output_type":"stream"}],"execution_count":3},{"cell_type":"code","source":"X_test=test.iloc[real_samples_indexes,:]\nX_fake=test.iloc[synthetic_samples_indexes,:]","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:48:56.560135Z","iopub.execute_input":"2025-09-19T13:48:56.560347Z","iopub.status.idle":"2025-09-19T13:48:56.705873Z","shell.execute_reply.started":"2025-09-19T13:48:56.560328Z","shell.execute_reply":"2025-09-19T13:48:56.705192Z"}},"outputs":[],"execution_count":4},{"cell_type":"code","source":"features = [c for c in train.columns if c not in ['ID_code', 'target']]\nX_train=train.drop(['target'],axis=1)\ntarget_train=train['target']\nX_all = pd.concat([X_train, X_test])","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:48:56.707677Z","iopub.execute_input":"2025-09-19T13:48:56.707945Z","iopub.status.idle":"2025-09-19T13:48:56.962232Z","shell.execute_reply.started":"2025-09-19T13:48:56.707925Z","shell.execute_reply":"2025-09-19T13:48:56.961590Z"}},"outputs":[],"execution_count":5},{"cell_type":"markdown","source":"# 预处理","metadata":{}},{"cell_type":"markdown","source":"## 删除低相关性和低重要性的特征","metadata":{}},{"cell_type":"code","source":"drop_vars = [100,10,96,17,7,161,126,136,38,98,103,117,124,158,27,30,185]\n\nvar_len = 200 - len(drop_vars)","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:48:56.963015Z","iopub.execute_input":"2025-09-19T13:48:56.963246Z","iopub.status.idle":"2025-09-19T13:48:56.967538Z","shell.execute_reply.started":"2025-09-19T13:48:56.963227Z","shell.execute_reply":"2025-09-19T13:48:56.966866Z"}},"outputs":[],"execution_count":6},{"cell_type":"code","source":"drop_cols = X_train.columns[drop_vars]\n\n# 删除对应列\nX_train = X_train.drop(columns=drop_cols)\nX_test = X_test.drop(columns=drop_cols)\nX_fake = X_fake.drop(columns=drop_cols)\nX_all = pd.concat([X_train, X_test])\nfeatures = [c for c in X_all.columns if c not in ['ID_code']]","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:48:56.968338Z","iopub.execute_input":"2025-09-19T13:48:56.968582Z","iopub.status.idle":"2025-09-19T13:48:57.301775Z","shell.execute_reply.started":"2025-09-19T13:48:56.968560Z","shell.execute_reply":"2025-09-19T13:48:57.301185Z"}},"outputs":[],"execution_count":7},{"cell_type":"markdown","source":"# 翻转","metadata":{}},{"cell_type":"code","source":"# reverse_list = [0, 1, 2, 3, 4, 5, 6, 7, 8, 11, 15, 16, 18, 19, 22, 24, 25, 26,\n#                 27, 29, 32, 35, 37, 40, 41, 47, 48, 49, 51, 52, 53, 55, 60, 61,\n#                 62, 65, 66, 67, 69, 70, 71, 74, 78, 79, 82, 84, 89, 90, 91, 94,\n#                 95, 96, 97, 99, 103, 105, 106, 110, 111, 112, 118, 119, 125, 128,\n#                 130, 133, 134, 135, 137, 138, 140, 144, 145, 147, 151, 155, 157,\n#                 159, 161, 162, 163, 164, 167, 168, 170, 171, 173, 175, 176, 179,\n#                 180, 181, 184, 185, 187, 189, 190, 191, 195, 196, 199,\n                \n#                 ]\n\n# for j in reverse_list:\n#     X[:, j] *= -1","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"markdown","source":"## 简单计数编码","metadata":{}},{"cell_type":"code","source":"def count_encode_and_merge(df: pd.DataFrame, target_column) -> pd.DataFrame:\n    \"\"\"\n    对每个特征列进行计数编码，并将计数编码后的列与原始数据合并。\n\n    参数:\n    ----\n    df : DataFrame\n        输入的原始数据表（不包含目标列）。\n    target_column : str, optional\n        目标列的名称（默认值为 None）。如果提供，则从 `df` 中删除目标列。\n\n    返回:\n    ----\n    DataFrame\n        合并后的数据，包括原始数据和计数编码后的列。\n    \"\"\"\n    # 如果指定了 target_column，删除该列\n    if target_column:\n        df = df.drop(columns=[target_column])\n    \n    # 对每一列进行计数编码\n    count_encoded_df = df.apply(lambda col: col.map(col.value_counts()))\n    \n    # 将计数编码后的列与原数据合并\n    df_encoded = pd.concat([df, count_encoded_df.add_suffix('_count')], axis=1)\n\n    features_count=count_encoded_df.add_suffix('_count').columns\n    \n    return df_encoded,features_count","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"X_train,features_count = count_encode_and_merge(X_train, target_column=None)\nX_test,features_count = count_encode_and_merge(X_test, target_column=None)\nX_fake,features_count = count_encode_and_merge(X_fake, target_column=None)\nX_train.columns = X_train.columns.astype(str)\nX_test.columns = X_test.columns.astype(str)\nX_fake.columns = X_fake.columns.astype(str)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"X_all = pd.concat([X_train, X_test])","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"markdown","source":"## 计数加高斯平滑密度以及偏差","metadata":{}},{"cell_type":"code","source":"sigma_fac = 0.001\nsigma_base = 4\n\neps = 0.00000001\n\ndef get_count(X_all, X_fake):\n    features_count = np.zeros((X_all.shape[0], len(features)))\n    features_density = np.zeros((X_all.shape[0], len(features)))\n    features_deviation = np.zeros((X_all.shape[0], len(features)))\n\n    features_count_fake = np.zeros((X_fake.shape[0], len(features)))\n    features_density_fake = np.zeros((X_fake.shape[0], len(features)))\n    features_deviation_fake = np.zeros((X_fake.shape[0], len(features)))\n    \n    sigmas = []\n\n    for i,var in enumerate(tqdm(features)):\n        X_all_var_int = (X_all[var].values * 10000).round().astype(int)\n        X_fake_var_int = (X_fake[var].values * 10000).round().astype(int)\n        lo = X_all_var_int.min()\n        X_all_var_int -= lo\n        X_fake_var_int -= lo\n        hi = X_all_var_int.max()+1\n        counts_all = np.bincount(X_all_var_int, minlength=hi).astype(float)\n        zeros = (counts_all == 0).astype(int)\n        before_zeros = np.concatenate([zeros[1:],[0]])\n        indices_all = np.arange(counts_all.shape[0])\n        # Geometric mean of twice sigma_base and a sigma_scaled which is scaled to the length of array \n        sigma_scaled = counts_all.shape[0]*sigma_fac\n        sigma = np.power(sigma_base * sigma_base * sigma_scaled, 1/3)\n        sigmas.append(sigma)\n        counts_all_smooth = scipy.ndimage.filters.gaussian_filter1d(counts_all, sigma)\n        deviation = counts_all / (counts_all_smooth+eps)\n        indices = X_all_var_int\n        features_count[:,i] = counts_all[indices]\n        features_density[:,i] = counts_all_smooth[indices]\n        features_deviation[:,i] = deviation[indices]\n        indices_fake = X_fake_var_int\n        features_count_fake[:,i] = counts_all[indices_fake]\n        features_density_fake[:,i] = counts_all_smooth[indices_fake]\n        features_deviation_fake[:,i] = deviation[indices_fake]\n        \n    features_count_names = [var+'_count' for var in features]\n    features_density_names = [var+'_density' for var in features]\n    features_deviation_names = [var+'_deviation' for var in features]\n\n    X_all_count = pd.DataFrame(columns=features_count_names, data = features_count)\n    X_all_count.index = X_all.index\n    X_all_density = pd.DataFrame(columns=features_density_names, data = features_density)\n    X_all_density.index = X_all.index\n    X_all_deviation = pd.DataFrame(columns=features_deviation_names, data = features_deviation)\n    X_all_deviation.index = X_all.index\n    X_all = pd.concat([X_all,X_all_count, X_all_density, X_all_deviation], axis=1)\n    \n    X_fake_count = pd.DataFrame(columns=features_count_names, data = features_count_fake)\n    X_fake_count.index = X_fake.index\n    X_fake_density = pd.DataFrame(columns=features_density_names, data = features_density_fake)\n    X_fake_density.index = X_fake.index\n    X_fake_deviation = pd.DataFrame(columns=features_deviation_names, data = features_deviation_fake)\n    X_fake_deviation.index = X_fake.index\n    X_fake = pd.concat([X_fake,X_fake_count, X_fake_density, X_fake_deviation], axis=1)    \n\n    features_count = features_count_names\n    features_density = features_density_names\n    features_deviation = features_deviation_names\n    return X_all, features_count, features_density, features_deviation, X_fake\n\nX_all, features_count, features_density, features_deviation, X_fake = get_count(X_all, X_fake)\nprint(X_all.shape)","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:48:57.303029Z","iopub.execute_input":"2025-09-19T13:48:57.303283Z","iopub.status.idle":"2025-09-19T13:49:11.654447Z","shell.execute_reply.started":"2025-09-19T13:48:57.303254Z","shell.execute_reply":"2025-09-19T13:49:11.653542Z"}},"outputs":[{"name":"stderr","text":"100%|██████████| 183/183 [00:11<00:00, 15.59it/s]\n","output_type":"stream"},{"name":"stdout","text":"(300000, 732)\n","output_type":"stream"}],"execution_count":8},{"cell_type":"markdown","source":"# 标准化","metadata":{}},{"cell_type":"code","source":"features_to_scale = [features, features_count]\n\nfrom sklearn.preprocessing import StandardScaler\n\ndef get_standardized(X_all, X_fake):\n    scaler = StandardScaler()\n    features_to_scale_flatten = [var for sublist in features_to_scale for var in sublist]\n    scaler.fit(X_all[features_to_scale_flatten])\n    features_scaled = scaler.transform(X_all[features_to_scale_flatten])\n    features_scaled_fake = scaler.transform(X_fake[features_to_scale_flatten])\n    X_all[features_to_scale_flatten] = features_scaled\n    X_fake[features_to_scale_flatten] = features_scaled_fake\n    return X_all, X_fake\n\nX_all, X_fake = get_standardized(X_all, X_fake)\n\nprint(X_all.shape)","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:49:11.655533Z","iopub.execute_input":"2025-09-19T13:49:11.655840Z","iopub.status.idle":"2025-09-19T13:49:15.672624Z","shell.execute_reply.started":"2025-09-19T13:49:11.655801Z","shell.execute_reply":"2025-09-19T13:49:15.672040Z"}},"outputs":[{"name":"stdout","text":"(300000, 732)\n","output_type":"stream"}],"execution_count":9},{"cell_type":"code","source":"train_length=200000\nX_train = X_all.iloc[:train_length,:]\nX_test = X_all.iloc[train_length:,:]\ndel X_all\nimport gc\ngc.collect()\nprint(X_train.shape, X_test.shape)","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:49:15.673246Z","iopub.execute_input":"2025-09-19T13:49:15.673460Z","iopub.status.idle":"2025-09-19T13:49:15.862237Z","shell.execute_reply.started":"2025-09-19T13:49:15.673445Z","shell.execute_reply":"2025-09-19T13:49:15.861431Z"}},"outputs":[{"name":"stdout","text":"(200000, 732) (100000, 732)\n","output_type":"stream"}],"execution_count":10},{"cell_type":"markdown","source":"# 一些可视化函数","metadata":{}},{"cell_type":"code","source":"#查看划分的比例\ndef plot_class_distribution(y, title=\"Class Distribution\",save_path=None):\n    \"\"\"\n    可视化二分类数据的数量和比例\n    :param y: 二分类目标变量 (pd.Series 或 list)\n    :param title: 图表标题\n    \"\"\"\n    # 转换为 Series\n    if not isinstance(y, pd.Series):\n        y = pd.Series(y)\n\n    # 统计数量和比例\n    class_counts = y.value_counts()\n    class_percent = y.value_counts(normalize=True) * 100\n\n    # 合并到一个 DataFrame 方便展示\n    df = pd.DataFrame({\"Count\": class_counts, \"Percentage\": class_percent.round(2)})\n\n    # 绘制柱状图\n    plt.figure(figsize=(6, 4))\n    sns.barplot(x=df.index, y=df[\"Count\"], palette=\"Blues_d\")\n    \n    # 在柱子上显示数量和比例\n    for i, (count, pct) in enumerate(zip(df[\"Count\"], df[\"Percentage\"])):\n        plt.text(i, count + 0.5, f\"{count} ({pct}%)\", ha=\"center\")\n\n    plt.title(title)\n    plt.xlabel(\"Class\")\n    plt.ylabel(\"Count\")\n    plt.xticks(rotation=0)\n    plt.show()\n    if save_path:\n        plt.savefig(f\"{save_path}/{title}\")\n\nplot_class_distribution(target_train, \"Train Target Distribution\")","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:49:15.864121Z","iopub.execute_input":"2025-09-19T13:49:15.864633Z","iopub.status.idle":"2025-09-19T13:49:16.101082Z","shell.execute_reply.started":"2025-09-19T13:49:15.864601Z","shell.execute_reply":"2025-09-19T13:49:16.100364Z"}},"outputs":[{"output_type":"display_data","data":{"text/plain":"<Figure size 600x400 with 1 Axes>","image/png":"iVBORw0KGgoAAAANSUhEUgAAAjYAAAGJCAYAAACZwnkIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8pXeV/AAAACXBIWXMAAA9hAAAPYQGoP6dpAABNYUlEQVR4nO3deVwV9f4/8NcB5bDIOaDIVii45JIIioa4QhJHJW+olbto7uFKJWKKaIuGaW4oWSlWmmYlmRqGKHpLREFRceGKIeZycEE4irLP749+zNcRUEDw4Ph6Ph7ncZn5vGfmPfOgy8vZjkIQBAFEREREMmCg7waIiIiIagqDDREREckGgw0RERHJBoMNERERyQaDDREREckGgw0RERHJBoMNERERyQaDDREREckGgw0RERHJBoMN0XNi9OjRcHR01Hcbsvc0j7OjoyNGjx4tTkdGRkKhUCAxMfGpbN/T0xOenp5PZVtElcVgQ6RnCoWiUp+4uDh9typydHSsVM+RkZH6blXizJkzCA0NxcWLFytVHxoaKtkfU1NTNGnSBP3798eGDRuQn5+vl76eprrcG1F56um7AaLn3XfffSeZ/vbbbxETE1Nmfps2bZ5oO1999RVKSkqeaB2lli9fjrt374rTu3fvxg8//IAvvvgCVlZW4vyuXbvWyPZqypkzZ7BgwQJ4enpW6azK2rVr0aBBA+Tn5+PKlSvYs2cP3nnnHSxfvhw7d+6Eg4ODWFud41zdvlJTU2FgULv/Pn1Ub3/88UetbpuoOhhsiPRsxIgRkunDhw8jJiamzPyH3bt3D6amppXeTv369avVX3n8/Pwk01qtFj/88AP8/Pxq5DJMVfettr355puSwBYSEoJNmzZh1KhReOutt3D48GFxrCaPc3kEQUBeXh5MTEygVCprdVuPY2RkpNftE5WHl6KIngGenp5o164dkpKS0LNnT5iammLOnDkAgF9//RW+vr6wt7eHUqlE8+bN8dFHH6G4uFiyjofv/bh48SIUCgU+//xzrFu3Ds2bN4dSqUTnzp1x9OjRJ+65sn09at9u3bqFkSNHQqVSwcLCAv7+/jhx4kS5l7nOnTuHN998Ew0bNoSxsTE6deqEHTt2iOORkZF46623AABeXl5PfIlv+PDhGDduHBISEhATEyPOL+8emy1btsDNzQ3m5uZQqVRwdnbGihUrKtWXo6MjXn/9dezZswedOnWCiYkJvvzyS3HswXtsSt27dw8TJ05Eo0aNoFKpMGrUKNy+fVtSo1AoEBoaWmbZB9f5uN7Ku8fm+vXrGDt2LGxsbGBsbAwXFxds3LhRUlPbv3v0fOMZG6JnxK1bt9C3b18MGTIEI0aMgI2NDYB///g0aNAAgYGBaNCgAfbt24eQkBDodDosWbLksevdvHkz7ty5g4kTJ0KhUCAsLAwDBw7E33///URnH6rSV3n7VlJSgv79++PIkSOYPHkyWrdujV9//RX+/v5ltnX69Gl069YNL7zwAmbPng0zMzP8+OOP8PPzw88//4wBAwagZ8+emDZtGlauXIk5c+aIl/ae5BLfyJEjsW7dOvzxxx947bXXyq2JiYnB0KFD0bt3b3z22WcAgLNnz+Kvv/7C9OnTK9VXamoqhg4diokTJ2L8+PFo1arVI/uaMmUKLCwsEBoaitTUVKxduxYZGRmIi4uDQqGo9P5V9Zjdv38fnp6eSEtLw5QpU+Dk5IRt27Zh9OjRyM7OxvTp0yX1tfW7R885gYjqlICAAOHh/zR79eolABAiIiLK1N+7d6/MvIkTJwqmpqZCXl6eOM/f319o2rSpOJ2eni4AEBo1aiRkZWWJ83/99VcBgPDbb79VuuclS5YIAIT09PQq91XRvv38888CAGH58uXivOLiYuHVV18VAAgbNmwQ5/fu3VtwdnaWrLekpETo2rWr0LJlS3Hetm3bBADC/v37K7Vf8+fPFwAIN27cKHf89u3bAgBhwIAB4ryHj/P06dMFlUolFBUVVbidR/XVtGlTAYAQHR1d7pi/v784vWHDBgGA4ObmJhQUFIjzw8LCBADCr7/+Ks4DIMyfP/+x63xUb7169RJ69eolTi9fvlwAIHz//ffivIKCAsHDw0No0KCBoNPpBEGo2d89oofxUhTRM0KpVGLMmDFl5puYmIg/37lzBzdv3kSPHj1w7949nDt37rHrHTx4MCwtLcXpHj16AAD+/vvvJ+q3Kn2Vt2/R0dGoX78+xo8fL84zMDBAQECApC4rKwv79u3D22+/LW7n5s2buHXrFjQaDc6fP48rV6480b5UpEGDBuL+VcTCwgK5ubmSy1VV5eTkBI1GU+n6CRMmSM54TJ48GfXq1cPu3bur3UNl7N69G7a2thg6dKg4r379+pg2bRru3r2LAwcOSOpr63ePnm8MNkTPiBdeeKHcmzVPnz6NAQMGQK1WQ6VSoXHjxuKNxzk5OY9db5MmTSTTpX9oHr4no6qq0ld5+5aRkQE7O7syNxG3aNFCMp2WlgZBEDBv3jw0btxY8pk/fz6Af+/7qA2lT4aZm5tXWPPuu+/ipZdeQt++ffHiiy/inXfeQXR0dJW24+TkVKX6li1bSqYbNGgAOzu7Wn9kOyMjAy1btizzpFbppauMjAzJ/Nr63aPnG++xIXpGPHgGpFR2djZ69eoFlUqFhQsXonnz5jA2NsaxY8cQFBRUqceODQ0Ny50vCEK1e61qX+XtW2WVruv999+v8KzGw2GopqSkpDx2/dbW1khOTsaePXvw+++/4/fff8eGDRswatSoMjfVVuRJjk9VPXxzd22qjd89IgYbomdYXFwcbt26hV9++QU9e/YU56enp+uxq5rpq2nTpti/f3+ZR7/T0tIkdc2aNQPw7yUPb2/vR66zKjfOVkbpu4Yed5nIyMgI/fv3R//+/VFSUoJ3330XX375JebNm4cWLVrUeF/nz5+Hl5eXOH337l1cu3YN/fr1E+dZWloiOztbslxBQQGuXbsmmVeV3po2bYqTJ0+ipKREctam9NJj06ZNq7IbRNXCS1FEz7DSf/E++C/cgoICrFmzRl8tAaiZvjQaDQoLC/HVV1+J80pKShAeHi6ps7a2hqenJ7788ssyf5QB4MaNG+LPZmZmAFDmD3p1bN68GV9//TU8PDzQu3fvCutu3bolmTYwMED79u0BQHxzcU32BQDr1q1DYWGhOL127VoUFRWhb9++4rzmzZvj4MGDZZZ7+IxNVXrr168ftFottm7dKs4rKirCqlWr0KBBA/Tq1as6u0NUJTxjQ/QM69q1KywtLeHv749p06ZBoVDgu+++0/up/Jroy8/PD6+88gree+89pKWloXXr1tixYweysrIASM8khIeHo3v37nB2dsb48ePRrFkzZGZmIj4+HpcvX8aJEycAAK6urjA0NMRnn32GnJwcKJVKvPrqq7C2tn5kLz/99BMaNGiAgoIC8c3Df/31F1xcXLBt27ZHLjtu3DhkZWXh1VdfxYsvvoiMjAysWrUKrq6u4r0n1e2rIgUFBejduzfefvttpKamYs2aNejevTv+85//SPqaNGkSBg0ahNdeew0nTpzAnj17JC8irGpvEyZMwJdffonRo0cjKSkJjo6O+Omnn/DXX39h+fLlj7wXiaimMNgQPcMaNWqEnTt34r333sPcuXNhaWmJESNGoHfv3lV6iqYu9mVoaIhdu3Zh+vTp2LhxIwwMDDBgwADMnz8f3bp1g7GxsVjbtm1bJCYmYsGCBYiMjMStW7dgbW2NDh06ICQkRKyztbVFREQEFi1ahLFjx6K4uBj79+9/bICYPHkyAMDY2BhWVlZwdXXF+vXrMWzYsMe+/XfEiBFYt24d1qxZg+zsbNja2mLw4MEIDQ0VL9dUt6+KrF69Gps2bUJISAgKCwsxdOhQrFy5UhIGx48fj/T0dHzzzTeIjo5Gjx49EBMTU+bsU1V6MzExQVxcHGbPno2NGzdCp9OhVatW2LBhQ7kvEiSqDQpB3/+0IyKqgqioKAwYMAB//vknunXrpu92iKiOYbAhojrr/v37kieCiouL4ePjg8TERGi12qf6tBARPRt4KYqI6qypU6fi/v378PDwQH5+Pn755RccOnQIn376KUMNEZWLZ2yIqM7avHkzli5dirS0NOTl5aFFixaYPHkypkyZou/WiKiOYrAhIiIi2eB7bIiIiEg2GGyIiIhINnjz8FNUUlKCq1evwtzcvMZfoU5ERCRngiDgzp07sLe3L/NFqw9isHmKrl69CgcHB323QURE9Mz6559/8OKLL1Y4zmDzFJW+Tvyff/6BSqXSczdERETPDp1OBwcHh8d+NQeDzVNUevlJpVIx2BAREVXD427l4M3DREREJBsMNkRPoGfPnti8ebO+26g1N2/ehLW1NS5fvqzvVoiIKoXBhmrcwYMH0b9/f9jb20OhUCAqKqpMjUKhKPezZMkSsebYsWN47bXXYGFhgUaNGmHChAm4e/euZD2xsbHo2rUrzM3NYWtri6CgIBQVFUlqTp48iR49esDY2BgODg4ICwuTjH/11Vfo0aMHLC0tYWlpCW9vbxw5cuSx+7ljxw5kZmZiyJAh4jytVouRI0fC1tYWZmZm6NixI37++WfJcpXZr4dlZmZi9OjRsLe3h6mpKfr06YPz589Lajw9Pcscz0mTJonjWVlZ6N+/Pxo0aIAOHTrg+PHjkuUDAgKwdOlSyTwrKyuMGjUK8+fPf+zxICKqCxhsqMbl5ubCxcUF4eHhFdZcu3ZN8lm/fj0UCgUGDRoE4N8nyLy9vdGiRQskJCQgOjoap0+fxujRo8V1nDhxAv369UOfPn1w/PhxbN26FTt27MDs2bPFGp1OBx8fHzRt2hRJSUlYsmQJQkNDsW7dOrEmLi4OQ4cOxf79+xEfHw8HBwf4+PjgypUrj9zPlStXYsyYMZLHDkeNGoXU1FTs2LEDp06dwsCBA/H222+LIaIy+/UwQRDg5+eHv//+G7/++iuOHz+Opk2bwtvbG7m5uZLa8ePHS47rgyHuk08+wZ07d3Ds2DF4enpi/Pjx4tjhw4eRkJCAGTNmlNn+mDFjsGnTJmRlZT3yeBAR1QkCPTU5OTkCACEnJ0ffrTw1AITt27c/tu6NN94QXn31VXH6yy+/FKytrYXi4mJx3smTJwUAwvnz5wVBEITg4GChU6dOkvXs2LFDMDY2FnQ6nSAIgrBmzRrB0tJSyM/PF2uCgoKEVq1aVdhLUVGRYG5uLmzcuLHCmuvXrwsKhUJISUmRzDczMxO+/fZbybyGDRsKX331VaX362GpqakCAMm2iouLhcaNG4vrFQRB6NWrlzB9+vQKe+7bt6+wdu1aQRAE4cyZM4KpqakgCIJQUFAguLi4CEePHq1wWScnJ+Hrr7+ucJyIqLZV9m8oz9iQ3mVmZmLXrl0YO3asOC8/Px9GRkaSsyGl3+b8559/ijXGxsaSdZmYmCAvLw9JSUkAgPj4ePTs2RNGRkZijUajQWpqKm7fvl1uP/fu3UNhYSEaNmxYYc9//vknTE1N0aZNG8n8rl27YuvWrcjKykJJSQm2bNmCvLw8eHp6Vnq/Hpafnw8Akn01MDCAUqkss8ymTZtgZWWFdu3aITg4GPfu3RPHXFxcsG/fPhQVFWHPnj1o3749ACAsLAyenp7o1KlThfv7yiuv4L///W+F40REdQWDDendxo0bYW5ujoEDB4rzXn31VWi1WixZsgQFBQW4ffu2eInp2rVrAP4NKIcOHcIPP/yA4uJiXLlyBQsXLpTUaLVa2NjYSLZXOq3VasvtJygoCPb29vD29q6w54yMDNjY2JR5++WPP/6IwsJCNGrUCEqlEhMnTsT27dvRokWLSu/Xw1q3bo0mTZogODgYt2/fRkFBAT777DNcvnxZssywYcPw/fffY//+/QgODsZ3332HESNGiOOzZ89GvXr10Lx5c2zfvh3ffPMNzp8/j40bN2LevHmYNGkSmjVrhrfffhs5OTmSHuzt7ZGRkVHh8SAiqisYbEjv1q9fj+HDh0vOSLz88svYuHEjli5dClNTU9ja2sLJyUkSJnx8fLBkyRJMmjQJSqUSL730Evr16wcAj3zd9qMsXrwYW7Zswfbt28ucDXrQ/fv3yx2fN28esrOzsXfvXiQmJiIwMBBvv/02Tp06Ven9elj9+vXxyy+/4H//+x8aNmwIU1NT7N+/H3379pUsM2HCBGg0Gjg7O2P48OH49ttvsX37dly4cAEAoFarsXnzZmRkZODAgQNo27YtJk6ciCVLlmDTpk34+++/kZqaClNTUzEgljIxMZGc/SEiqrOe0qUxEniPTXkOHjwoABCSk5MrrNFqtcKdO3eEu3fvCgYGBsKPP/4oGS8pKRGuXLki3Lt3Tzhz5owAQDhy5IggCIIwcuRI4Y033pDU79u3TwAgZGVlSeYvWbJEUKvVj7zXpNS6desEOzs7yby0tLQy98IIgiD07t1bmDhxYpX3qzzZ2dnC9evXBUEQhFdeeUV49913K6y9e/euAECIjo4ud3z9+vXCgAEDBEEQhAEDBgjh4eGCIAjCzp07hY4dO0pqJ02aJPj6+j62PyKi2sJ7bOiZ8M0338DNzQ0uLi4V1tjY2KBBgwbYunUrjI2N8dprr0nGFQoF7O3tYWJigh9++AEODg7o2LEjAMDDwwMHDx5EYWGhWB8TE4NWrVrB0tJSnBcWFoaPPvoI0dHRj7zXpFSHDh2g1Wol9+mUntF4+MyLoaEhSkpKqrxf5VGr1WjcuDHOnz+PxMREvPHGGxXWJicnAwDs7OzKjN24cQMLFy7EqlWrAADFxcXiMSosLERxcbGkPiUlBR06dHhsf0REeveUghYJz88Zmzt37gjHjx8Xjh8/LgAQli1bJhw/flzIyMiQ1OXk5AimpqbikzoPW7VqlZCUlCSkpqYKq1evFkxMTIQVK1ZIasLCwoSTJ08KKSkpwsKFC4X69etLzhBlZ2cLNjY2wsiRI4WUlBRhy5YtgqmpqfDll1+KNYsXLxaMjIyEn376Sbh27Zr4uXPnToX7WFRUJDRu3Fj47bffxHkFBQVCixYthB49eggJCQlCWlqa8PnnnwsKhULYtWtXlfarVatWwi+//CJO//jjj8L+/fuFCxcuCFFRUULTpk2FgQMHiuNpaWnCwoULhcTERCE9PV349ddfhWbNmgk9e/Yst/9hw4YJq1atEqc/++wzwc3NTThz5ozQt29fyZmg3NxcwcTERDh48GCFx4OIqLZV9m+oQhAEQc/Z6rmh0+mgVquRk5NTK98VNXV9XI2vszoun0tG1JKZZea37qqB99j/e8dMyoHf8OeWcIxZ+hOUpg3K1Md8/SkunkxAYf59WNo6oINmMFp39ZHUbF8SiBsZ/0NxUSGsHJrjlf/4o6mzu6Tm5j8XcGDTClxPPwdjczXavzoQbv2GiuMbZw3BnVuZZbbf+T/+cH9jdIX7eWjbl7iTdR2aifPEedmZl3Hop3W4lpaCwrz7UFvbl+m7Mvu1eqwXeo8JQpvufQAAJ/b+jOPRW3FPdxtm6kZo1dUHnfuPhGG9+gCAO1nXEfPVJ7h15SKK8u+jQUNrNOvYHZ1fHwkjEzPJujNSjuBI1Aa8OScciv9/dqkwPw+x6xcjI+UobJxaw2fCXJiq/j2j9b+EWBzZsREjPvm2wmPxNK16x1PfLRCRHlT2byiDzVP0vASb50VuThY2zxuDwSFfQmVlq+92as22T95F+94D0apLxU+JPU0MNkTPp8r+DeU9NkTVZKZuiN6jP8DdrOv6bqXW3L+Tg+Yde+Al9976boWIqFLq6bsBomdZs47d9d1CrTIxV6Nj36GPLyQiqiP0esbmcV+WWJkvSnR0dCwzvnjxYsl6HvcliACwbds2tG7dGsbGxnB2dsbu3bsl44IgICQkBHZ2djAxMYG3t3eZLyEkIiIi/dJrsHnclyU+7osSSy1cuFBSN3XqVHGsMl+CeOjQIQwdOhRjx47F8ePH4efnBz8/P6SkpIg1YWFhWLlyJSIiIpCQkAAzMzNoNBrk5eXV8FEhIiKi6tLrpai+ffuib9++FY7b2kpvyPz111/h5eWFZs2aSeabm5uXqS21adMmFBQUYP369TAyMsLLL7+M5ORkLFu2DBMmTAAArFixAn369MEHH3wAAPjoo48QExOD1atXIyIiAoIgYPny5Zg7d6743pBvv/0WNjY2iIqKwpAhQ6p9DIiIiKjmPDM3D5f3RYmlFi9ejEaNGqFDhw5YsmQJioqKxLHKfAlifHx8me8F0mg0iI+PBwCkp6dDq9VKatRqNdzd3cWa8uTn50On00k+REREVHuemZuHy/uiRACYNm0aOnbsiIYNG+LQoUMIDg7GtWvXsGzZMgD/ftGhk5OTZJkHvwTR0tKywi9KLP2SxNL/fVRNeRYtWoQFCxZUY2+JiIioOp6ZYFPeFyUCQGBgoPhz+/btYWRkhIkTJ2LRokVQKpVPu02J4OBgSX86nQ4ODg567IiIiEjenolLUf/973+RmpqKcePGPbbW3d0dRUVFuHjxIoB/79PJzJS+VbZ0uvS+nIpqHhx/cLnyasqjVCqhUqkkHyIiIqo9z0SwqcwXJZZKTk6GgYEBrK2tAVTuSxA9PDwQGxsrWU9MTAw8PDwAAE5OTrC1tZXU6HQ6JCQkiDVERESkf3q9FHX37l2kpaWJ0+np6UhOTkbDhg3RpEkTAP8GiG3btmHp0qVllo+Pj0dCQgK8vLxgbm6O+Ph4zJw5EyNGjBBDy7Bhw7BgwQKMHTsWQUFBSElJwYoVK/DFF1+I65k+fTp69eqFpUuXwtfXF1u2bEFiYqL4SLhCocCMGTPw8ccfo2XLlnBycsK8efNgb28PPz+/WjxCREREVBV6DTaJiYnw8vISp0vvR/H390dkZCQAYMuWLRAEAUOHln37qVKpxJYtWxAaGor8/Hw4OTlh5syZkvta1Go1/vjjDwQEBMDNzQ1WVlYICQkRH/UGgK5du2Lz5s2YO3cu5syZg5YtWyIqKgrt2rUTa2bNmoXc3FxMmDAB2dnZ6N69O6Kjo8vc80NERET6wy/BfIr4JZhET45fgkn0fOKXYBIREdFzh8GGiIiIZIPBhoiIiGSDwYaIiIhkg8GGiIiIZIPBhoiIiGSDwYaIiIhkg8GGiIiIZIPBhoiIiGSDwYaIiIhkg8GGiIiIZIPBhoiIiGSDwYaIiIhkg8GGiIiIZIPBhoiIiGSDwYaIiIhkg8GGiIiIZIPBhoiIiGSDwYaIiIhkg8GGiIiIZIPBhoiIiGSDwYaIiIhkg8GGiIiIZIPBhoiIiGSDwYaIiIhkg8GGiIiIZIPBhoiIiGSDwYaIiIhkg8GGiIiIZIPBhoiIiGRDr8Hm4MGD6N+/P+zt7aFQKBAVFSUZHz16NBQKheTTp08fSU1WVhaGDx8OlUoFCwsLjB07Fnfv3pXUnDx5Ej169ICxsTEcHBwQFhZWppdt27ahdevWMDY2hrOzM3bv3i0ZFwQBISEhsLOzg4mJCby9vXH+/PmaORBERERUI/QabHJzc+Hi4oLw8PAKa/r06YNr166Jnx9++EEyPnz4cJw+fRoxMTHYuXMnDh48iAkTJojjOp0OPj4+aNq0KZKSkrBkyRKEhoZi3bp1Ys2hQ4cwdOhQjB07FsePH4efnx/8/PyQkpIi1oSFhWHlypWIiIhAQkICzMzMoNFokJeXV4NHhIiIiJ6EQhAEQd9NAIBCocD27dvh5+cnzhs9ejSys7PLnMkpdfbsWbRt2xZHjx5Fp06dAADR0dHo168fLl++DHt7e6xduxYffvghtFotjIyMAACzZ89GVFQUzp07BwAYPHgwcnNzsXPnTnHdXbp0gaurKyIiIiAIAuzt7fHee+/h/fffBwDk5OTAxsYGkZGRGDJkSKX2UafTQa1WIycnByqVqqqH6LGmro+r8XUS1TWr3vHUdwtEpAeV/Rta5++xiYuLg7W1NVq1aoXJkyfj1q1b4lh8fDwsLCzEUAMA3t7eMDAwQEJCgljTs2dPMdQAgEajQWpqKm7fvi3WeHt7S7ar0WgQHx8PAEhPT4dWq5XUqNVquLu7izXlyc/Ph06nk3yIiIio9tTpYNOnTx98++23iI2NxWeffYYDBw6gb9++KC4uBgBotVpYW1tLlqlXrx4aNmwIrVYr1tjY2EhqSqcfV/Pg+IPLlVdTnkWLFkGtVosfBweHKu0/ERERVU09fTfwKA9e4nF2dkb79u3RvHlzxMXFoXfv3nrsrHKCg4MRGBgoTut0OoYbIiKiWlSnz9g8rFmzZrCyskJaWhoAwNbWFtevX5fUFBUVISsrC7a2tmJNZmampKZ0+nE1D44/uFx5NeVRKpVQqVSSDxEREdWeZyrYXL58Gbdu3YKdnR0AwMPDA9nZ2UhKShJr9u3bh5KSEri7u4s1Bw8eRGFhoVgTExODVq1awdLSUqyJjY2VbCsmJgYeHh4AACcnJ9ja2kpqdDodEhISxBoiIiLSP70Gm7t37yI5ORnJyckA/r1JNzk5GZcuXcLdu3fxwQcf4PDhw7h48SJiY2PxxhtvoEWLFtBoNACANm3aoE+fPhg/fjyOHDmCv/76C1OmTMGQIUNgb28PABg2bBiMjIwwduxYnD59Glu3bsWKFSskl4imT5+O6OhoLF26FOfOnUNoaCgSExMxZcoUAP8+sTVjxgx8/PHH2LFjB06dOoVRo0bB3t5e8hQXERER6Zde77FJTEyEl5eXOF0aNvz9/bF27VqcPHkSGzduRHZ2Nuzt7eHj44OPPvoISqVSXGbTpk2YMmUKevfuDQMDAwwaNAgrV64Ux9VqNf744w8EBATAzc0NVlZWCAkJkbzrpmvXrti8eTPmzp2LOXPmoGXLloiKikK7du3EmlmzZiE3NxcTJkxAdnY2unfvjujoaBgbG9fmISIiIqIqqDPvsXke8D02RE+O77Ehej7J5j02RERERJXFYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREsqHXYHPw4EH0798f9vb2UCgUiIqKEscKCwsRFBQEZ2dnmJmZwd7eHqNGjcLVq1cl63B0dIRCoZB8Fi9eLKk5efIkevToAWNjYzg4OCAsLKxML9u2bUPr1q1hbGwMZ2dn7N69WzIuCAJCQkJgZ2cHExMTeHt74/z58zV3MIiIiOiJ6TXY5ObmwsXFBeHh4WXG7t27h2PHjmHevHk4duwYfvnlF6SmpuI///lPmdqFCxfi2rVr4mfq1KnimE6ng4+PD5o2bYqkpCQsWbIEoaGhWLdunVhz6NAhDB06FGPHjsXx48fh5+cHPz8/pKSkiDVhYWFYuXIlIiIikJCQADMzM2g0GuTl5dXwUSEiIqLqUgiCIOi7CQBQKBTYvn07/Pz8Kqw5evQoXnnlFWRkZKBJkyYA/j1jM2PGDMyYMaPcZdauXYsPP/wQWq0WRkZGAIDZs2cjKioK586dAwAMHjwYubm52Llzp7hcly5d4OrqioiICAiCAHt7e7z33nt4//33AQA5OTmwsbFBZGQkhgwZUql91Ol0UKvVyMnJgUqlqtQyVTF1fVyNr5Oorln1jqe+WyAiPajs39Bn6h6bnJwcKBQKWFhYSOYvXrwYjRo1QocOHbBkyRIUFRWJY/Hx8ejZs6cYagBAo9EgNTUVt2/fFmu8vb0l69RoNIiPjwcApKenQ6vVSmrUajXc3d3FmvLk5+dDp9NJPkRERFR76um7gcrKy8tDUFAQhg4dKklq06ZNQ8eOHdGwYUMcOnQIwcHBuHbtGpYtWwYA0Gq1cHJykqzLxsZGHLO0tIRWqxXnPVij1WrFugeXK6+mPIsWLcKCBQuqucdERERUVc9EsCksLMTbb78NQRCwdu1ayVhgYKD4c/v27WFkZISJEydi0aJFUCqVT7tVieDgYEl/Op0ODg4OeuyIiIhI3ur8pajSUJORkYGYmJjH3pvi7u6OoqIiXLx4EQBga2uLzMxMSU3ptK2t7SNrHhx/cLnyasqjVCqhUqkkHyIiIqo9dTrYlIaa8+fPY+/evWjUqNFjl0lOToaBgQGsra0BAB4eHjh48CAKCwvFmpiYGLRq1QqWlpZiTWxsrGQ9MTEx8PDwAAA4OTnB1tZWUqPT6ZCQkCDWEBERkf7p9VLU3bt3kZaWJk6np6cjOTkZDRs2hJ2dHd58800cO3YMO3fuRHFxsXg/S8OGDWFkZIT4+HgkJCTAy8sL5ubmiI+Px8yZMzFixAgxtAwbNgwLFizA2LFjERQUhJSUFKxYsQJffPGFuN3p06ejV69eWLp0KXx9fbFlyxYkJiaKj4QrFArMmDEDH3/8MVq2bAknJyfMmzcP9vb2j3yKi4iIiJ4uvQabxMREeHl5idOl96P4+/sjNDQUO3bsAAC4urpKltu/fz88PT2hVCqxZcsWhIaGIj8/H05OTpg5c6bkvha1Wo0//vgDAQEBcHNzg5WVFUJCQjBhwgSxpmvXrti8eTPmzp2LOXPmoGXLloiKikK7du3EmlmzZiE3NxcTJkxAdnY2unfvjujoaBgbG9fGoSEiIqJqqDPvsXke8D02RE+O77Ehej7J8j02RERERI/CYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLLBYENERESywWBDREREssFgQ0RERLJRrWDTrFkz3Lp1q8z87OxsNGvW7ImbIiIiIqqOagWbixcvori4uMz8/Px8XLly5YmbIiIiIqqOelUp3rFjh/jznj17oFarxeni4mLExsbC0dGxxpojIiIiqooqBRs/Pz8AgEKhgL+/v2Ssfv36cHR0xNKlS2usOSIiIqKqqFKwKSkpAQA4OTnh6NGjsLKyqpWmiIiIiKqjWvfYpKen10ioOXjwIPr37w97e3soFApERUVJxgVBQEhICOzs7GBiYgJvb2+cP39eUpOVlYXhw4dDpVLBwsICY8eOxd27dyU1J0+eRI8ePWBsbAwHBweEhYWV6WXbtm1o3bo1jI2N4ezsjN27d1e5FyIiItKvaj/uHRsbizlz5mDcuHF45513JJ/Kys3NhYuLC8LDw8sdDwsLw8qVKxEREYGEhASYmZlBo9EgLy9PrBk+fDhOnz6NmJgY7Ny5EwcPHsSECRPEcZ1OBx8fHzRt2hRJSUlYsmQJQkNDsW7dOrHm0KFDGDp0KMaOHYvjx4/Dz88Pfn5+SElJqVIvREREpF8KQRCEqi60YMECLFy4EJ06dYKdnR0UCoVkfPv27VVvRKHA9u3bxft4BEGAvb093nvvPbz//vsAgJycHNjY2CAyMhJDhgzB2bNn0bZtWxw9ehSdOnUCAERHR6Nfv364fPky7O3tsXbtWnz44YfQarUwMjICAMyePRtRUVE4d+4cAGDw4MHIzc3Fzp07xX66dOkCV1dXREREVKqXytDpdFCr1cjJyYFKparyMXqcqevjanydRHXNqnc89d0CEelBZf+GVuuMTUREBCIjI5GQkICoqChs375d8qkJ6enp0Gq18Pb2Fuep1Wq4u7sjPj4eABAfHw8LCwsx1ACAt7c3DAwMkJCQINb07NlTDDUAoNFokJqaitu3b4s1D26ntKZ0O5XppTz5+fnQ6XSSDxEREdWeagWbgoICdO3ataZ7kdBqtQAAGxsbyXwbGxtxTKvVwtraWjJer149NGzYUFJT3joe3EZFNQ+OP66X8ixatAhqtVr8ODg4PGaviYiI6ElUK9iMGzcOmzdvruleZCc4OBg5OTni559//tF3S0RERLJWpce9S+Xl5WHdunXYu3cv2rdvj/r160vGly1b9sSN2draAgAyMzNhZ2cnzs/MzISrq6tYc/36dclyRUVFyMrKEpe3tbVFZmampKZ0+nE1D44/rpfyKJVKKJXKSu0vERERPblqnbE5efIkXF1dYWBggJSUFBw/flz8JCcn10hjTk5OsLW1RWxsrDhPp9MhISEBHh4eAAAPDw9kZ2cjKSlJrNm3bx9KSkrg7u4u1hw8eBCFhYViTUxMDFq1agVLS0ux5sHtlNaUbqcyvRAREZH+VeuMzf79+2tk43fv3kVaWpo4nZ6ejuTkZDRs2BBNmjTBjBkz8PHHH6Nly5ZwcnLCvHnzYG9vLz451aZNG/Tp0wfjx49HREQECgsLMWXKFAwZMgT29vYAgGHDhmHBggUYO3YsgoKCkJKSghUrVuCLL74Qtzt9+nT06tULS5cuha+vL7Zs2YLExETxkXCFQvHYXoiIiEj/qhVsakpiYiK8vLzE6cDAQACAv78/IiMjMWvWLOTm5mLChAnIzs5G9+7dER0dDWNjY3GZTZs2YcqUKejduzcMDAwwaNAgrFy5UhxXq9X4448/EBAQADc3N1hZWSEkJETyrpuuXbti8+bNmDt3LubMmYOWLVsiKioK7dq1E2sq0wsRERHpV7XeY+Pl5VXm3TUP2rdv3xM1JVd8jw3Rk+N7bIieT5X9G1qtMzYP3zBbWFiI5ORkpKSklPlyTCIiIqKnpVrB5sH7Ux4UGhpa5nuaiIiIiJ6Wan9XVHlGjBiB9evX1+QqiYiIiCqtRoNNfHw8b6YlIiIivanWpaiBAwdKpgVBwLVr15CYmIh58+bVSGNEREREVVWtYKNWqyXTBgYGaNWqFRYuXAgfH58aaYyIiIioqqoVbDZs2FDTfRARERE9sSd6QV9SUhLOnj0LAHj55ZfRoUOHGmmKiIiIqDqqFWyuX7+OIUOGIC4uDhYWFgCA7OxseHl5YcuWLWjcuHFN9khERERUKdV6Kmrq1Km4c+cOTp8+jaysLGRlZSElJQU6nQ7Tpk2r6R6JiIiIKqVaZ2yio6Oxd+9etGnTRpzXtm1bhIeH8+ZhIiIi0ptqnbEpKSlB/fr1y8yvX78+SkpKnrgpIiIiouqoVrB59dVXMX36dFy9elWcd+XKFcycORO9e/euseaIiIiIqqJawWb16tXQ6XRwdHRE8+bN0bx5czg5OUGn02HVqlU13SMRERFRpVTrHhsHBwccO3YMe/fuxblz5wAAbdq0gbe3d402R0RERFQVVTpjs2/fPrRt2xY6nQ4KhQKvvfYapk6diqlTp6Jz5854+eWX8d///re2eiUiIiJ6pCoFm+XLl2P8+PFQqVRlxtRqNSZOnIhly5bVWHNEREREVVGlYHPixAn06dOnwnEfHx8kJSU9cVNERERE1VGlYJOZmVnuY96l6tWrhxs3bjxxU0RERETVUaVg88ILLyAlJaXC8ZMnT8LOzu6JmyIiIiKqjioFm379+mHevHnIy8srM3b//n3Mnz8fr7/+eo01R0RERFQVVXrce+7cufjll1/w0ksvYcqUKWjVqhUA4Ny5cwgPD0dxcTE+/PDDWmmUiIiI6HGqFGxsbGxw6NAhTJ48GcHBwRAEAQCgUCig0WgQHh4OGxubWmmUiIiI6HGq/IK+pk2bYvfu3bh9+zbS0tIgCAJatmwJS0vL2uiPiIiIqNKq9eZhALC0tETnzp1rshciIiKiJ1Kt74oiIiIiqosYbIiIiEg2GGyIiIhINhhsiIiISDYYbIiIiEg26nywcXR0hEKhKPMJCAgAAHh6epYZmzRpkmQdly5dgq+vL0xNTWFtbY0PPvgARUVFkpq4uDh07NgRSqUSLVq0QGRkZJlewsPD4ejoCGNjY7i7u+PIkSO1tt9ERERUdXU+2Bw9ehTXrl0TPzExMQCAt956S6wZP368pCYsLEwcKy4uhq+vLwoKCnDo0CFs3LgRkZGRCAkJEWvS09Ph6+sLLy8vJCcnY8aMGRg3bhz27Nkj1mzduhWBgYGYP38+jh07BhcXF2g0Gly/fv0pHAUiIiKqDIVQ+vrgZ8SMGTOwc+dOnD9/HgqFAp6ennB1dcXy5cvLrf/999/x+uuv4+rVq+JbkSMiIhAUFIQbN27AyMgIQUFB2LVrl+QLPocMGYLs7GxER0cDANzd3dG5c2esXr0aAFBSUgIHBwdMnToVs2fPLnfb+fn5yM/PF6d1Oh0cHByQk5MDlUpVE4dDYur6uBpfJ1Fds+odT323QER6oNPpoFarH/s3tM6fsXlQQUEBvv/+e7zzzjtQKBTi/E2bNsHKygrt2rVDcHAw7t27J47Fx8fD2dlZ8lUPGo0GOp0Op0+fFmu8vb0l29JoNIiPjxe3m5SUJKkxMDCAt7e3WFOeRYsWQa1Wix8HB4cnOwBERET0SNV+87A+REVFITs7G6NHjxbnDRs2DE2bNoW9vT1OnjyJoKAgpKam4pdffgEAaLXaMt9fVTqt1WofWaPT6XD//n3cvn0bxcXF5dacO3euwn6Dg4MRGBgoTpeesSEiIqLa8UwFm2+++QZ9+/aFvb29OG/ChAniz87OzrCzs0Pv3r1x4cIFNG/eXB9tipRKJZRKpV57ICIiep48M5eiMjIysHfvXowbN+6Rde7u7gCAtLQ0AICtrS0yMzMlNaXTtra2j6xRqVQwMTGBlZUVDA0Ny60pXQcRERHp3zMTbDZs2ABra2v4+vo+si45ORkAYGdnBwDw8PDAqVOnJE8vxcTEQKVSoW3btmJNbGysZD0xMTHw8PAAABgZGcHNzU1SU1JSgtjYWLGGiIiI9O+ZCDYlJSXYsGED/P39Ua/e/109u3DhAj766CMkJSXh4sWL2LFjB0aNGoWePXuiffv2AAAfHx+0bdsWI0eOxIkTJ7Bnzx7MnTsXAQEB4mWiSZMm4e+//8asWbNw7tw5rFmzBj/++CNmzpwpbiswMBBfffUVNm7ciLNnz2Ly5MnIzc3FmDFjnu7BICIiogo9E/fY7N27F5cuXcI777wjmW9kZIS9e/di+fLlyM3NhYODAwYNGoS5c+eKNYaGhti5cycmT54MDw8PmJmZwd/fHwsXLhRrnJycsGvXLsycORMrVqzAiy++iK+//hoajUasGTx4MG7cuIGQkBBotVq4uroiOjq6zA3FREREpD/P3HtsnmWVfQa/uvgeG3oe8D02RM8nWb7HhoiIiOhRGGyIiIhINhhsiIiISDYYbIiIiEg2GGyIiIhINhhsiIiISDYYbIiIiEg2GGyIiIhINhhsiIiISDYYbIiIiEg2GGyIiIhINhhsiIiISDYYbIiIiEg2GGyIiIhINhhsiIiISDYYbIiIiEg2GGyIiIhINhhsiIiISDYYbIiIiEg2GGyIiIhINhhsiIiISDYYbIiIiEg2GGyIiIhINhhsiIiISDYYbIiIiEg2GGyIiIhINhhsiIiISDYYbIiIiEg2GGyIiIhINup0sAkNDYVCoZB8WrduLY7n5eUhICAAjRo1QoMGDTBo0CBkZmZK1nHp0iX4+vrC1NQU1tbW+OCDD1BUVCSpiYuLQ8eOHaFUKtGiRQtERkaW6SU8PByOjo4wNjaGu7s7jhw5Uiv7TERERNVXp4MNALz88su4du2a+Pnzzz/FsZkzZ+K3337Dtm3bcODAAVy9ehUDBw4Ux4uLi+Hr64uCggIcOnQIGzduRGRkJEJCQsSa9PR0+Pr6wsvLC8nJyZgxYwbGjRuHPXv2iDVbt25FYGAg5s+fj2PHjsHFxQUajQbXr19/OgeBiIiIKkUhCIKg7yYqEhoaiqioKCQnJ5cZy8nJQePGjbF582a8+eabAIBz586hTZs2iI+PR5cuXfD777/j9ddfx9WrV2FjYwMAiIiIQFBQEG7cuAEjIyMEBQVh165dSElJEdc9ZMgQZGdnIzo6GgDg7u6Ozp07Y/Xq1QCAkpISODg4YOrUqZg9e3al90en00GtViMnJwcqlaq6h6VCU9fH1fg6ieqaVe946rsFItKDyv4NrfNnbM6fPw97e3s0a9YMw4cPx6VLlwAASUlJKCwshLe3t1jbunVrNGnSBPHx8QCA+Ph4ODs7i6EGADQaDXQ6HU6fPi3WPLiO0prSdRQUFCApKUlSY2BgAG9vb7GmIvn5+dDpdJIPERER1Z46HWzc3d0RGRmJ6OhorF27Funp6ejRowfu3LkDrVYLIyMjWFhYSJaxsbGBVqsFAGi1WkmoKR0vHXtUjU6nw/3793Hz5k0UFxeXW1O6joosWrQIarVa/Dg4OFT5GBAREVHl1dN3A4/St29f8ef27dvD3d0dTZs2xY8//ggTExM9dlY5wcHBCAwMFKd1Oh3DDRERUS2q02dsHmZhYYGXXnoJaWlpsLW1RUFBAbKzsyU1mZmZsLW1BQDY2tqWeUqqdPpxNSqVCiYmJrCysoKhoWG5NaXrqIhSqYRKpZJ8iIiIqPY8U8Hm7t27uHDhAuzs7ODm5ob69esjNjZWHE9NTcWlS5fg4eEBAPDw8MCpU6ckTy/FxMRApVKhbdu2Ys2D6yitKV2HkZER3NzcJDUlJSWIjY0Va4iIiKhuqNPB5v3338eBAwdw8eJFHDp0CAMGDIChoSGGDh0KtVqNsWPHIjAwEPv370dSUhLGjBkDDw8PdOnSBQDg4+ODtm3bYuTIkThx4gT27NmDuXPnIiAgAEqlEgAwadIk/P3335g1axbOnTuHNWvW4Mcff8TMmTPFPgIDA/HVV19h48aNOHv2LCZPnozc3FyMGTNGL8eFiIiIylen77G5fPkyhg4dilu3bqFx48bo3r07Dh8+jMaNGwMAvvjiCxgYGGDQoEHIz8+HRqPBmjVrxOUNDQ2xc+dOTJ48GR4eHjAzM4O/vz8WLlwo1jg5OWHXrl2YOXMmVqxYgRdffBFff/01NBqNWDN48GDcuHEDISEh0Gq1cHV1RXR0dJkbiomIiEi/6vR7bOSG77EhenJ8jw3R80k277EhIiIiqiwGGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSjTodbBYtWoTOnTvD3Nwc1tbW8PPzQ2pqqqTG09MTCoVC8pk0aZKk5tKlS/D19YWpqSmsra3xwQcfoKioSFITFxeHjh07QqlUokWLFoiMjCzTT3h4OBwdHWFsbAx3d3ccOXKkxveZiIiIqq9OB5sDBw4gICAAhw8fRkxMDAoLC+Hj44Pc3FxJ3fjx43Ht2jXxExYWJo4VFxfD19cXBQUFOHToEDZu3IjIyEiEhISINenp6fD19YWXlxeSk5MxY8YMjBs3Dnv27BFrtm7disDAQMyfPx/Hjh2Di4sLNBoNrl+/XvsHgoiIiCpFIQiCoO8mKuvGjRuwtrbGgQMH0LNnTwD/nrFxdXXF8uXLy13m999/x+uvv46rV6/CxsYGABAREYGgoCDcuHEDRkZGCAoKwq5du5CSkiIuN2TIEGRnZyM6OhoA4O7ujs6dO2P16tUAgJKSEjg4OGDq1KmYPXt2pfrX6XRQq9XIycmBSqWq7mGo0NT1cTW+TqK6ZtU7nvpugeqokSNHok2bNpgzZ46+W6kVZ86cgY+PD1JTU2FmZqbvdp66yv4NrdNnbB6Wk5MDAGjYsKFk/qZNm2BlZYV27dohODgY9+7dE8fi4+Ph7OwshhoA0Gg00Ol0OH36tFjj7e0tWadGo0F8fDwAoKCgAElJSZIaAwMDeHt7izXlyc/Ph06nk3yIiOqaylz2z8vLQ0BAABo1aoQGDRpg0KBByMzMlNRU5rJ/eHg42rRpAxMTE7Rq1QrffvttmX6WL1+OVq1awcTEBA4ODpg5cyby8vIeuQ8nTpzA7t27MW3aNHHeL7/8Ah8fHzRq1AgKhQLJyclllqvMfj1MEASEhITAzs4OJiYm8Pb2xvnz5yU1jo6OZW6TWLx4sTh+8eJF9OzZE2ZmZujZsycuXrwoWf7111/Hzz//LJnXtm1bdOnSBcuWLXtkf8+7ZybYlJSUYMaMGejWrRvatWsnzh82bBi+//577N+/H8HBwfjuu+8wYsQIcVyr1UpCDQBxWqvVPrJGp9Ph/v37uHnzJoqLi8utKV1HeRYtWgS1Wi1+HBwcqrfzRES1qDKX/WfOnInffvsN27Ztw4EDB3D16lUMHDhQHK/MZf+1a9ciODgYoaGhOH36NBYsWICAgAD89ttvYs3mzZsxe/ZszJ8/H2fPnsU333yDrVu3PvYszKpVq/DWW2+hQYMG4rzc3Fx0794dn332WYXLPW6/yhMWFoaVK1ciIiICCQkJMDMzg0ajKRO+Fi5cKLlNYurUqeLYe++9hxdeeAHJycmws7PD+++/L45t3boVBgYGGDRoUJltjxkzBmvXri0TGOn/1NN3A5UVEBCAlJQU/Pnnn5L5EyZMEH92dnaGnZ0devfujQsXLqB58+ZPu02J4OBgBAYGitM6nY7hhojqnNJL7qUiIyNhbW2NpKQk9OzZEzk5Ofjmm2+wefNmvPrqqwCADRs2oE2bNjh8+DC6dOmCP/74A2fOnMHevXthY2MDV1dXfPTRRwgKCkJoaCiMjIzw3XffYeLEiRg8eDAAoFmzZjh69Cg+++wz9O/fHwBw6NAhdOvWDcOGDQPw75mPoUOHIiEhocL+i4uL8dNPP2HTpk2S+SNHjgSAMmdDSlVmvx4mCAKWL1+OuXPn4o033gAAfPvtt7CxsUFUVBSGDBki1pqbm8PW1rbcbZ89exbLli1Dy5YtMXr0aDHYZGdnY+7cudi3b1+5y7322mvIysrCgQMH0Lt37wqPyfPsmThjM2XKFOzcuRP79+/Hiy+++Mhad3d3AEBaWhoAwNbWtsxpxdLp0l+4impUKhVMTExgZWUFQ0PDcmsq+qUFAKVSCZVKJfkQEdV1D1/2T0pKQmFhoeRyfOvWrdGkSRPxcnxlLvvn5+fD2NhYsi0TExMcOXIEhYWFAICuXbsiKSlJfOr077//xu7du9GvX78K+z158iRycnLQqVOnKu1nZfbrYenp6dBqtZJl1Go13N3dyyyzePFiNGrUCB06dMCSJUskZ1lcXFywd+9elJSU4I8//kD79u0BAB988AECAgIq/EewkZERXF1d8d///rdK+/o8qdPBRhAETJkyBdu3b8e+ffvg5OT02GVKr6Ha2dkBADw8PHDq1CnJ00sxMTFQqVRo27atWBMbGytZT0xMDDw8PAD8+4vk5uYmqSkpKUFsbKxYQ0QkB+Vd9tdqtTAyMoKFhYWk9sHL8ZW57K/RaPD1118jKSkJgiAgMTERX3/9NQoLC3Hz5k0A/95esHDhQnTv3h3169dH8+bN4enp+chLURkZGTA0NIS1tXWV9rUy+1XeMg/uW0XLTJs2DVu2bMH+/fsxceJEfPrpp5g1a5Y4/vnnn+PcuXNwdHTE+fPn8fnnn+PgwYNITk7GqFGj8Pbbb6NZs2aYNGkSCgoKJNuyt7dHRkZGlfb1eVKnL0UFBARg8+bN+PXXX2Fubi7+0qjVapiYmODChQvYvHkz+vXrh0aNGuHkyZOYOXMmevbsKaZfHx8ftG3bFiNHjkRYWBi0Wi3mzp2LgIAAKJVKAMCkSZOwevVqzJo1C++88w727duHH3/8Ebt27RJ7CQwMhL+/Pzp16oRXXnkFy5cvR25uLsaMGfP0DwwRUS2p6LJ/TZg3bx60Wi26dOkCQRBgY2MDf39/hIWFwcDg339nx8XF4dNPP8WaNWvg7u6OtLQ0TJ8+HR999BHmzZtX7nrv378PpVIJhUJR4z1X14O3IbRv3x5GRkaYOHEiFi1aBKVSiRdeeAE7d+4Ua/Lz86HRaLBx40Z8/PHHMDc3R2pqKvr06YMvv/xScn+OiYmJ5CEZkqrTZ2zWrl2LnJwceHp6ws7OTvxs3boVwL9nUvbu3QsfHx+0bt0a7733HgYNGiS5Ec3Q0BA7d+6EoaEhPDw8MGLECIwaNQoLFy4Ua5ycnLBr1y7ExMTAxcUFS5cuxddffw2NRiPWDB48GJ9//jlCQkLg6uqK5ORkREdHl0ntRETPqoou+9va2qKgoADZ2dmS+gcvx1fmsr+JiQnWr1+Pe/fu4eLFi7h06RIcHR1hbm6Oxo0bA/g3/IwcORLjxo2Ds7MzBgwYgE8//RSLFi1CSUlJuX1bWVnh3r17Zc5sPE5l9qu8ZR7ct8osA/x7m0RRUVGF9/t8+umn8PHxgZubG+Li4jBo0CDUr18fAwcORFxcnKQ2KytLPF5UVp0+Y/O4V+w4ODjgwIEDj11P06ZNsXv37kfWeHp64vjx44+smTJlCqZMmfLY7RERPUsEQcDUqVOxfft2xMXFlbns7+bmhvr16yM2NlZ8Uic1NRWXLl0SL8d7eHjgk08+wfXr18VLQg9f9i9Vv359MTht2bIFr7/+unjG5t69e+LPpQwNDcU+y+Pq6grg3/e8lP5cGZXZr4c5OTnB1tYWsbGx4rZ0Oh0SEhIwefLkCreVnJwMAwODci+XnT17Fps3bxZvpSguLhbvOSosLERxcbGkPiUlBW+++Wal9/N5U6eDDRER1b7HXfZXq9UYO3YsAgMD0bBhQ6hUKkydOhUeHh7ik0OVuez/v//9D0eOHIG7uztu376NZcuWISUlBRs3bhR76d+/P5YtW4YOHTqIl6LmzZuH/v37iwHnYY0bN0bHjh3x559/SoJNVlYWLl26hKtXrwKA+G4eW1tb2NraVmq/gH9vKF60aBEGDBgAhUKBGTNm4OOPP0bLli3h5OSEefPmwd7eHn5+fgD+vZE6ISEBXl5eMDc3R3x8PGbOnIkRI0bA0tJS0rsgCJgwYQK++OIL8aV73bp1w1dffYWXXnoJ3377LYYOHSrWX7x4EVeuXCnz7jX6P8/Um4efdXzzMNGTe5bfPPzqu4v03UK59q8t/8bc1l6DYNfaDQBQXFSIC4d2IzPtJEqKi9DQoSVe6vkGlKbmYn3endtIPfgrsq+mw7Befdi26ohmXTQwMPg3kOTevo4ze7fiXvZNKAwMYGnfDM279IGp5f9dVikpKUZGUhwy/3cc+bk61Dcxg1XT1nBy90F9pUmF+3Al5TC0/zsOt4H/d9bk2rkknNv/c5lax06vwqmzd6X3a//aOZJjIQgC0o/uxbUzR1FUkAe1bVO81PMNmFpYAQDu3LiC/x3cgXvZN1BSXARjlSVsX+oAB5fuMDCUnk+4cjoBty+noZ1muDiv4N5dnNm7Fbrrl9GwSUu08XoThvWNAAAZx+KQfTUdLq/X3fs7960JrpX1VvZvKIPNU8RgQ/TkGGyoPMVFhUj4YRlefm0o1LZN9N1OrSgpLsLhzcvQ1nswLOya6rudCuk72NTpm4eJiIgqw7BefbR59S0U5uU+vvgZlXc3B0079qrToaYu4D02REQkC5YvNNN3C7XKVN0IpupG+m6jzuMZGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBhsiIiKSDQYbIiIikg0GGyIiIpINBpsqCg8Ph6OjI4yNjeHu7o4jR47ouyUiIiL6/xhsqmDr1q0IDAzE/PnzcezYMbi4uECj0eD69ev6bo2IiIjAYFMly5Ytw/jx4zFmzBi0bdsWERERMDU1xfr16/XdGhEREQGop+8GnhUFBQVISkpCcHCwOM/AwADe3t6Ij48vd5n8/Hzk5+eL0zk5OQAAnU5XOz3ez62V9RLVJbX138/TUFSQp+8WiGpdbf03WrpeQRAeWcdgU0k3b95EcXExbGxsJPNtbGxw7ty5cpdZtGgRFixYUGa+g4NDrfRI9DxYN0XfHRDRo6i/WVir679z5w7UanWF4ww2tSg4OBiBgYHidElJCbKystCoUSMoFAo9dkY1QafTwcHBAf/88w9UKpW+2yGih/C/UXkRBAF37tyBvb39I+sYbCrJysoKhoaGyMzMlMzPzMyEra1tucsolUoolUrJPAsLi9pqkfREpVLx/zSJ6jD+NyofjzpTU4o3D1eSkZER3NzcEBsbK84rKSlBbGwsPDw89NgZERERleIZmyoIDAyEv78/OnXqhFdeeQXLly9Hbm4uxowZo+/WiIiICAw2VTJ48GDcuHEDISEh0Gq1cHV1RXR0dJkbiun5oFQqMX/+/DKXG4mobuB/o88nhfC456aIiIiInhG8x4aIiIhkg8GGiIiIZIPBhoiIiGSDwYaIiIhkg8GGqJrCw8Ph6OgIY2NjuLu748iRI/puiYgAHDx4EP3794e9vT0UCgWioqL03RI9RQw2RNWwdetWBAYGYv78+Th27BhcXFyg0Whw/fp1fbdG9NzLzc2Fi4sLwsPD9d0K6QEf9yaqBnd3d3Tu3BmrV68G8O9bqB0cHDB16lTMnj1bz90RUSmFQoHt27fDz89P363QU8IzNkRVVFBQgKSkJHh7e4vzDAwM4O3tjfj4eD12RkREDDZEVXTz5k0UFxeXeeO0jY0NtFqtnroiIiKAwYaIiIhkhMGGqIqsrKxgaGiIzMxMyfzMzEzY2trqqSsiIgIYbIiqzMjICG5uboiNjRXnlZSUIDY2Fh4eHnrsjIiI+O3eRNUQGBgIf39/dOrUCa+88gqWL1+O3NxcjBkzRt+tET337t69i7S0NHE6PT0dycnJaNiwIZo0aaLHzuhp4OPeRNW0evVqLFmyBFqtFq6urli5ciXc3d313RbRcy8uLg5eXl5l5vv7+yMyMvLpN0RPFYMNERERyQbvsSEiIiLZYLAhIiIi2WCwISIiItlgsCEiIiLZYLAhIiIi2WCwISIiItlgsCEiIiLZYLAhIiIi2WCwIaLnikKhQFRUlL7bIKJawmBDRLKi1WoxdepUNGvWDEqlEg4ODujfv7/kS0uJSL74JZhEJBsXL15Et27dYGFhgSVLlsDZ2RmFhYXYs2cPAgICcO7cOX23SES1jGdsiEg23n33XSgUChw5cgSDBg3CSy+9hJdffhmBgYE4fPhwucsEBQXhpZdegqmpKZo1a4Z58+ahsLBQHD9x4gS8vLxgbm4OlUoFNzc3JCYmAgAyMjLQv39/WFpawszMDC+//DJ27979VPaViMrHMzZEJAtZWVmIjo7GJ598AjMzszLjFhYW5S5nbm6OyMhI2Nvb49SpUxg/fjzMzc0xa9YsAMDw4cPRoUMHrF27FoaGhkhOTkb9+vUBAAEBASgoKMDBgwdhZmaGM2fOoEGDBrW2j0T0eAw2RCQLaWlpEAQBrVu3rtJyc+fOFX92dHTE+++/jy1btojB5tKlS/jggw/E9bZs2VKsv3TpEgYNGgRnZ2cAQLNmzZ50N4joCfFSFBHJgiAI1Vpu69at6NatG2xtbdGgQQPMnTsXly5dEscDAwMxbtw4eHt7Y/Hixbhw4YI4Nm3aNHz88cfo1q0b5s+fj5MnTz7xfhDRk2GwISJZaNmyJRQKRZVuEI6Pj8fw4cPRr18/7Ny5E8ePH8eHH36IgoICsSY0NBSnT5+Gr68v9u3bh7Zt22L79u0AgHHjxuHvv//GyJEjcerUKXTq1AmrVq2q8X0jospTCNX9Zw4RUR3Tt29fnDp1CqmpqWXus8nOzoaFhQUUCgW2b98OPz8/LF26FGvWrJGchRk3bhx++uknZGdnl7uNoUOHIjc3Fzt27CgzFhwcjF27dvHMDZEe8YwNEclGeHg4iouL8corr+Dnn3/G+fPncfbsWaxcuRIeHh5l6lu2bIlLly5hy5YtuHDhAlauXCmejQGA+/fvY8qUKYiLi0NGRgb++usvHD16FG3atAEAzJgxA3v27EF6ejqOHTuG/fv3i2NEpB+8eZiIZKNZs2Y4duwYPvnkE7z33nu4du0aGjduDDc3N6xdu7ZM/X/+8x/MnDkTU6ZMQX5+Pnx9fTFv3jyEhoYCAAwNDXHr1i2MGjUKmZmZsLKywsCBA7FgwQIAQHFxMQICAnD58mWoVCr06dMHX3zxxdPcZSJ6CC9FERERkWzwUhQRERHJBoMNERERyQaDDREREckGgw0RERHJBoMNERERyQaDDREREckGgw0RERHJBoMNERERyQaDDREREckGgw0RERHJBoMNERERycb/A9b6de/jBqv+AAAAAElFTkSuQmCC\n"},"metadata":{}}],"execution_count":11},{"cell_type":"code","source":"def evaluate_binary_classifier(\n    y_true, y_pred, y_proba, model_name, model=None,\n    label_names=('Class 0', 'Class 1'), title_suffix='(Validation Set)', \n    save_results=True\n):\n    \"\"\"\n    二分类评估工具函数：打印Accuracy/AUC/分类报告，并绘制混淆矩阵\n    :param y_true: 真实标签（验证集）\n    :param y_pred: 预测标签（由模型predict得到）\n    :param y_proba: 正类概率（由模型predict_proba得到的[:,1]）\n    :param model: 训练好的模型\n    :param model_name: 模型名称（用于文件夹命名和保存模型）\n    :param label_names: 混淆矩阵坐标轴的类别名称\n    :param title_suffix: 图标题后缀，便于标注是验证集/测试集\n    :param save_results: 是否保存评估结果、混淆矩阵图和模型，默认保存\n    :return: 指标字典\n    \"\"\"\n    \n    # 创建保存目录\n    save_dir = f\"/kaggle/working/{model_name}_evaluation\"\n    if save_results and not os.path.exists(save_dir):\n        os.makedirs(save_dir)\n\n    # 评估指标计算\n    acc = accuracy_score(y_true, y_pred)\n    auc = roc_auc_score(y_true, y_proba)\n    \n    # 打印数值指标\n    print(f\"\\n=== {title_suffix} ===\")\n    print(f\"Accuracy: {acc:.4f}\")\n    print(f\"AUC: {auc:.4f}\")\n    print(\"\\n分类报告:\")\n    print(classification_report(y_true, y_pred))\n    \n    # 如果需要保存评估结果\n    if save_results:\n        # 保存评估指标到txt文件\n        with open(f\"{save_dir}/{title_suffix}_evaluation_results.txt\", \"w\") as f:\n            f.write(f\"=== {title_suffix} ===\\n\")\n            f.write(f\"Accuracy: {acc:.4f}\\n\")\n            f.write(f\"AUC: {auc:.4f}\\n\")\n            f.write(\"\\n分类报告:\\n\")\n            f.write(classification_report(y_true, y_pred))\n        \n        # 保存混淆矩阵图\n        plt.figure(figsize=(8, 6))\n        cm = confusion_matrix(y_true, y_pred)\n        sns.heatmap(\n            cm, annot=True, fmt='d', cmap='Blues',\n            xticklabels=label_names,\n            yticklabels=label_names\n        )\n        plt.title(f'Confusion Matrix {title_suffix}')\n        plt.ylabel('True Label')\n        plt.xlabel('Predicted Label')\n        plt.savefig(f\"{save_dir}/{title_suffix}_confusion_matrix.png\")\n        plt.close()\n\n        #保存标签比例\n        result = y_pred.astype(int)\n        plot_class_distribution(result, f\"{model_name} {title_suffix} Target Distribution\",save_dir)\n\n        if model is not None:\n        # 保存模型\n            joblib.dump(model, f\"{save_dir}/model_{title_suffix}.joblib\")\n    \n    # 总是展示混淆矩阵\n    plt.figure(figsize=(8, 6))\n    cm = confusion_matrix(y_true, y_pred)\n    sns.heatmap(\n        cm, annot=True, fmt='d', cmap='Blues',\n        xticklabels=label_names,\n        yticklabels=label_names\n    )\n    plt.title(f'Confusion Matrix {title_suffix}')\n    plt.ylabel('True Label')\n    plt.xlabel('Predicted Label')\n    plt.show()","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:49:16.101690Z","iopub.execute_input":"2025-09-19T13:49:16.101917Z","iopub.status.idle":"2025-09-19T13:49:16.110189Z","shell.execute_reply.started":"2025-09-19T13:49:16.101901Z","shell.execute_reply":"2025-09-19T13:49:16.109544Z"}},"outputs":[],"execution_count":12},{"cell_type":"code","source":"def plot_feature_importance(\n    model, \n    feature_names=None, \n    top_n=20, \n    importance_type='gain', \n    title='Top Feature Importance',\n    save_path=None\n):\n    \"\"\"\n    通用特征重要性可视化函数（支持XGBoost、CatBoost和LightGBM）\n    :param model: 训练好的 XGBClassifier / CatBoostClassifier / LGBMClassifier\n    :param feature_names: 特征名列表（默认自动生成）\n    :param top_n: 展示前N个特征\n    :param importance_type: \n        - XGBoost: 'weight'/'gain'/'cover'\n        - LightGBM: 'split'/'gain'\n    :param title: 图标题\n    :param save_path: 保存路径（如 'feature_importance.png'），为 None 时不保存\n    \"\"\"\n\n    # ===== XGBoost =====\n    if hasattr(model, \"get_booster\"):  \n        plt.figure(figsize=(12, 8))\n        xgb.plot_importance(model, max_num_features=top_n, importance_type=importance_type)\n        plt.title(title)\n\n    # ===== CatBoost =====\n    elif hasattr(model, \"get_feature_importance\"):  \n        importances = model.get_feature_importance()\n        if feature_names is None:\n            feature_names = [f'feat_{i}' for i in range(len(importances))]\n        idx = np.argsort(importances)[::-1][:top_n]\n        plt.figure(figsize=(12, 8))\n        plt.barh([feature_names[i] for i in idx][::-1], np.array(importances)[idx][::-1])\n        plt.title(title)\n        plt.xlabel('Importance')\n        plt.ylabel('Feature')\n        plt.tight_layout()\n\n    # ===== LightGBM =====\n    elif hasattr(model, \"feature_importances_\"):  \n        importances = model.feature_importances_\n        if feature_names is None:\n            feature_names = [f'feat_{i}' for i in range(len(importances))]\n        idx = np.argsort(importances)[::-1][:top_n]\n        plt.figure(figsize=(12, 8))\n        plt.barh([feature_names[i] for i in idx][::-1], np.array(importances)[idx][::-1])\n        plt.title(title)\n        plt.xlabel('Importance')\n        plt.ylabel('Feature')\n        plt.tight_layout()\n\n    else:\n        raise ValueError(\"Unsupported model type: only XGBoost, CatBoost and LightGBM are supported.\")\n\n    # 保存或显示\n    if save_path:\n        save_dir = os.path.dirname(save_path)\n        if save_dir and not os.path.exists(save_dir):\n            os.makedirs(save_dir)\n        plt.savefig(save_path, dpi=300, bbox_inches='tight')\n        print(f\"特征重要性图已保存到: {save_path}\")\n        plt.show()\n        plt.close()\n        \n    else:\n        plt.show()","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:49:16.110755Z","iopub.execute_input":"2025-09-19T13:49:16.110985Z","iopub.status.idle":"2025-09-19T13:49:16.128427Z","shell.execute_reply.started":"2025-09-19T13:49:16.110960Z","shell.execute_reply":"2025-09-19T13:49:16.127706Z"}},"outputs":[],"execution_count":13},{"cell_type":"markdown","source":"# 单独训练特征集成","metadata":{}},{"cell_type":"markdown","source":"由于通过相关性计算发现每一列直接的相关性都极其低，所以我们采用了单独训练每个特征最后进行集成的策略，使得每个特征得到充分运用，同时避免伪交互的发生，在第二种编码的基础上在public上得到了0.92219的评分","metadata":{}},{"cell_type":"code","source":"params = {\n    'boost_from_average':'false',\n    'boost': 'gbdt',\n    'feature_fraction': 1,\n    'learning_rate': 0.08,\n    'max_depth': -1,\n    'metric':'binary_logloss',\n    'num_leaves': 4,\n    'num_threads': 8,\n    'tree_learner': 'serial',\n    'objective': 'binary',\n    'reg_alpha': 2,\n    'reg_lambda': 0,\n    'verbosity': 1,\n    'max_bin':256,\n}\n\n# reg_alpha\nreg_alpha_values = [0.75, 1, 2, 3]\nreg_alpha_var = [3, 0, 2, 3, 2, 0, 1, 1, 3, 2, 2, 0, 2, 0, 2, 2, 2, 1, 1, 2, 1, 2, 3, 3, 2, 1, 3, 1, 3, 2, 2, 3, 1, 1, 3, 2, 0, 1, 0, 2, 1, 1, 2, 3, 0, 3, 3, 3, 2, 0, 3, 1, 3, 1, 1, 0, 2, 2, 0, 0, 0, 1, 2, 1, 0, 1, 3, 2, 0, 2, 1, 2, 0, 0, 1, 3, 3, 1, 2, 3, 3, 2, 0, 1, 2, 3, 3, 2, 3, 3, 0, 0, 3, 0, 1, 0, 1, 0, 2, 3, 1, 0, 3, 1, 3, 2, 3, 1, 3, 3, 3, 1, 3, 2, 3, 2, 1, 0, 1, 2, 0, 3, 0, 3, 0, 3, 2, 1, 0, 0, 2, 2, 2, 0, 1, 0, 0, 2, 3, 2, 2, 1, 1, 0, 1, 2, 2, 2, 1, 0, 2, 3, 2, 3, 1, 1, 3, 1, 1, 2, 1, 2, 0, 3, 1, 3, 3, 2, 0, 1, 3, 3, 0, 1, 0, 3, 1, 3, 1, 3, 0, 3, 0, 3, 1, 0, 0, 0, 3, 0, 3, 0, 0, 2, 0, 3, 1, 0, 3, 2]\n\n# max_bin\nmax_bin_values = [256, 512, 1024]\nmax_bin_var = [0, 0, 1, 0, 0, 0, 2, 0, 0, 2, 0, 2, 0, 0, 1, 1, 1, 0, 0, 0, 0, 2, 2, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 2, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 2, 1, 1, 1, 1, 0, 0, 0, 0, 1, 2, 1, 0, 0, 1, 2, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 2, 1, 0, 0, 0, 2, 0, 1, 1, 0, 1, 0, 0, 1, 2, 1, 2, 1, 0, 0, 1, 0, 2, 0, 1, 0, 0, 2, 1, 1, 1, 0, 0, 0, 2, 0, 0, 2, 1, 0, 0, 1, 0, 1, 2, 0, 0, 0, 0, 0, 2, 2, 2, 2, 1, 1, 2, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 2, 0, 1, 0, 1, 1, 0, 2, 1, 1, 1, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, 0, 1, 0, 1, 2, 0, 0, 0, 0, 2, 0, 0, 2, 0, 1, 1, 0, 2, 0, 0, 0, 1, 2, 0, 0, 1, 0, 2]\n\n# learning_rate\nlearning_rate_values = [0.06, 0.08, 0.12]\nlearning_rate_var = [2, 2, 2, 1, 2, 2, 2, 0, 1, 2, 0, 2, 2, 2, 0, 2, 2, 0, 2, 1, 2, 2, 2, 2, 2, 0, 1, 0, 2, 0, 0, 2, 0, 2, 2, 2, 1, 2, 0, 0, 2, 0, 0, 1, 2, 1, 2, 0, 0, 2, 1, 2, 2, 2, 2, 0, 0, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 0, 2, 2, 2, 0, 2, 2, 1, 0, 1, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 1, 1, 2, 0, 2, 0, 2, 0, 2, 1, 0, 0, 1, 2, 0, 2, 2, 2, 0, 2, 2, 2, 2, 1, 0, 2, 1, 2, 2, 1, 2, 0, 2, 0, 2, 2, 2, 2, 2, 2, 1, 2, 1, 0, 2, 1, 1, 2, 2, 2, 2, 0, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 0, 1, 2, 0, 2, 2, 0, 1, 2, 2, 2, 1, 0, 1, 2, 1, 2, 1, 1, 1, 2, 1, 2, 1, 0, 0, 2, 0, 1, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1]\n\n# num_leaves\nnum_leaves_values = [3, 4, 5]\nnum_leaves_var = [1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 2, 2, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 2, 1, 2, 0, 0, 0, 0, 0, 1, 1, 0, 0, 2, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 2, 0, 0, 0, 0, 1, 0, 1, 2, 1, 1, 1, 0, 2, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 1, 2, 0, 1, 0, 2, 2, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 2, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 2, 0, 0, 0, 0, 1, 0, 0, 2, 1, 0, 1, 2, 1, 1, 0, 0, 0, 2, 1, 2]","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:49:16.181172Z","iopub.execute_input":"2025-09-19T13:49:16.181380Z","iopub.status.idle":"2025-09-19T13:49:16.199099Z","shell.execute_reply.started":"2025-09-19T13:49:16.181362Z","shell.execute_reply":"2025-09-19T13:49:16.198567Z"}},"outputs":[],"execution_count":14},{"cell_type":"code","source":"n_folds = 5\nearly_stopping_rounds=10\nsettings = [4]\nnp.random.seed(47)\n\nsettings_best_ind = []\nfeatures_used = [features, features_count]\n\ndef train_trees():\n    preds_oof = np.zeros((len(X_train), len(features)))\n    preds_test = np.zeros((len(X_test), len(features)))\n    preds_train = np.zeros((len(X_train), len(features)))\n    preds_fake = np.zeros((len(X_fake), len(features)))\n\n    features_used_flatten = [var for sublist in features_used for var in sublist]\n    X_train_used = X_train[features_used_flatten]\n    X_test_used = X_test[features_used_flatten]\n    X_fake_used = X_fake[features_used_flatten]\n\n    for i in range(len(features)):\n        params['max_bin'] = max_bin_values[max_bin_var[i]]\n        params['learning_rate'] = learning_rate_values[learning_rate_var[i]]\n        params['reg_alpha'] = reg_alpha_values[reg_alpha_var[i]]\n        params['num_leaves'] = num_leaves_values[num_leaves_var[i]]\n        features_train = [feature_set[i] for feature_set in features_used] \n        print(f'Training on: {features_train}')\n        folds = StratifiedKFold(n_splits=n_folds, shuffle=True, random_state=np.random.randint(100000))\n        list_folds = list(folds.split(X_train_used.values, target_train.values))\n        preds_oof_temp = np.zeros((preds_oof.shape[0], len(settings)))\n        preds_test_temp = np.zeros((preds_test.shape[0], len(settings)))\n        preds_train_temp = np.zeros((preds_train.shape[0], len(settings)))\n        preds_fake_temp = np.zeros((preds_fake.shape[0], len(settings)))\n\n        scores = []\n        for j, setting in enumerate(settings):\n            # setting is used for hyperparameter tuning, here you can add sometinh like params['num_leaves'] = setting\n            print('\\nsetting: ', setting)\n            for k, (trn_idx, val_idx) in enumerate(list_folds):\n                print(\"Fold: {}\".format(k+1), end=\"\")\n                trn_data = lgb.Dataset(X_train_used.iloc[trn_idx][features_train], label=target_train.iloc[trn_idx])\n                val_data = lgb.Dataset(X_train_used.iloc[val_idx][features_train], label=target_train.iloc[val_idx])\n\n                callbacks = [\n                    lgb.early_stopping(stopping_rounds=early_stopping_rounds, verbose=False),  # 替代 early_stopping_rounds\n                    lgb.log_evaluation(period=0)  # 替代 verbose_eval=False；想每100轮打一次就写 period=100\n                ]\n                # Binary Log Loss\n                clf = lgb.train(params, trn_data, 2000, valid_sets=[trn_data, val_data],callbacks=callbacks) \n\n                prediction_val1 = clf.predict(X_train_used.iloc[val_idx][features_train])\n                prediction_test1 = clf.predict(X_test_used[features_train])\n                prediction_train1 = clf.predict(X_train_used.iloc[trn_idx][features_train])\n                prediction_fake1 = clf.predict(X_fake_used[features_train])\n\n                # Predictions\n                s1 = roc_auc_score(target_train.iloc[val_idx], prediction_val1)\n                s1_log = log_loss(target_train.iloc[val_idx], prediction_val1)\n                print(' - val AUC: {:<8.4f} - loss: {:<8.3f}'.format(s1, s1_log*1000), end='')\n\n                # Predictions Train\n                s1_train = roc_auc_score(target_train.iloc[trn_idx], prediction_train1)\n                s1_log_train = log_loss(target_train.iloc[trn_idx], prediction_train1)\n                print(' - train AUC: {:<8.4f} - loss: {:<8.3f}'.format(s1_train, s1_log_train*1000), end='')\n            \n                print('')\n\n\n                preds_oof_temp[val_idx,j] += np.sqrt(prediction_val1 - prediction_val1.mean() + 0.1) \n                preds_test_temp[:,j] += np.sqrt(prediction_test1 - prediction_test1.mean() + 0.1) / n_folds\n                preds_train_temp[trn_idx,j] += np.sqrt(prediction_train1 - prediction_train1.mean() + 0.1) / (n_folds-1)\n                preds_fake_temp[:,j] += np.sqrt(prediction_fake1 - prediction_fake1.mean() + 0.1) / n_folds\n\n            score_setting = roc_auc_score(target_train, preds_oof_temp[:,j])\n            score_setting_log = 1000*log_loss(target_train, np.exp(preds_oof_temp[:,j]))\n            scores.append(score_setting_log)\n            print(\"Score:  - val AUC: {:<8.4f} - loss: {:<8.3f}\".format(score_setting, score_setting_log), end='')\n           \n            score_setting_train = roc_auc_score(target_train, preds_train_temp[:,j])\n            score_setting_log_train = 1000*log_loss(target_train, np.exp(preds_train_temp[:,j]))\n            print(\" - train AUC: {:<8.4f} - loss: {:<8.3f}\".format(score_setting_train, score_setting_log_train))\n\n        best_ind = np.argmin(scores)\n        settings_best_ind.append(best_ind)\n        preds_oof[:,i] = preds_oof_temp[:,best_ind]\n        preds_test[:,i] = preds_test_temp[:,best_ind]\n        preds_train[:,i] = preds_train_temp[:,best_ind]\n        preds_fake[:,i] = preds_fake_temp[:,best_ind]\n\n\n        print('\\nbest setting: ', settings[best_ind])\n        preds_oof_cum = preds_oof[:,:i+1].mean(axis=1)\n        print(\"Cum CV val  : {:<8.4f} - loss: {:<8.3f}\".format(roc_auc_score(target_train, preds_oof_cum), 1000*log_loss(target_train, np.exp(preds_oof_cum))))\n        preds_train_cum = preds_train[:,:i+1].mean(axis=1)\n        print(\"Cum CV train: {:<8.4f} - loss: {:<8.3f}\".format(roc_auc_score(target_train, preds_train_cum), 1000*log_loss(target_train, np.exp(preds_train_cum))))\n        print('*****' * 10 + '\\n')\n        \n    return preds_oof, preds_test, preds_train, preds_fake\n\npreds_oof, preds_test, preds_train, preds_fake = train_trees()","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T13:49:19.520504Z","iopub.execute_input":"2025-09-19T13:49:19.521092Z","iopub.status.idle":"2025-09-19T14:30:34.493480Z","shell.execute_reply.started":"2025-09-19T13:49:19.521065Z","shell.execute_reply":"2025-09-19T14:30:34.492608Z"}},"outputs":[{"name":"stdout","text":"Training on: ['var_0', 'var_0_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002977 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5551   - loss: 323.450  - train AUC: 0.5540   - loss: 323.666 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001003 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5455   - loss: 323.605  - train AUC: 0.5561   - loss: 323.624 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003277 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5468   - loss: 323.763  - train AUC: 0.5592   - loss: 323.410 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000840 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5462   - loss: 324.346  - train AUC: 0.5567   - loss: 323.373 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003665 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5475   - loss: 323.997  - train AUC: 0.5575   - loss: 323.469 \nScore:  - val AUC: 0.5477   - loss: 32421.627 - train AUC: 0.5589   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.5477   - loss: 32421.627\nCum CV train: 0.5589   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_1', 'var_1_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003824 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5468   - loss: 323.673  - train AUC: 0.5520   - loss: 323.725 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002198 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5469   - loss: 323.707  - train AUC: 0.5528   - loss: 323.678 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001109 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5437   - loss: 324.298  - train AUC: 0.5501   - loss: 323.623 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002200 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5467   - loss: 324.092  - train AUC: 0.5503   - loss: 323.715 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002377 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5464   - loss: 323.933  - train AUC: 0.5512   - loss: 323.643 \nScore:  - val AUC: 0.5456   - loss: 32421.627 - train AUC: 0.5524   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.5738   - loss: 32421.627\nCum CV train: 0.5825   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_2', 'var_2_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002163 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 529\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5515   - loss: 323.738  - train AUC: 0.5576   - loss: 323.180 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002283 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 529\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5447   - loss: 323.881  - train AUC: 0.5587   - loss: 323.175 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000923 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 529\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5537   - loss: 323.439  - train AUC: 0.5606   - loss: 323.150 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000961 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 529\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5530   - loss: 323.362  - train AUC: 0.5602   - loss: 323.216 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002255 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 529\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5560   - loss: 323.268  - train AUC: 0.5595   - loss: 323.228 \nScore:  - val AUC: 0.5507   - loss: 32421.627 - train AUC: 0.5620   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.5965   - loss: 32421.627\nCum CV train: 0.6057   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_3', 'var_3_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002253 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5116   - loss: 325.988  - train AUC: 0.5171   - loss: 325.925 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003840 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5073   - loss: 326.039  - train AUC: 0.5180   - loss: 325.884 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002873 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5057   - loss: 326.118  - train AUC: 0.5178   - loss: 325.878 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002860 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5151   - loss: 326.059  - train AUC: 0.5178   - loss: 325.883 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.004055 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5103   - loss: 326.095  - train AUC: 0.5171   - loss: 325.884 \nScore:  - val AUC: 0.5088   - loss: 32421.627 - train AUC: 0.5196   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.5978   - loss: 32421.627\nCum CV train: 0.6083   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_4', 'var_4_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002307 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 276\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5105   - loss: 325.955  - train AUC: 0.5194   - loss: 325.943 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002039 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 276\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.4978   - loss: 326.254  - train AUC: 0.5195   - loss: 325.879 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003858 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 276\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.4992   - loss: 326.147  - train AUC: 0.5228   - loss: 325.893 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002754 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 276\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5125   - loss: 326.011  - train AUC: 0.5179   - loss: 325.928 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003450 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 276\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5033   - loss: 326.067  - train AUC: 0.5196   - loss: 325.925 \nScore:  - val AUC: 0.5041   - loss: 32421.627 - train AUC: 0.5236   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.5984   - loss: 32421.627\nCum CV train: 0.6103   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_5', 'var_5_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000990 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5307   - loss: 324.709  - train AUC: 0.5354   - loss: 324.315 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002124 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5259   - loss: 324.570  - train AUC: 0.5339   - loss: 324.429 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002194 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5282   - loss: 324.575  - train AUC: 0.5366   - loss: 324.333 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001011 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5330   - loss: 324.165  - train AUC: 0.5341   - loss: 324.476 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000940 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5242   - loss: 324.883  - train AUC: 0.5348   - loss: 324.353 \nScore:  - val AUC: 0.5281   - loss: 32421.627 - train AUC: 0.5365   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.6078   - loss: 32421.627\nCum CV train: 0.6200   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_6', 'var_6_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002143 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1055\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5572   - loss: 323.334  - train AUC: 0.5621   - loss: 323.069 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003711 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1055\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5557   - loss: 323.311  - train AUC: 0.5629   - loss: 323.062 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002133 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1055\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5560   - loss: 323.482  - train AUC: 0.5657   - loss: 322.978 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003104 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1055\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5613   - loss: 323.028  - train AUC: 0.5652   - loss: 323.050 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002342 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1055\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5633   - loss: 322.952  - train AUC: 0.5699   - loss: 322.834 \nScore:  - val AUC: 0.5581   - loss: 32421.627 - train AUC: 0.5708   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.6281   - loss: 32421.627\nCum CV train: 0.6392   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_8', 'var_8_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000966 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5181   - loss: 325.749  - train AUC: 0.5266   - loss: 325.675 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002799 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5192   - loss: 325.774  - train AUC: 0.5253   - loss: 325.666 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003821 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5166   - loss: 325.930  - train AUC: 0.5233   - loss: 325.614 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002212 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5230   - loss: 325.711  - train AUC: 0.5306   - loss: 325.566 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002172 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5164   - loss: 325.871  - train AUC: 0.5260   - loss: 325.654 \nScore:  - val AUC: 0.5183   - loss: 32421.627 - train AUC: 0.5294   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.6303   - loss: 32421.627\nCum CV train: 0.6422   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_9', 'var_9_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001865 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 279\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5359   - loss: 324.871  - train AUC: 0.5502   - loss: 324.161 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003691 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 279\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5469   - loss: 324.102  - train AUC: 0.5481   - loss: 324.311 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002008 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 279\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5393   - loss: 324.615  - train AUC: 0.5473   - loss: 324.245 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002212 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 279\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5486   - loss: 324.322  - train AUC: 0.5479   - loss: 324.227 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002320 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 279\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5492   - loss: 324.322  - train AUC: 0.5481   - loss: 324.176 \nScore:  - val AUC: 0.5432   - loss: 32421.627 - train AUC: 0.5495   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.6410   - loss: 32421.627\nCum CV train: 0.6532   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_11', 'var_11_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003870 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5150   - loss: 325.754  - train AUC: 0.5265   - loss: 325.535 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002830 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5141   - loss: 325.905  - train AUC: 0.5264   - loss: 325.478 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002137 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5212   - loss: 325.563  - train AUC: 0.5248   - loss: 325.545 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002333 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5279   - loss: 325.590  - train AUC: 0.5258   - loss: 325.513 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002187 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5153   - loss: 325.770  - train AUC: 0.5289   - loss: 325.377 \nScore:  - val AUC: 0.5175   - loss: 32421.627 - train AUC: 0.5280   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.6435   - loss: 32421.627\nCum CV train: 0.6566   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_12', 'var_12_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002254 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 355\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5593   - loss: 322.593  - train AUC: 0.5655   - loss: 322.488 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003466 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 355\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5603   - loss: 322.385  - train AUC: 0.5660   - loss: 322.509 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003609 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 355\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5671   - loss: 322.366  - train AUC: 0.5633   - loss: 322.532 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002192 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 355\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5588   - loss: 322.980  - train AUC: 0.5658   - loss: 322.371 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002177 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 355\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5562   - loss: 322.875  - train AUC: 0.5674   - loss: 322.350 \nScore:  - val AUC: 0.5595   - loss: 32421.627 - train AUC: 0.5669   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.6605   - loss: 32421.627\nCum CV train: 0.6729   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_13', 'var_13_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002006 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5527   - loss: 323.583  - train AUC: 0.5587   - loss: 323.191 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000972 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5539   - loss: 323.572  - train AUC: 0.5602   - loss: 323.096 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001124 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5585   - loss: 323.450  - train AUC: 0.5580   - loss: 323.165 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002032 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5572   - loss: 323.363  - train AUC: 0.5596   - loss: 323.147 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001129 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5503   - loss: 323.321  - train AUC: 0.5597   - loss: 323.236 \nScore:  - val AUC: 0.5539   - loss: 32421.627 - train AUC: 0.5606   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.6716   - loss: 32421.627\nCum CV train: 0.6843   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_14', 'var_14_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002449 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5062   - loss: 326.096  - train AUC: 0.5142   - loss: 326.073 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002125 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5065   - loss: 326.088  - train AUC: 0.5115   - loss: 326.115 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001969 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5129   - loss: 326.100  - train AUC: 0.5218   - loss: 325.941 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003512 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5000   - loss: 326.180  - train AUC: 0.5000   - loss: 326.153 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002336 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5033   - loss: 326.165  - train AUC: 0.5075   - loss: 326.104 \nScore:  - val AUC: 0.5073   - loss: 32421.627 - train AUC: 0.5197   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.6719   - loss: 32421.627\nCum CV train: 0.6848   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_15', 'var_15_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002067 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 303\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5128   - loss: 325.929  - train AUC: 0.5236   - loss: 325.781 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002145 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 303\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5167   - loss: 325.900  - train AUC: 0.5225   - loss: 325.781 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001951 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 303\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5094   - loss: 326.069  - train AUC: 0.5221   - loss: 325.799 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002140 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 303\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5107   - loss: 326.043  - train AUC: 0.5231   - loss: 325.792 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002396 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 303\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5222   - loss: 325.873  - train AUC: 0.5205   - loss: 325.817 \nScore:  - val AUC: 0.5137   - loss: 32421.627 - train AUC: 0.5258   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.6728   - loss: 32421.627\nCum CV train: 0.6864   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_16', 'var_16_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001071 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5013   - loss: 326.124  - train AUC: 0.5204   - loss: 325.901 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002413 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5021   - loss: 326.159  - train AUC: 0.5192   - loss: 325.916 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002047 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5146   - loss: 326.032  - train AUC: 0.5216   - loss: 325.870 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002352 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5116   - loss: 326.023  - train AUC: 0.5236   - loss: 325.879 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003180 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5074   - loss: 326.069  - train AUC: 0.5212   - loss: 325.937 \nScore:  - val AUC: 0.5068   - loss: 32421.627 - train AUC: 0.5262   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.6730   - loss: 32421.627\nCum CV train: 0.6874   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_18', 'var_18_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001178 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5390   - loss: 324.043  - train AUC: 0.5457   - loss: 324.036 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003024 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5472   - loss: 323.767  - train AUC: 0.5431   - loss: 324.109 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002485 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5419   - loss: 323.889  - train AUC: 0.5445   - loss: 324.050 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002289 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5319   - loss: 324.963  - train AUC: 0.5450   - loss: 323.844 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002677 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5374   - loss: 324.399  - train AUC: 0.5430   - loss: 324.013 \nScore:  - val AUC: 0.5391   - loss: 32421.627 - train AUC: 0.5458   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.6808   - loss: 32421.627\nCum CV train: 0.6954   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_19', 'var_19_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003246 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5128   - loss: 325.887  - train AUC: 0.5213   - loss: 325.758 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001096 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5116   - loss: 325.781  - train AUC: 0.5227   - loss: 325.751 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001027 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.4966   - loss: 326.292  - train AUC: 0.5234   - loss: 325.650 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002131 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5151   - loss: 325.803  - train AUC: 0.5219   - loss: 325.767 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002857 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5164   - loss: 325.966  - train AUC: 0.5206   - loss: 325.704 \nScore:  - val AUC: 0.5096   - loss: 32421.627 - train AUC: 0.5249   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.6817   - loss: 32421.627\nCum CV train: 0.6971   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_20', 'var_20_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002236 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5149   - loss: 325.949  - train AUC: 0.5259   - loss: 325.548 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001021 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5206   - loss: 325.631  - train AUC: 0.5284   - loss: 325.545 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002100 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5225   - loss: 325.587  - train AUC: 0.5264   - loss: 325.559 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001106 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5129   - loss: 325.909  - train AUC: 0.5261   - loss: 325.560 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002325 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5219   - loss: 325.716  - train AUC: 0.5240   - loss: 325.620 \nScore:  - val AUC: 0.5178   - loss: 32421.627 - train AUC: 0.5281   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.6835   - loss: 32421.627\nCum CV train: 0.6994   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_21', 'var_21_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002131 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5550   - loss: 323.500  - train AUC: 0.5609   - loss: 323.107 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002161 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5635   - loss: 322.815  - train AUC: 0.5625   - loss: 323.068 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002385 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5537   - loss: 323.491  - train AUC: 0.5632   - loss: 323.040 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002257 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5539   - loss: 323.701  - train AUC: 0.5629   - loss: 322.962 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002097 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5593   - loss: 323.187  - train AUC: 0.5626   - loss: 322.956 \nScore:  - val AUC: 0.5564   - loss: 32421.627 - train AUC: 0.5635   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.6943   - loss: 32421.627\nCum CV train: 0.7100   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_22', 'var_22_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001079 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5493   - loss: 323.347  - train AUC: 0.5568   - loss: 323.071 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002723 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5476   - loss: 323.455  - train AUC: 0.5579   - loss: 323.071 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002168 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5520   - loss: 323.587  - train AUC: 0.5555   - loss: 323.042 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001150 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5618   - loss: 323.026  - train AUC: 0.5551   - loss: 323.147 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001066 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5506   - loss: 323.164  - train AUC: 0.5571   - loss: 323.093 \nScore:  - val AUC: 0.5516   - loss: 32421.627 - train AUC: 0.5579   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7036   - loss: 32421.627\nCum CV train: 0.7191   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_23', 'var_23_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002156 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 296\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5195   - loss: 325.859  - train AUC: 0.5280   - loss: 325.549 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002486 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 296\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5184   - loss: 325.959  - train AUC: 0.5289   - loss: 325.488 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002026 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 296\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5212   - loss: 325.859  - train AUC: 0.5295   - loss: 325.502 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001844 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 296\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5267   - loss: 325.670  - train AUC: 0.5287   - loss: 325.576 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002025 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 296\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5271   - loss: 325.597  - train AUC: 0.5269   - loss: 325.621 \nScore:  - val AUC: 0.5221   - loss: 32421.627 - train AUC: 0.5298   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7049   - loss: 32421.627\nCum CV train: 0.7211   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_24', 'var_24_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002092 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5204   - loss: 325.617  - train AUC: 0.5363   - loss: 325.255 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001134 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5289   - loss: 325.580  - train AUC: 0.5358   - loss: 325.185 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002262 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5301   - loss: 325.379  - train AUC: 0.5319   - loss: 325.341 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003213 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5256   - loss: 325.556  - train AUC: 0.5306   - loss: 325.340 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002028 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5281   - loss: 325.465  - train AUC: 0.5344   - loss: 325.328 \nScore:  - val AUC: 0.5259   - loss: 32421.627 - train AUC: 0.5364   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7069   - loss: 32421.627\nCum CV train: 0.7236   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_25', 'var_25_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002037 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1084\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5087   - loss: 326.008  - train AUC: 0.5243   - loss: 325.837 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003014 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1084\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5059   - loss: 326.108  - train AUC: 0.5245   - loss: 325.824 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002259 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1084\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5092   - loss: 326.188  - train AUC: 0.5197   - loss: 325.860 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002186 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1084\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5107   - loss: 326.055  - train AUC: 0.5263   - loss: 325.787 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002740 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1084\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5078   - loss: 326.170  - train AUC: 0.5244   - loss: 325.773 \nScore:  - val AUC: 0.5082   - loss: 32421.627 - train AUC: 0.5289   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7071   - loss: 32421.627\nCum CV train: 0.7248   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_26', 'var_26_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001121 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5548   - loss: 322.603  - train AUC: 0.5633   - loss: 322.144 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001096 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5575   - loss: 322.549  - train AUC: 0.5624   - loss: 322.171 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001119 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5658   - loss: 322.194  - train AUC: 0.5607   - loss: 322.255 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002784 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5611   - loss: 321.782  - train AUC: 0.5622   - loss: 322.274 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002042 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5562   - loss: 322.992  - train AUC: 0.5623   - loss: 322.117 \nScore:  - val AUC: 0.5583   - loss: 32421.627 - train AUC: 0.5635   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7180   - loss: 32421.627\nCum CV train: 0.7350   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_28', 'var_28_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003406 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 288\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5251   - loss: 325.692  - train AUC: 0.5345   - loss: 325.462 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002493 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 288\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5221   - loss: 325.808  - train AUC: 0.5340   - loss: 325.480 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002280 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 288\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5188   - loss: 325.917  - train AUC: 0.5338   - loss: 325.503 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003444 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 288\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5242   - loss: 325.773  - train AUC: 0.5319   - loss: 325.505 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001937 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 288\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5224   - loss: 325.865  - train AUC: 0.5329   - loss: 325.524 \nScore:  - val AUC: 0.5220   - loss: 32421.627 - train AUC: 0.5368   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7193   - loss: 32421.627\nCum CV train: 0.7371   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_29', 'var_29_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003590 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5079   - loss: 326.097  - train AUC: 0.5060   - loss: 326.132 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002398 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.4942   - loss: 326.193  - train AUC: 0.5153   - loss: 326.047 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001131 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5039   - loss: 326.170  - train AUC: 0.5089   - loss: 326.096 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002318 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5065   - loss: 326.164  - train AUC: 0.5042   - loss: 326.138 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002170 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5016   - loss: 655.992  - train AUC: 0.5051   - loss: 655.989 \nScore:  - val AUC: 0.5034   - loss: 32421.627 - train AUC: 0.5134   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7193   - loss: 32421.627\nCum CV train: 0.7373   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_31', 'var_31_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002090 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 533\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5311   - loss: 325.587  - train AUC: 0.5338   - loss: 325.567 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002255 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 533\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5185   - loss: 326.048  - train AUC: 0.5366   - loss: 325.486 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000899 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 533\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5239   - loss: 325.891  - train AUC: 0.5328   - loss: 325.532 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002027 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 533\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5302   - loss: 325.677  - train AUC: 0.5349   - loss: 325.503 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002081 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 533\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5287   - loss: 325.750  - train AUC: 0.5330   - loss: 325.550 \nScore:  - val AUC: 0.5259   - loss: 32421.627 - train AUC: 0.5364   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7204   - loss: 32421.627\nCum CV train: 0.7391   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_32', 'var_32_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003426 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5265   - loss: 325.147  - train AUC: 0.5330   - loss: 325.119 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001893 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5291   - loss: 325.504  - train AUC: 0.5319   - loss: 325.043 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002059 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5238   - loss: 325.305  - train AUC: 0.5336   - loss: 325.094 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000933 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5305   - loss: 325.323  - train AUC: 0.5324   - loss: 325.103 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001975 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5319   - loss: 325.118  - train AUC: 0.5317   - loss: 325.089 \nScore:  - val AUC: 0.5264   - loss: 32421.627 - train AUC: 0.5344   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7233   - loss: 32421.627\nCum CV train: 0.7423   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_33', 'var_33_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002173 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5345   - loss: 324.445  - train AUC: 0.5454   - loss: 323.811 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001032 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5435   - loss: 323.493  - train AUC: 0.5464   - loss: 323.849 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001054 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5379   - loss: 324.126  - train AUC: 0.5456   - loss: 323.808 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002138 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5384   - loss: 324.331  - train AUC: 0.5452   - loss: 323.806 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002272 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5460   - loss: 323.751  - train AUC: 0.5437   - loss: 323.936 \nScore:  - val AUC: 0.5384   - loss: 32421.627 - train AUC: 0.5471   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7292   - loss: 32421.627\nCum CV train: 0.7479   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_34', 'var_34_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001971 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 295\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5496   - loss: 324.147  - train AUC: 0.5503   - loss: 324.259 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002171 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 295\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5442   - loss: 324.700  - train AUC: 0.5508   - loss: 324.097 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000800 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 295\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5450   - loss: 324.380  - train AUC: 0.5508   - loss: 324.126 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002151 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 295\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5431   - loss: 324.351  - train AUC: 0.5508   - loss: 324.187 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001827 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 295\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5492   - loss: 324.232  - train AUC: 0.5508   - loss: 324.162 \nScore:  - val AUC: 0.5457   - loss: 32421.627 - train AUC: 0.5522   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7343   - loss: 32421.627\nCum CV train: 0.7529   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_35', 'var_35_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001029 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5286   - loss: 325.190  - train AUC: 0.5427   - loss: 324.948 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003195 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5437   - loss: 324.909  - train AUC: 0.5405   - loss: 324.959 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001097 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5396   - loss: 325.005  - train AUC: 0.5394   - loss: 324.998 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002178 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5336   - loss: 325.237  - train AUC: 0.5427   - loss: 324.856 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003544 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5387   - loss: 325.195  - train AUC: 0.5428   - loss: 324.813 \nScore:  - val AUC: 0.5360   - loss: 32421.627 - train AUC: 0.5430   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7374   - loss: 32421.627\nCum CV train: 0.7563   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_36', 'var_36_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002014 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5332   - loss: 325.296  - train AUC: 0.5426   - loss: 324.834 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002102 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5407   - loss: 325.048  - train AUC: 0.5425   - loss: 324.877 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002009 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5373   - loss: 325.015  - train AUC: 0.5468   - loss: 324.691 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002407 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5392   - loss: 325.052  - train AUC: 0.5409   - loss: 324.899 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001012 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5446   - loss: 324.662  - train AUC: 0.5421   - loss: 324.872 \nScore:  - val AUC: 0.5384   - loss: 32421.627 - train AUC: 0.5445   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7406   - loss: 32421.627\nCum CV train: 0.7597   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_37', 'var_37_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002134 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 530\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5037   - loss: 326.085  - train AUC: 0.5214   - loss: 325.939 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002655 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 530\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5077   - loss: 326.033  - train AUC: 0.5196   - loss: 325.932 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001940 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 530\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5121   - loss: 326.109  - train AUC: 0.5205   - loss: 325.891 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002260 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 530\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5001   - loss: 326.159  - train AUC: 0.5192   - loss: 325.922 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002419 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 530\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5020   - loss: 326.072  - train AUC: 0.5180   - loss: 325.914 \nScore:  - val AUC: 0.5038   - loss: 32421.627 - train AUC: 0.5243   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7408   - loss: 32421.627\nCum CV train: 0.7604   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_39', 'var_39_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002140 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.4999   - loss: 326.188  - train AUC: 0.5218   - loss: 325.933 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001199 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5000   - loss: 326.125  - train AUC: 0.5000   - loss: 326.167 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002057 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5034   - loss: 326.191  - train AUC: 0.5251   - loss: 325.910 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002090 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5029   - loss: 326.177  - train AUC: 0.5248   - loss: 325.822 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002099 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5076   - loss: 326.208  - train AUC: 0.5191   - loss: 325.928 \nScore:  - val AUC: 0.5021   - loss: 32421.627 - train AUC: 0.5291   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7407   - loss: 32421.627\nCum CV train: 0.7609   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_40', 'var_40_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001162 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5540   - loss: 323.317  - train AUC: 0.5481   - loss: 323.650 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002121 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5470   - loss: 323.858  - train AUC: 0.5501   - loss: 323.459 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001063 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5473   - loss: 323.313  - train AUC: 0.5501   - loss: 323.582 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002028 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5406   - loss: 323.837  - train AUC: 0.5508   - loss: 323.507 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003042 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5432   - loss: 324.009  - train AUC: 0.5511   - loss: 323.403 \nScore:  - val AUC: 0.5453   - loss: 32421.627 - train AUC: 0.5512   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7469   - loss: 32421.627\nCum CV train: 0.7666   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_41', 'var_41_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002227 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5000   - loss: 326.125  - train AUC: 0.5000   - loss: 326.167 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001213 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5000   - loss: 326.125  - train AUC: 0.5000   - loss: 326.167 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001123 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5038   - loss: 326.172  - train AUC: 0.5074   - loss: 326.126 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002016 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5000   - loss: 326.180  - train AUC: 0.5000   - loss: 326.153 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002022 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5058   - loss: 326.163  - train AUC: 0.5086   - loss: 326.122 \nScore:  - val AUC: 0.5031   - loss: 32421.627 - train AUC: 0.5083   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7469   - loss: 32421.627\nCum CV train: 0.7666   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_42', 'var_42_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002132 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 288\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5108   - loss: 326.036  - train AUC: 0.5231   - loss: 325.918 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002200 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 288\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5045   - loss: 326.112  - train AUC: 0.5209   - loss: 325.934 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002082 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 288\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5015   - loss: 326.233  - train AUC: 0.5242   - loss: 325.915 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003352 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 288\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5028   - loss: 326.233  - train AUC: 0.5230   - loss: 325.896 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001926 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 288\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5044   - loss: 326.280  - train AUC: 0.5223   - loss: 325.936 \nScore:  - val AUC: 0.5039   - loss: 32421.627 - train AUC: 0.5269   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7469   - loss: 32421.627\nCum CV train: 0.7673   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_43', 'var_43_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002483 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 567\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5287   - loss: 325.488  - train AUC: 0.5336   - loss: 325.338 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001982 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 567\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5225   - loss: 325.564  - train AUC: 0.5337   - loss: 325.339 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002057 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 567\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5225   - loss: 325.662  - train AUC: 0.5321   - loss: 325.336 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002143 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 567\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5307   - loss: 325.346  - train AUC: 0.5317   - loss: 325.441 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002036 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 567\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5239   - loss: 325.693  - train AUC: 0.5323   - loss: 325.299 \nScore:  - val AUC: 0.5250   - loss: 32421.627 - train AUC: 0.5343   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7485   - loss: 32421.627\nCum CV train: 0.7690   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_44', 'var_44_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000959 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5388   - loss: 323.246  - train AUC: 0.5515   - loss: 323.240 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002184 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5393   - loss: 324.223  - train AUC: 0.5495   - loss: 323.046 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002196 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5467   - loss: 323.124  - train AUC: 0.5506   - loss: 323.233 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002311 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5465   - loss: 323.278  - train AUC: 0.5491   - loss: 323.255 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001118 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5475   - loss: 323.060  - train AUC: 0.5486   - loss: 323.312 \nScore:  - val AUC: 0.5425   - loss: 32421.627 - train AUC: 0.5522   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7552   - loss: 32421.627\nCum CV train: 0.7754   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_45', 'var_45_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001003 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5129   - loss: 325.713  - train AUC: 0.5250   - loss: 325.591 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001128 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5264   - loss: 325.567  - train AUC: 0.5265   - loss: 325.536 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001212 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5143   - loss: 325.831  - train AUC: 0.5282   - loss: 325.481 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002130 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5177   - loss: 325.636  - train AUC: 0.5272   - loss: 325.520 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001992 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5136   - loss: 325.768  - train AUC: 0.5266   - loss: 325.518 \nScore:  - val AUC: 0.5164   - loss: 32421.627 - train AUC: 0.5292   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7564   - loss: 32421.627\nCum CV train: 0.7770   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_46', 'var_46_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002079 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5029   - loss: 326.072  - train AUC: 0.5205   - loss: 325.964 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003648 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.4970   - loss: 326.162  - train AUC: 0.5153   - loss: 326.004 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003011 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5056   - loss: 326.106  - train AUC: 0.5192   - loss: 325.912 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002517 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5018   - loss: 326.133  - train AUC: 0.5172   - loss: 325.987 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003345 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.4989   - loss: 326.169  - train AUC: 0.5180   - loss: 325.969 \nScore:  - val AUC: 0.5017   - loss: 32421.627 - train AUC: 0.5235   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7565   - loss: 32421.627\nCum CV train: 0.7774   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_47', 'var_47_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001179 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5059   - loss: 326.124  - train AUC: 0.5185   - loss: 325.954 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000897 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5064   - loss: 326.097  - train AUC: 0.5165   - loss: 325.982 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002000 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5061   - loss: 326.127  - train AUC: 0.5197   - loss: 325.925 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002180 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5044   - loss: 326.197  - train AUC: 0.5180   - loss: 325.964 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001198 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5021   - loss: 326.178  - train AUC: 0.5143   - loss: 325.980 \nScore:  - val AUC: 0.5046   - loss: 32421.627 - train AUC: 0.5206   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7565   - loss: 32421.627\nCum CV train: 0.7778   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_48', 'var_48_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002105 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5388   - loss: 325.140  - train AUC: 0.5353   - loss: 325.180 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001149 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5256   - loss: 325.605  - train AUC: 0.5387   - loss: 325.055 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001994 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5310   - loss: 325.375  - train AUC: 0.5361   - loss: 325.137 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002164 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5343   - loss: 325.225  - train AUC: 0.5366   - loss: 325.157 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001016 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5355   - loss: 325.083  - train AUC: 0.5389   - loss: 325.139 \nScore:  - val AUC: 0.5320   - loss: 32421.627 - train AUC: 0.5382   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7587   - loss: 32421.627\nCum CV train: 0.7801   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_49', 'var_49_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002373 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5269   - loss: 325.055  - train AUC: 0.5337   - loss: 324.764 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002404 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5310   - loss: 324.923  - train AUC: 0.5331   - loss: 324.782 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001051 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5311   - loss: 325.027  - train AUC: 0.5339   - loss: 324.783 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001059 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5304   - loss: 324.751  - train AUC: 0.5353   - loss: 324.787 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002327 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5234   - loss: 325.110  - train AUC: 0.5339   - loss: 324.775 \nScore:  - val AUC: 0.5278   - loss: 32421.627 - train AUC: 0.5351   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7614   - loss: 32421.627\nCum CV train: 0.7828   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_50', 'var_50_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002315 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 290\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5054   - loss: 326.095  - train AUC: 0.5216   - loss: 325.897 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002169 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 290\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5223   - loss: 325.894  - train AUC: 0.5210   - loss: 325.855 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002342 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 290\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5192   - loss: 325.959  - train AUC: 0.5220   - loss: 325.852 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003466 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 290\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5096   - loss: 326.167  - train AUC: 0.5225   - loss: 325.786 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002724 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 290\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5085   - loss: 326.065  - train AUC: 0.5215   - loss: 325.858 \nScore:  - val AUC: 0.5120   - loss: 32421.627 - train AUC: 0.5242   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7617   - loss: 32421.627\nCum CV train: 0.7835   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_51', 'var_51_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002092 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5344   - loss: 324.965  - train AUC: 0.5305   - loss: 325.001 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001038 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5166   - loss: 325.621  - train AUC: 0.5318   - loss: 324.886 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003671 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5210   - loss: 325.291  - train AUC: 0.5369   - loss: 324.819 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000963 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5188   - loss: 325.289  - train AUC: 0.5340   - loss: 324.943 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002331 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5309   - loss: 324.950  - train AUC: 0.5321   - loss: 325.037 \nScore:  - val AUC: 0.5240   - loss: 32421.627 - train AUC: 0.5354   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7638   - loss: 32421.627\nCum CV train: 0.7859   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_52', 'var_52_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002462 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5263   - loss: 325.638  - train AUC: 0.5332   - loss: 325.274 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002724 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5272   - loss: 325.417  - train AUC: 0.5357   - loss: 325.240 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002441 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5298   - loss: 325.498  - train AUC: 0.5329   - loss: 325.337 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003821 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5319   - loss: 325.439  - train AUC: 0.5335   - loss: 325.264 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001133 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5365   - loss: 325.182  - train AUC: 0.5328   - loss: 325.323 \nScore:  - val AUC: 0.5294   - loss: 32421.627 - train AUC: 0.5352   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7657   - loss: 32421.627\nCum CV train: 0.7879   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_53', 'var_53_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002147 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 289\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5509   - loss: 323.324  - train AUC: 0.5607   - loss: 322.892 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002089 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 289\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5466   - loss: 323.546  - train AUC: 0.5610   - loss: 322.812 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001983 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 289\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5620   - loss: 322.842  - train AUC: 0.5590   - loss: 322.914 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003454 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 289\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5573   - loss: 322.990  - train AUC: 0.5597   - loss: 322.855 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002159 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 289\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5610   - loss: 322.903  - train AUC: 0.5595   - loss: 322.929 \nScore:  - val AUC: 0.5548   - loss: 32421.627 - train AUC: 0.5611   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7725   - loss: 32421.627\nCum CV train: 0.7943   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_54', 'var_54_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002291 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5056   - loss: 326.149  - train AUC: 0.5270   - loss: 325.720 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002411 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5191   - loss: 325.809  - train AUC: 0.5239   - loss: 325.768 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001169 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5176   - loss: 325.968  - train AUC: 0.5256   - loss: 325.744 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002403 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5240   - loss: 325.856  - train AUC: 0.5242   - loss: 325.777 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001067 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5205   - loss: 325.834  - train AUC: 0.5270   - loss: 325.683 \nScore:  - val AUC: 0.5168   - loss: 32421.627 - train AUC: 0.5277   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7729   - loss: 32421.627\nCum CV train: 0.7951   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_55', 'var_55_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002083 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5132   - loss: 325.764  - train AUC: 0.5224   - loss: 325.644 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002508 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5122   - loss: 325.907  - train AUC: 0.5200   - loss: 325.672 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001006 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5199   - loss: 325.701  - train AUC: 0.5218   - loss: 325.652 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003288 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5176   - loss: 325.772  - train AUC: 0.5225   - loss: 325.643 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002909 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5062   - loss: 326.127  - train AUC: 0.5223   - loss: 325.621 \nScore:  - val AUC: 0.5141   - loss: 32421.627 - train AUC: 0.5250   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7737   - loss: 32421.627\nCum CV train: 0.7962   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_56', 'var_56_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002297 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5235   - loss: 325.134  - train AUC: 0.5374   - loss: 324.538 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002164 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5282   - loss: 324.901  - train AUC: 0.5388   - loss: 324.536 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003887 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5270   - loss: 324.776  - train AUC: 0.5384   - loss: 324.589 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002029 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5337   - loss: 324.819  - train AUC: 0.5354   - loss: 324.569 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002123 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5284   - loss: 324.801  - train AUC: 0.5373   - loss: 324.588 \nScore:  - val AUC: 0.5272   - loss: 32421.627 - train AUC: 0.5394   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7761   - loss: 32421.627\nCum CV train: 0.7987   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_57', 'var_57_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002369 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 542\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5188   - loss: 325.938  - train AUC: 0.5301   - loss: 325.637 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002897 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 542\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5121   - loss: 326.016  - train AUC: 0.5262   - loss: 325.741 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002206 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 542\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5084   - loss: 326.236  - train AUC: 0.5278   - loss: 325.707 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002173 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 542\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5098   - loss: 326.129  - train AUC: 0.5291   - loss: 325.705 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001996 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 542\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5160   - loss: 325.973  - train AUC: 0.5289   - loss: 325.725 \nScore:  - val AUC: 0.5127   - loss: 32421.627 - train AUC: 0.5328   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7763   - loss: 32421.627\nCum CV train: 0.7997   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_58', 'var_58_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002094 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5287   - loss: 325.632  - train AUC: 0.5355   - loss: 325.354 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002363 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5318   - loss: 325.350  - train AUC: 0.5346   - loss: 325.413 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001969 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5188   - loss: 325.846  - train AUC: 0.5349   - loss: 325.407 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002471 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5270   - loss: 325.623  - train AUC: 0.5372   - loss: 325.259 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001138 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5271   - loss: 325.802  - train AUC: 0.5332   - loss: 325.392 \nScore:  - val AUC: 0.5263   - loss: 32421.627 - train AUC: 0.5368   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7774   - loss: 32421.627\nCum CV train: 0.8013   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_59', 'var_59_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002339 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 541\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5140   - loss: 325.990  - train AUC: 0.5207   - loss: 325.840 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001866 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 541\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5047   - loss: 326.031  - train AUC: 0.5164   - loss: 325.941 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002012 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 541\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5026   - loss: 326.124  - train AUC: 0.5191   - loss: 325.892 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000957 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 541\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5089   - loss: 326.067  - train AUC: 0.5190   - loss: 325.852 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002011 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 541\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5045   - loss: 326.169  - train AUC: 0.5188   - loss: 325.853 \nScore:  - val AUC: 0.5059   - loss: 32421.627 - train AUC: 0.5224   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7776   - loss: 32421.627\nCum CV train: 0.8018   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_60', 'var_60_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001123 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5088   - loss: 326.033  - train AUC: 0.5187   - loss: 325.864 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002113 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5076   - loss: 325.961  - train AUC: 0.5175   - loss: 325.962 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001161 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5027   - loss: 326.119  - train AUC: 0.5174   - loss: 325.879 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001043 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5167   - loss: 326.056  - train AUC: 0.5148   - loss: 325.922 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001029 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5055   - loss: 326.110  - train AUC: 0.5197   - loss: 325.870 \nScore:  - val AUC: 0.5078   - loss: 32421.627 - train AUC: 0.5201   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7779   - loss: 32421.627\nCum CV train: 0.8024   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_61', 'var_61_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001071 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5073   - loss: 325.884  - train AUC: 0.5188   - loss: 325.906 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002502 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5030   - loss: 326.017  - train AUC: 0.5174   - loss: 325.938 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002362 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5023   - loss: 326.208  - train AUC: 0.5207   - loss: 325.896 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002083 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5000   - loss: 326.184  - train AUC: 0.5203   - loss: 325.858 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001090 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5044   - loss: 326.094  - train AUC: 0.5172   - loss: 325.917 \nScore:  - val AUC: 0.5039   - loss: 32421.627 - train AUC: 0.5246   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7780   - loss: 32421.627\nCum CV train: 0.8029   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_62', 'var_62_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001001 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5087   - loss: 326.162  - train AUC: 0.5223   - loss: 325.807 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002098 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5124   - loss: 325.929  - train AUC: 0.5240   - loss: 325.809 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002223 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.4987   - loss: 326.286  - train AUC: 0.5274   - loss: 325.695 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002288 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5152   - loss: 325.879  - train AUC: 0.5228   - loss: 325.819 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002414 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5139   - loss: 325.963  - train AUC: 0.5265   - loss: 325.727 \nScore:  - val AUC: 0.5087   - loss: 32421.627 - train AUC: 0.5288   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7782   - loss: 32421.627\nCum CV train: 0.8036   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_63', 'var_63_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002301 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5150   - loss: 325.659  - train AUC: 0.5229   - loss: 325.705 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003810 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5125   - loss: 325.768  - train AUC: 0.5187   - loss: 325.785 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002230 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5061   - loss: 326.027  - train AUC: 0.5201   - loss: 325.704 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002908 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5067   - loss: 326.079  - train AUC: 0.5199   - loss: 325.699 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001045 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5092   - loss: 325.892  - train AUC: 0.5213   - loss: 325.707 \nScore:  - val AUC: 0.5099   - loss: 32421.627 - train AUC: 0.5237   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7788   - loss: 32421.627\nCum CV train: 0.8046   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_64', 'var_64_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000921 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 534\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5134   - loss: 325.972  - train AUC: 0.5186   - loss: 325.879 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002283 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 534\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5075   - loss: 326.021  - train AUC: 0.5199   - loss: 325.863 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001035 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 534\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5116   - loss: 326.009  - train AUC: 0.5215   - loss: 325.819 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002403 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 534\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5111   - loss: 326.134  - train AUC: 0.5205   - loss: 325.829 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002106 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 534\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5059   - loss: 326.130  - train AUC: 0.5198   - loss: 325.854 \nScore:  - val AUC: 0.5088   - loss: 32421.627 - train AUC: 0.5213   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7791   - loss: 32421.627\nCum CV train: 0.8052   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_65', 'var_65_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001127 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5068   - loss: 326.077  - train AUC: 0.5207   - loss: 325.904 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002039 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5085   - loss: 326.149  - train AUC: 0.5178   - loss: 325.890 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001424 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5095   - loss: 326.175  - train AUC: 0.5191   - loss: 325.863 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002255 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5063   - loss: 326.118  - train AUC: 0.5203   - loss: 325.856 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002122 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5113   - loss: 326.035  - train AUC: 0.5201   - loss: 325.817 \nScore:  - val AUC: 0.5081   - loss: 32421.627 - train AUC: 0.5226   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7793   - loss: 32421.627\nCum CV train: 0.8058   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_66', 'var_66_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001036 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 536\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5144   - loss: 325.891  - train AUC: 0.5280   - loss: 325.651 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001906 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 536\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5150   - loss: 325.918  - train AUC: 0.5295   - loss: 325.587 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002181 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 536\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5237   - loss: 325.743  - train AUC: 0.5284   - loss: 325.608 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002500 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 536\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5214   - loss: 325.956  - train AUC: 0.5283   - loss: 325.557 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002384 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 536\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5268   - loss: 325.694  - train AUC: 0.5253   - loss: 325.693 \nScore:  - val AUC: 0.5194   - loss: 32421.627 - train AUC: 0.5304   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7800   - loss: 32421.627\nCum CV train: 0.8068   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_67', 'var_67_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002236 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5511   - loss: 324.172  - train AUC: 0.5470   - loss: 324.517 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002411 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5510   - loss: 324.243  - train AUC: 0.5473   - loss: 324.456 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001134 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5345   - loss: 325.231  - train AUC: 0.5488   - loss: 324.361 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001020 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5414   - loss: 324.859  - train AUC: 0.5484   - loss: 324.382 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000973 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5436   - loss: 324.601  - train AUC: 0.5494   - loss: 324.376 \nScore:  - val AUC: 0.5441   - loss: 32421.627 - train AUC: 0.5494   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7831   - loss: 32421.627\nCum CV train: 0.8098   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_68', 'var_68_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002437 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 457\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5067   - loss: 326.095  - train AUC: 0.5257   - loss: 325.820 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002133 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 458\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5117   - loss: 325.991  - train AUC: 0.5238   - loss: 325.824 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002042 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 458\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5046   - loss: 326.176  - train AUC: 0.5265   - loss: 325.786 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000899 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 458\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5156   - loss: 325.992  - train AUC: 0.5260   - loss: 325.813 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001169 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 458\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5037   - loss: 326.226  - train AUC: 0.5270   - loss: 325.756 \nScore:  - val AUC: 0.5082   - loss: 32421.627 - train AUC: 0.5282   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7833   - loss: 32421.627\nCum CV train: 0.8106   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_69', 'var_69_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001067 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5019   - loss: 326.049  - train AUC: 0.5162   - loss: 325.816 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002101 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5052   - loss: 325.753  - train AUC: 0.5187   - loss: 325.838 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001175 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5076   - loss: 326.001  - train AUC: 0.5171   - loss: 325.761 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001109 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5087   - loss: 326.034  - train AUC: 0.5178   - loss: 325.767 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001046 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5025   - loss: 326.020  - train AUC: 0.5200   - loss: 325.753 \nScore:  - val AUC: 0.5049   - loss: 32421.627 - train AUC: 0.5214   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7838   - loss: 32421.627\nCum CV train: 0.8113   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_70', 'var_70_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003681 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5308   - loss: 325.066  - train AUC: 0.5311   - loss: 325.222 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001105 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5185   - loss: 325.396  - train AUC: 0.5347   - loss: 325.085 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000972 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5276   - loss: 325.465  - train AUC: 0.5293   - loss: 325.112 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003743 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5265   - loss: 325.445  - train AUC: 0.5294   - loss: 325.138 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002043 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5265   - loss: 325.324  - train AUC: 0.5287   - loss: 325.204 \nScore:  - val AUC: 0.5248   - loss: 32421.627 - train AUC: 0.5345   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7855   - loss: 32421.627\nCum CV train: 0.8131   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_71', 'var_71_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000920 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 319\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5288   - loss: 325.477  - train AUC: 0.5357   - loss: 325.374 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.004036 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 319\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5300   - loss: 325.554  - train AUC: 0.5360   - loss: 325.287 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003434 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 319\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5398   - loss: 325.283  - train AUC: 0.5325   - loss: 325.422 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001100 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 319\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5260   - loss: 325.648  - train AUC: 0.5363   - loss: 325.320 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002191 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 319\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5306   - loss: 325.798  - train AUC: 0.5358   - loss: 325.275 \nScore:  - val AUC: 0.5303   - loss: 32421.627 - train AUC: 0.5364   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7866   - loss: 32421.627\nCum CV train: 0.8144   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_72', 'var_72_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000980 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5089   - loss: 326.044  - train AUC: 0.5224   - loss: 325.846 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003533 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5119   - loss: 326.043  - train AUC: 0.5198   - loss: 325.842 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000925 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5147   - loss: 325.935  - train AUC: 0.5201   - loss: 325.854 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002424 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5079   - loss: 326.044  - train AUC: 0.5200   - loss: 325.825 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001931 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5081   - loss: 326.142  - train AUC: 0.5189   - loss: 325.853 \nScore:  - val AUC: 0.5098   - loss: 32421.627 - train AUC: 0.5216   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7867   - loss: 32421.627\nCum CV train: 0.8149   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_73', 'var_73_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002370 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5002   - loss: 326.002  - train AUC: 0.5122   - loss: 326.030 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000991 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.4993   - loss: 326.124  - train AUC: 0.5075   - loss: 326.124 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002253 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.4987   - loss: 326.167  - train AUC: 0.5084   - loss: 326.088 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002356 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5110   - loss: 326.050  - train AUC: 0.5113   - loss: 325.980 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001131 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5046   - loss: 326.081  - train AUC: 0.5131   - loss: 325.979 \nScore:  - val AUC: 0.5018   - loss: 32421.627 - train AUC: 0.5181   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7869   - loss: 32421.627\nCum CV train: 0.8152   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_74', 'var_74_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001195 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5209   - loss: 325.827  - train AUC: 0.5269   - loss: 325.592 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002833 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5220   - loss: 325.595  - train AUC: 0.5269   - loss: 325.667 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002206 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5215   - loss: 325.844  - train AUC: 0.5250   - loss: 325.688 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001167 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5211   - loss: 325.789  - train AUC: 0.5277   - loss: 325.522 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002533 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5173   - loss: 325.865  - train AUC: 0.5269   - loss: 325.592 \nScore:  - val AUC: 0.5200   - loss: 32421.627 - train AUC: 0.5283   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7877   - loss: 32421.627\nCum CV train: 0.8162   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_75', 'var_75_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001146 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5427   - loss: 324.185  - train AUC: 0.5445   - loss: 324.135 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002346 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5352   - loss: 324.490  - train AUC: 0.5456   - loss: 324.106 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001076 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5406   - loss: 324.148  - train AUC: 0.5426   - loss: 324.238 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001065 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5362   - loss: 324.478  - train AUC: 0.5428   - loss: 324.186 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002080 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5334   - loss: 324.849  - train AUC: 0.5432   - loss: 324.156 \nScore:  - val AUC: 0.5371   - loss: 32421.627 - train AUC: 0.5449   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7908   - loss: 32421.627\nCum CV train: 0.8191   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_76', 'var_76_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001157 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5511   - loss: 323.419  - train AUC: 0.5649   - loss: 322.560 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002115 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5674   - loss: 322.282  - train AUC: 0.5629   - loss: 322.747 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002136 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5623   - loss: 322.637  - train AUC: 0.5636   - loss: 322.670 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000834 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5569   - loss: 323.194  - train AUC: 0.5644   - loss: 322.580 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002341 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5633   - loss: 322.716  - train AUC: 0.5662   - loss: 322.532 \nScore:  - val AUC: 0.5599   - loss: 32421.627 - train AUC: 0.5655   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7965   - loss: 32421.627\nCum CV train: 0.8241   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_77', 'var_77_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003456 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5141   - loss: 325.809  - train AUC: 0.5249   - loss: 325.750 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001039 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5200   - loss: 325.750  - train AUC: 0.5286   - loss: 325.617 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002189 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5149   - loss: 326.013  - train AUC: 0.5267   - loss: 325.638 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001057 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5143   - loss: 325.904  - train AUC: 0.5252   - loss: 325.673 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002128 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5057   - loss: 326.223  - train AUC: 0.5255   - loss: 325.637 \nScore:  - val AUC: 0.5131   - loss: 32421.627 - train AUC: 0.5285   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.7969   - loss: 32421.627\nCum CV train: 0.8249   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_78', 'var_78_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002025 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5376   - loss: 323.888  - train AUC: 0.5483   - loss: 323.777 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.004131 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5405   - loss: 324.066  - train AUC: 0.5469   - loss: 323.728 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002095 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5440   - loss: 323.908  - train AUC: 0.5476   - loss: 323.705 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002170 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5467   - loss: 323.644  - train AUC: 0.5467   - loss: 323.797 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002259 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5436   - loss: 324.028  - train AUC: 0.5453   - loss: 323.685 \nScore:  - val AUC: 0.5420   - loss: 32421.627 - train AUC: 0.5490   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8008   - loss: 32421.627\nCum CV train: 0.8283   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_79', 'var_79_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002305 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 278\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5057   - loss: 326.095  - train AUC: 0.5199   - loss: 325.974 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002087 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 278\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.4950   - loss: 326.181  - train AUC: 0.5178   - loss: 325.984 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002331 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 278\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5097   - loss: 326.112  - train AUC: 0.5170   - loss: 325.953 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001941 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 278\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5005   - loss: 326.170  - train AUC: 0.5184   - loss: 325.943 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001918 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 278\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5005   - loss: 326.157  - train AUC: 0.5166   - loss: 325.958 \nScore:  - val AUC: 0.5013   - loss: 32421.627 - train AUC: 0.5234   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8008   - loss: 32421.627\nCum CV train: 0.8286   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_80', 'var_80_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002296 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5569   - loss: 322.701  - train AUC: 0.5629   - loss: 322.460 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002573 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5683   - loss: 321.412  - train AUC: 0.5638   - loss: 322.609 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002115 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5570   - loss: 323.291  - train AUC: 0.5652   - loss: 322.210 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002205 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5495   - loss: 323.621  - train AUC: 0.5680   - loss: 322.101 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001030 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5650   - loss: 322.706  - train AUC: 0.5617   - loss: 322.459 \nScore:  - val AUC: 0.5584   - loss: 32421.627 - train AUC: 0.5661   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8064   - loss: 32421.627\nCum CV train: 0.8337   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_81', 'var_81_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002228 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5726   - loss: 321.161  - train AUC: 0.5794   - loss: 320.678 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002082 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5724   - loss: 320.611  - train AUC: 0.5791   - loss: 320.785 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002161 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5768   - loss: 320.591  - train AUC: 0.5785   - loss: 320.841 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002310 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5695   - loss: 321.492  - train AUC: 0.5793   - loss: 320.642 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002143 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5766   - loss: 321.225  - train AUC: 0.5829   - loss: 320.511 \nScore:  - val AUC: 0.5730   - loss: 32421.627 - train AUC: 0.5814   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8148   - loss: 32421.627\nCum CV train: 0.8408   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_82', 'var_82_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002195 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5152   - loss: 325.566  - train AUC: 0.5319   - loss: 325.127 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002048 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5196   - loss: 325.427  - train AUC: 0.5320   - loss: 325.121 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001084 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5266   - loss: 325.493  - train AUC: 0.5316   - loss: 325.074 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001028 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5221   - loss: 325.358  - train AUC: 0.5320   - loss: 325.125 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002411 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5182   - loss: 325.341  - train AUC: 0.5344   - loss: 325.152 \nScore:  - val AUC: 0.5195   - loss: 32421.627 - train AUC: 0.5349   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8163   - loss: 32421.627\nCum CV train: 0.8426   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_83', 'var_83_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001001 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5239   - loss: 325.165  - train AUC: 0.5266   - loss: 325.322 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001040 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5201   - loss: 325.424  - train AUC: 0.5263   - loss: 325.211 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002131 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5128   - loss: 325.467  - train AUC: 0.5259   - loss: 325.256 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001158 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5175   - loss: 325.490  - train AUC: 0.5255   - loss: 325.204 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002060 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5129   - loss: 325.686  - train AUC: 0.5270   - loss: 325.108 \nScore:  - val AUC: 0.5164   - loss: 32421.627 - train AUC: 0.5281   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8173   - loss: 32421.627\nCum CV train: 0.8437   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_84', 'var_84_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002118 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5017   - loss: 326.183  - train AUC: 0.5195   - loss: 325.925 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001170 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5117   - loss: 325.993  - train AUC: 0.5196   - loss: 325.931 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003935 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5125   - loss: 326.065  - train AUC: 0.5169   - loss: 325.928 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002558 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5181   - loss: 325.999  - train AUC: 0.5146   - loss: 325.958 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002689 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5051   - loss: 326.154  - train AUC: 0.5188   - loss: 325.936 \nScore:  - val AUC: 0.5093   - loss: 32421.627 - train AUC: 0.5203   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8175   - loss: 32421.627\nCum CV train: 0.8441   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_85', 'var_85_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001070 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5183   - loss: 325.497  - train AUC: 0.5288   - loss: 325.440 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001979 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5277   - loss: 325.617  - train AUC: 0.5263   - loss: 325.398 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001109 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5189   - loss: 325.710  - train AUC: 0.5283   - loss: 325.375 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002209 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5171   - loss: 325.777  - train AUC: 0.5290   - loss: 325.371 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002314 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5253   - loss: 325.361  - train AUC: 0.5283   - loss: 325.421 \nScore:  - val AUC: 0.5206   - loss: 32421.627 - train AUC: 0.5296   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8184   - loss: 32421.627\nCum CV train: 0.8450   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_86', 'var_86_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002429 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5377   - loss: 324.653  - train AUC: 0.5432   - loss: 323.871 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002341 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5418   - loss: 323.683  - train AUC: 0.5432   - loss: 324.063 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002175 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5375   - loss: 324.664  - train AUC: 0.5412   - loss: 323.895 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002268 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5347   - loss: 323.998  - train AUC: 0.5442   - loss: 324.034 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002756 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5397   - loss: 323.802  - train AUC: 0.5449   - loss: 323.964 \nScore:  - val AUC: 0.5379   - loss: 32421.627 - train AUC: 0.5448   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8214   - loss: 32421.627\nCum CV train: 0.8478   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_87', 'var_87_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002050 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5338   - loss: 324.952  - train AUC: 0.5407   - loss: 324.983 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002092 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5376   - loss: 325.002  - train AUC: 0.5375   - loss: 325.026 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002261 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5383   - loss: 325.357  - train AUC: 0.5375   - loss: 324.958 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003674 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5350   - loss: 325.010  - train AUC: 0.5415   - loss: 324.917 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000854 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5286   - loss: 325.376  - train AUC: 0.5398   - loss: 324.957 \nScore:  - val AUC: 0.5340   - loss: 32421.627 - train AUC: 0.5406   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8228   - loss: 32421.627\nCum CV train: 0.8491   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_88', 'var_88_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003772 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1041\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5290   - loss: 325.484  - train AUC: 0.5280   - loss: 325.519 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002034 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1041\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5130   - loss: 325.831  - train AUC: 0.5299   - loss: 325.459 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002750 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1041\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5113   - loss: 326.011  - train AUC: 0.5296   - loss: 325.433 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002182 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1041\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5225   - loss: 325.668  - train AUC: 0.5296   - loss: 325.440 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002223 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1041\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5179   - loss: 325.764  - train AUC: 0.5320   - loss: 325.397 \nScore:  - val AUC: 0.5179   - loss: 32421.627 - train AUC: 0.5336   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8233   - loss: 32421.627\nCum CV train: 0.8499   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_89', 'var_89_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002133 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5278   - loss: 325.253  - train AUC: 0.5411   - loss: 324.636 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002065 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5365   - loss: 324.659  - train AUC: 0.5403   - loss: 324.750 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000988 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5370   - loss: 325.053  - train AUC: 0.5404   - loss: 324.612 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002017 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5383   - loss: 324.743  - train AUC: 0.5396   - loss: 324.734 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001031 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5293   - loss: 324.998  - train AUC: 0.5408   - loss: 324.712 \nScore:  - val AUC: 0.5335   - loss: 32421.627 - train AUC: 0.5417   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8250   - loss: 32421.627\nCum CV train: 0.8516   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_90', 'var_90_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001112 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5291   - loss: 325.156  - train AUC: 0.5319   - loss: 325.161 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002121 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5223   - loss: 325.426  - train AUC: 0.5345   - loss: 325.054 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002188 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5264   - loss: 325.317  - train AUC: 0.5322   - loss: 325.121 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002069 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5339   - loss: 325.070  - train AUC: 0.5326   - loss: 325.147 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001180 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5264   - loss: 325.472  - train AUC: 0.5336   - loss: 325.050 \nScore:  - val AUC: 0.5268   - loss: 32421.627 - train AUC: 0.5345   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8263   - loss: 32421.627\nCum CV train: 0.8529   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_91', 'var_91_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002235 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 607\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5316   - loss: 325.131  - train AUC: 0.5422   - loss: 324.736 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002117 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 607\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5358   - loss: 324.938  - train AUC: 0.5414   - loss: 324.771 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002253 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 607\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5403   - loss: 324.852  - train AUC: 0.5404   - loss: 324.808 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002466 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 607\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5398   - loss: 324.961  - train AUC: 0.5428   - loss: 324.714 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002307 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 607\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5316   - loss: 325.091  - train AUC: 0.5406   - loss: 324.840 \nScore:  - val AUC: 0.5348   - loss: 32421.627 - train AUC: 0.5430   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8281   - loss: 32421.627\nCum CV train: 0.8545   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_92', 'var_92_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002406 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5423   - loss: 324.197  - train AUC: 0.5501   - loss: 323.725 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001024 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5409   - loss: 324.004  - train AUC: 0.5492   - loss: 323.901 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001996 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5410   - loss: 324.302  - train AUC: 0.5507   - loss: 323.660 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001071 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5426   - loss: 323.894  - train AUC: 0.5484   - loss: 323.861 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002261 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5437   - loss: 324.031  - train AUC: 0.5487   - loss: 323.807 \nScore:  - val AUC: 0.5415   - loss: 32421.627 - train AUC: 0.5510   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8311   - loss: 32421.627\nCum CV train: 0.8573   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_93', 'var_93_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002884 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 555\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5292   - loss: 325.034  - train AUC: 0.5374   - loss: 325.034 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002203 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 555\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5286   - loss: 325.213  - train AUC: 0.5405   - loss: 324.952 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002317 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 555\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5285   - loss: 325.201  - train AUC: 0.5384   - loss: 325.012 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002070 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 555\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5267   - loss: 325.567  - train AUC: 0.5382   - loss: 324.941 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001016 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 555\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5275   - loss: 325.351  - train AUC: 0.5379   - loss: 325.015 \nScore:  - val AUC: 0.5276   - loss: 32421.627 - train AUC: 0.5422   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8322   - loss: 32421.627\nCum CV train: 0.8585   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_94', 'var_94_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000963 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5499   - loss: 323.625  - train AUC: 0.5466   - loss: 323.971 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000980 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5401   - loss: 324.216  - train AUC: 0.5468   - loss: 323.974 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002225 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5393   - loss: 324.054  - train AUC: 0.5480   - loss: 323.950 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003608 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5439   - loss: 324.287  - train AUC: 0.5462   - loss: 323.860 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002310 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5401   - loss: 324.461  - train AUC: 0.5479   - loss: 323.853 \nScore:  - val AUC: 0.5424   - loss: 32421.627 - train AUC: 0.5484   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8347   - loss: 32421.627\nCum CV train: 0.8607   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_95', 'var_95_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002621 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 292\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5339   - loss: 325.184  - train AUC: 0.5419   - loss: 324.776 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002236 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 292\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5271   - loss: 325.322  - train AUC: 0.5432   - loss: 324.729 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003123 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 292\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5243   - loss: 325.523  - train AUC: 0.5448   - loss: 324.697 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002727 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 292\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5429   - loss: 324.530  - train AUC: 0.5406   - loss: 324.905 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002790 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 292\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5426   - loss: 324.628  - train AUC: 0.5408   - loss: 324.870 \nScore:  - val AUC: 0.5335   - loss: 32421.627 - train AUC: 0.5436   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8362   - loss: 32421.627\nCum CV train: 0.8622   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_97', 'var_97_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002382 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 521\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5205   - loss: 325.505  - train AUC: 0.5219   - loss: 325.674 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003440 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 521\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5129   - loss: 325.724  - train AUC: 0.5211   - loss: 325.657 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002279 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 521\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5131   - loss: 325.882  - train AUC: 0.5226   - loss: 325.613 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002350 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 521\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5181   - loss: 325.875  - train AUC: 0.5220   - loss: 325.596 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002465 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 521\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5128   - loss: 325.848  - train AUC: 0.5222   - loss: 325.576 \nScore:  - val AUC: 0.5148   - loss: 32421.627 - train AUC: 0.5240   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8369   - loss: 32421.627\nCum CV train: 0.8630   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_99', 'var_99_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002102 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1044\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5556   - loss: 323.335  - train AUC: 0.5559   - loss: 323.398 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002054 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1044\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5524   - loss: 323.690  - train AUC: 0.5573   - loss: 323.240 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002303 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1044\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5484   - loss: 323.828  - train AUC: 0.5575   - loss: 323.213 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002206 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1044\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5543   - loss: 323.329  - train AUC: 0.5563   - loss: 323.356 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002070 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1044\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5544   - loss: 323.363  - train AUC: 0.5569   - loss: 323.320 \nScore:  - val AUC: 0.5525   - loss: 32421.627 - train AUC: 0.5581   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8404   - loss: 32421.627\nCum CV train: 0.8660   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_101', 'var_101_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002098 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5085   - loss: 325.983  - train AUC: 0.5177   - loss: 325.931 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002425 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5053   - loss: 326.061  - train AUC: 0.5141   - loss: 325.935 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001120 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5120   - loss: 326.083  - train AUC: 0.5166   - loss: 325.935 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001078 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5060   - loss: 326.112  - train AUC: 0.5172   - loss: 325.966 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001011 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5111   - loss: 326.087  - train AUC: 0.5173   - loss: 325.870 \nScore:  - val AUC: 0.5079   - loss: 32421.627 - train AUC: 0.5192   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8404   - loss: 32421.627\nCum CV train: 0.8662   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_102', 'var_102_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001122 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5176   - loss: 325.651  - train AUC: 0.5316   - loss: 325.246 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001117 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5122   - loss: 325.583  - train AUC: 0.5315   - loss: 325.270 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002646 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5242   - loss: 325.615  - train AUC: 0.5292   - loss: 325.263 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001140 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5192   - loss: 325.341  - train AUC: 0.5320   - loss: 325.278 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002606 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5167   - loss: 325.622  - train AUC: 0.5294   - loss: 325.231 \nScore:  - val AUC: 0.5174   - loss: 32421.627 - train AUC: 0.5350   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8412   - loss: 32421.627\nCum CV train: 0.8672   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_104', 'var_104_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002287 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 531\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5247   - loss: 325.731  - train AUC: 0.5319   - loss: 325.517 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002612 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 531\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5301   - loss: 325.646  - train AUC: 0.5318   - loss: 325.537 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002330 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 531\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5244   - loss: 325.882  - train AUC: 0.5327   - loss: 325.522 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002149 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 531\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5256   - loss: 325.710  - train AUC: 0.5327   - loss: 325.500 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002184 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 531\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5153   - loss: 326.085  - train AUC: 0.5350   - loss: 325.464 \nScore:  - val AUC: 0.5229   - loss: 32421.627 - train AUC: 0.5350   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8416   - loss: 32421.627\nCum CV train: 0.8679   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_105', 'var_105_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002307 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5270   - loss: 325.718  - train AUC: 0.5300   - loss: 325.564 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002446 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5189   - loss: 325.731  - train AUC: 0.5320   - loss: 325.521 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002469 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5221   - loss: 325.701  - train AUC: 0.5306   - loss: 325.558 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002230 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5204   - loss: 325.827  - train AUC: 0.5299   - loss: 325.596 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002040 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5211   - loss: 325.938  - train AUC: 0.5293   - loss: 325.548 \nScore:  - val AUC: 0.5216   - loss: 32421.627 - train AUC: 0.5317   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8422   - loss: 32421.627\nCum CV train: 0.8686   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_106', 'var_106_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002232 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5281   - loss: 325.349  - train AUC: 0.5351   - loss: 325.149 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002564 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5290   - loss: 325.423  - train AUC: 0.5363   - loss: 325.126 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003489 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5256   - loss: 325.649  - train AUC: 0.5356   - loss: 325.144 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002139 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5315   - loss: 325.128  - train AUC: 0.5343   - loss: 325.195 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002140 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 275\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5302   - loss: 325.253  - train AUC: 0.5354   - loss: 325.124 \nScore:  - val AUC: 0.5282   - loss: 32421.627 - train AUC: 0.5370   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8432   - loss: 32421.627\nCum CV train: 0.8697   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_107', 'var_107_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002375 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5414   - loss: 324.201  - train AUC: 0.5436   - loss: 324.470 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002409 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5440   - loss: 324.517  - train AUC: 0.5421   - loss: 324.438 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002310 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5418   - loss: 324.457  - train AUC: 0.5431   - loss: 324.435 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002551 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5282   - loss: 325.502  - train AUC: 0.5445   - loss: 324.280 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001073 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5438   - loss: 324.392  - train AUC: 0.5423   - loss: 324.462 \nScore:  - val AUC: 0.5391   - loss: 32421.627 - train AUC: 0.5442   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8452   - loss: 32421.627\nCum CV train: 0.8715   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_108', 'var_108_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001067 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 369\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5570   - loss: 323.985  - train AUC: 0.5505   - loss: 323.717 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002087 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 369\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5390   - loss: 324.278  - train AUC: 0.5501   - loss: 323.790 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002305 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 369\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5442   - loss: 324.182  - train AUC: 0.5532   - loss: 323.692 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002184 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 369\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5433   - loss: 324.079  - train AUC: 0.5493   - loss: 323.787 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002483 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 369\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5490   - loss: 323.471  - train AUC: 0.5518   - loss: 323.859 \nScore:  - val AUC: 0.5446   - loss: 32421.627 - train AUC: 0.5533   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8478   - loss: 32421.627\nCum CV train: 0.8738   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_109', 'var_109_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002372 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5465   - loss: 322.875  - train AUC: 0.5486   - loss: 322.823 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002388 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5458   - loss: 322.762  - train AUC: 0.5496   - loss: 322.871 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002165 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5470   - loss: 322.542  - train AUC: 0.5533   - loss: 322.780 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002706 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5404   - loss: 323.436  - train AUC: 0.5529   - loss: 322.601 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002644 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5470   - loss: 323.371  - train AUC: 0.5474   - loss: 322.772 \nScore:  - val AUC: 0.5451   - loss: 32421.627 - train AUC: 0.5527   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8512   - loss: 32421.627\nCum CV train: 0.8766   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_110', 'var_110_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001244 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5587   - loss: 323.170  - train AUC: 0.5632   - loss: 322.375 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001307 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5571   - loss: 322.611  - train AUC: 0.5635   - loss: 322.474 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002311 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5625   - loss: 322.132  - train AUC: 0.5623   - loss: 322.568 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001101 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5571   - loss: 322.791  - train AUC: 0.5618   - loss: 322.462 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002377 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5573   - loss: 322.681  - train AUC: 0.5628   - loss: 322.488 \nScore:  - val AUC: 0.5578   - loss: 32421.627 - train AUC: 0.5637   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8548   - loss: 32421.627\nCum CV train: 0.8797   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_111', 'var_111_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003751 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 538\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5199   - loss: 325.585  - train AUC: 0.5300   - loss: 325.389 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002305 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 538\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5233   - loss: 325.550  - train AUC: 0.5291   - loss: 325.398 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003870 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 538\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5222   - loss: 325.711  - train AUC: 0.5277   - loss: 325.366 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002644 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 538\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5291   - loss: 325.368  - train AUC: 0.5297   - loss: 325.417 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002084 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 538\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5229   - loss: 325.618  - train AUC: 0.5291   - loss: 325.387 \nScore:  - val AUC: 0.5229   - loss: 32421.627 - train AUC: 0.5306   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8555   - loss: 32421.627\nCum CV train: 0.8804   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_112', 'var_112_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002591 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 277\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5295   - loss: 325.509  - train AUC: 0.5342   - loss: 325.348 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.001964 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 277\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5320   - loss: 325.376  - train AUC: 0.5376   - loss: 325.288 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001075 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 277\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5339   - loss: 325.403  - train AUC: 0.5323   - loss: 325.435 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002134 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 277\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5206   - loss: 325.913  - train AUC: 0.5339   - loss: 325.388 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001060 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 277\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5293   - loss: 325.581  - train AUC: 0.5336   - loss: 325.350 \nScore:  - val AUC: 0.5285   - loss: 32421.627 - train AUC: 0.5358   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8562   - loss: 32421.627\nCum CV train: 0.8813   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_113', 'var_113_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001119 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5133   - loss: 325.918  - train AUC: 0.5211   - loss: 325.700 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001112 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5129   - loss: 325.749  - train AUC: 0.5209   - loss: 325.748 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002392 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5088   - loss: 325.873  - train AUC: 0.5204   - loss: 325.758 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002320 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.4982   - loss: 326.098  - train AUC: 0.5218   - loss: 325.685 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002846 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5134   - loss: 325.936  - train AUC: 0.5203   - loss: 325.735 \nScore:  - val AUC: 0.5090   - loss: 32421.627 - train AUC: 0.5230   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8566   - loss: 32421.627\nCum CV train: 0.8817   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_114', 'var_114_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002505 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1050\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5258   - loss: 325.467  - train AUC: 0.5296   - loss: 325.438 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002016 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1050\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5257   - loss: 325.452  - train AUC: 0.5287   - loss: 325.463 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002336 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1050\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5220   - loss: 325.531  - train AUC: 0.5314   - loss: 325.416 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002475 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1050\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5137   - loss: 326.014  - train AUC: 0.5325   - loss: 325.316 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002534 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1050\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5252   - loss: 325.650  - train AUC: 0.5280   - loss: 325.411 \nScore:  - val AUC: 0.5219   - loss: 32421.627 - train AUC: 0.5320   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8573   - loss: 32421.627\nCum CV train: 0.8825   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_115', 'var_115_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002064 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 530\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5440   - loss: 324.501  - train AUC: 0.5492   - loss: 324.350 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002082 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 530\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5412   - loss: 324.654  - train AUC: 0.5504   - loss: 324.287 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002665 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 530\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5486   - loss: 324.370  - train AUC: 0.5489   - loss: 324.327 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003007 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 530\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5476   - loss: 324.377  - train AUC: 0.5512   - loss: 324.297 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002065 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 530\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5417   - loss: 324.685  - train AUC: 0.5501   - loss: 324.259 \nScore:  - val AUC: 0.5439   - loss: 32421.627 - train AUC: 0.5524   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8589   - loss: 32421.627\nCum CV train: 0.8840   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_116', 'var_116_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002257 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 532\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5122   - loss: 326.092  - train AUC: 0.5328   - loss: 325.459 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002131 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 532\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5250   - loss: 325.771  - train AUC: 0.5308   - loss: 325.542 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002324 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 532\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5250   - loss: 325.702  - train AUC: 0.5343   - loss: 325.448 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001257 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 532\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5272   - loss: 325.689  - train AUC: 0.5328   - loss: 325.459 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002407 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 532\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5289   - loss: 325.582  - train AUC: 0.5308   - loss: 325.531 \nScore:  - val AUC: 0.5227   - loss: 32421.627 - train AUC: 0.5351   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8593   - loss: 32421.627\nCum CV train: 0.8846   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_118', 'var_118_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002457 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5402   - loss: 324.796  - train AUC: 0.5445   - loss: 324.527 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002394 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5387   - loss: 324.790  - train AUC: 0.5446   - loss: 324.510 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002460 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5383   - loss: 324.877  - train AUC: 0.5449   - loss: 324.520 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001125 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5403   - loss: 324.591  - train AUC: 0.5486   - loss: 324.390 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001165 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5469   - loss: 324.506  - train AUC: 0.5457   - loss: 324.470 \nScore:  - val AUC: 0.5404   - loss: 32421.627 - train AUC: 0.5466   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8608   - loss: 32421.627\nCum CV train: 0.8861   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_119', 'var_119_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003945 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5260   - loss: 324.847  - train AUC: 0.5388   - loss: 324.633 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002357 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5291   - loss: 324.929  - train AUC: 0.5376   - loss: 324.625 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003250 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5322   - loss: 324.739  - train AUC: 0.5321   - loss: 324.876 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002451 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5265   - loss: 325.106  - train AUC: 0.5343   - loss: 324.779 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001208 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5246   - loss: 325.212  - train AUC: 0.5343   - loss: 324.695 \nScore:  - val AUC: 0.5274   - loss: 32421.627 - train AUC: 0.5372   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8621   - loss: 32421.627\nCum CV train: 0.8873   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_120', 'var_120_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001155 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5127   - loss: 326.011  - train AUC: 0.5175   - loss: 325.985 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001230 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5159   - loss: 325.927  - train AUC: 0.5206   - loss: 325.867 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001256 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5151   - loss: 325.987  - train AUC: 0.5244   - loss: 325.728 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001119 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5154   - loss: 325.910  - train AUC: 0.5266   - loss: 325.671 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001060 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5151   - loss: 326.009  - train AUC: 0.5182   - loss: 325.890 \nScore:  - val AUC: 0.5141   - loss: 32421.627 - train AUC: 0.5248   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8623   - loss: 32421.627\nCum CV train: 0.8876   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_121', 'var_121_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002762 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 276\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5356   - loss: 324.795  - train AUC: 0.5445   - loss: 324.536 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002701 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 276\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5340   - loss: 324.746  - train AUC: 0.5455   - loss: 324.406 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002135 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 276\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5392   - loss: 324.819  - train AUC: 0.5452   - loss: 324.404 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001118 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 276\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5392   - loss: 324.463  - train AUC: 0.5464   - loss: 324.480 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002268 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 276\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5302   - loss: 325.235  - train AUC: 0.5446   - loss: 324.379 \nScore:  - val AUC: 0.5351   - loss: 32421.627 - train AUC: 0.5477   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8640   - loss: 32421.627\nCum CV train: 0.8893   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_122', 'var_122_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001195 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5404   - loss: 324.257  - train AUC: 0.5438   - loss: 324.410 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003520 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5372   - loss: 324.917  - train AUC: 0.5441   - loss: 324.264 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002724 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5379   - loss: 324.444  - train AUC: 0.5436   - loss: 324.422 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002425 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5407   - loss: 324.454  - train AUC: 0.5441   - loss: 324.381 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002334 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5400   - loss: 324.945  - train AUC: 0.5428   - loss: 324.349 \nScore:  - val AUC: 0.5388   - loss: 32421.627 - train AUC: 0.5448   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8658   - loss: 32421.627\nCum CV train: 0.8909   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_123', 'var_123_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002460 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5418   - loss: 324.119  - train AUC: 0.5462   - loss: 323.999 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002202 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5430   - loss: 324.149  - train AUC: 0.5471   - loss: 323.948 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003660 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5321   - loss: 324.712  - train AUC: 0.5477   - loss: 323.903 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002668 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5453   - loss: 323.932  - train AUC: 0.5470   - loss: 324.022 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003261 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5485   - loss: 323.997  - train AUC: 0.5459   - loss: 323.991 \nScore:  - val AUC: 0.5418   - loss: 32421.627 - train AUC: 0.5478   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8675   - loss: 32421.627\nCum CV train: 0.8924   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_125', 'var_125_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002449 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 312\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5256   - loss: 325.446  - train AUC: 0.5279   - loss: 325.501 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002193 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 312\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5227   - loss: 325.598  - train AUC: 0.5294   - loss: 325.441 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002729 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 312\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5183   - loss: 325.688  - train AUC: 0.5301   - loss: 325.411 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002425 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 312\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5206   - loss: 325.776  - train AUC: 0.5299   - loss: 325.402 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002786 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 312\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5262   - loss: 325.565  - train AUC: 0.5281   - loss: 325.442 \nScore:  - val AUC: 0.5220   - loss: 32421.627 - train AUC: 0.5311   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8680   - loss: 32421.627\nCum CV train: 0.8930   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_127', 'var_127_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001342 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1039\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5351   - loss: 324.782  - train AUC: 0.5432   - loss: 324.600 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002484 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1039\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5429   - loss: 324.722  - train AUC: 0.5444   - loss: 324.488 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002375 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1039\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5473   - loss: 324.379  - train AUC: 0.5446   - loss: 324.601 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003031 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1039\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5321   - loss: 325.186  - train AUC: 0.5467   - loss: 324.405 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.004389 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1039\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5399   - loss: 324.649  - train AUC: 0.5437   - loss: 324.582 \nScore:  - val AUC: 0.5387   - loss: 32421.627 - train AUC: 0.5459   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8696   - loss: 32421.627\nCum CV train: 0.8945   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_128', 'var_128_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002364 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5248   - loss: 325.402  - train AUC: 0.5309   - loss: 325.374 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002844 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5170   - loss: 325.862  - train AUC: 0.5315   - loss: 325.231 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001146 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5262   - loss: 325.448  - train AUC: 0.5319   - loss: 325.227 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001281 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5258   - loss: 325.343  - train AUC: 0.5341   - loss: 325.215 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002386 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5145   - loss: 325.828  - train AUC: 0.5318   - loss: 325.257 \nScore:  - val AUC: 0.5209   - loss: 32421.627 - train AUC: 0.5336   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8704   - loss: 32421.627\nCum CV train: 0.8954   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_129', 'var_129_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003139 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5035   - loss: 326.017  - train AUC: 0.5166   - loss: 325.953 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002338 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5008   - loss: 326.114  - train AUC: 0.5128   - loss: 325.947 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002340 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5089   - loss: 326.159  - train AUC: 0.5141   - loss: 325.904 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001261 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5031   - loss: 326.080  - train AUC: 0.5188   - loss: 325.917 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001112 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5099   - loss: 326.129  - train AUC: 0.5200   - loss: 325.857 \nScore:  - val AUC: 0.5049   - loss: 32421.627 - train AUC: 0.5203   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8705   - loss: 32421.627\nCum CV train: 0.8956   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_130', 'var_130_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002668 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5255   - loss: 325.074  - train AUC: 0.5352   - loss: 325.082 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002164 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5208   - loss: 325.232  - train AUC: 0.5340   - loss: 325.083 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002342 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5319   - loss: 325.210  - train AUC: 0.5377   - loss: 324.922 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002278 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5185   - loss: 325.566  - train AUC: 0.5347   - loss: 324.953 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002662 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5314   - loss: 325.403  - train AUC: 0.5338   - loss: 324.958 \nScore:  - val AUC: 0.5247   - loss: 32421.627 - train AUC: 0.5367   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8715   - loss: 32421.627\nCum CV train: 0.8966   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_131', 'var_131_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001066 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 555\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5198   - loss: 325.655  - train AUC: 0.5388   - loss: 325.101 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002199 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 555\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5282   - loss: 325.701  - train AUC: 0.5381   - loss: 325.112 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003967 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 555\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5275   - loss: 325.502  - train AUC: 0.5387   - loss: 325.108 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002327 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 555\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5329   - loss: 325.300  - train AUC: 0.5376   - loss: 325.171 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002317 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 555\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5318   - loss: 325.395  - train AUC: 0.5381   - loss: 325.084 \nScore:  - val AUC: 0.5269   - loss: 32421.627 - train AUC: 0.5412   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8722   - loss: 32421.627\nCum CV train: 0.8975   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_132', 'var_132_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002473 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 277\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5204   - loss: 325.674  - train AUC: 0.5318   - loss: 325.335 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002181 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 277\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5152   - loss: 325.843  - train AUC: 0.5314   - loss: 325.324 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002571 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 277\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5313   - loss: 325.351  - train AUC: 0.5293   - loss: 325.419 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001063 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 277\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5218   - loss: 325.543  - train AUC: 0.5318   - loss: 325.369 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001192 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 277\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5150   - loss: 325.677  - train AUC: 0.5326   - loss: 325.348 \nScore:  - val AUC: 0.5199   - loss: 32421.627 - train AUC: 0.5332   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8727   - loss: 32421.627\nCum CV train: 0.8981   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_133', 'var_133_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002688 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 566\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5395   - loss: 324.324  - train AUC: 0.5530   - loss: 323.714 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002754 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 566\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5493   - loss: 323.650  - train AUC: 0.5512   - loss: 323.887 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002335 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 566\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5529   - loss: 323.613  - train AUC: 0.5508   - loss: 323.863 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002514 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 566\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5435   - loss: 324.318  - train AUC: 0.5532   - loss: 323.719 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003727 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 566\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5435   - loss: 324.290  - train AUC: 0.5555   - loss: 323.551 \nScore:  - val AUC: 0.5450   - loss: 32421.627 - train AUC: 0.5553   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8748   - loss: 32421.627\nCum CV train: 0.8999   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_134', 'var_134_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002435 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5190   - loss: 325.501  - train AUC: 0.5292   - loss: 325.377 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001281 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5080   - loss: 325.589  - train AUC: 0.5279   - loss: 325.428 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002454 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5141   - loss: 325.825  - train AUC: 0.5277   - loss: 325.351 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001174 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5200   - loss: 325.564  - train AUC: 0.5286   - loss: 325.361 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002419 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5125   - loss: 325.855  - train AUC: 0.5268   - loss: 325.419 \nScore:  - val AUC: 0.5144   - loss: 32421.627 - train AUC: 0.5310   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8751   - loss: 32421.627\nCum CV train: 0.9004   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_135', 'var_135_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002779 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5238   - loss: 325.097  - train AUC: 0.5304   - loss: 324.908 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001213 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5263   - loss: 324.823  - train AUC: 0.5291   - loss: 324.998 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002383 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5218   - loss: 325.095  - train AUC: 0.5304   - loss: 324.963 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001179 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5314   - loss: 324.987  - train AUC: 0.5286   - loss: 324.944 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002396 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5153   - loss: 325.528  - train AUC: 0.5322   - loss: 324.842 \nScore:  - val AUC: 0.5230   - loss: 32421.627 - train AUC: 0.5318   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8760   - loss: 32421.627\nCum CV train: 0.9012   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_137', 'var_137_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002573 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5217   - loss: 325.400  - train AUC: 0.5368   - loss: 325.102 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002456 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5264   - loss: 325.317  - train AUC: 0.5351   - loss: 325.185 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002618 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5259   - loss: 325.425  - train AUC: 0.5353   - loss: 325.131 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001265 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5243   - loss: 325.631  - train AUC: 0.5314   - loss: 325.114 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002671 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5267   - loss: 325.165  - train AUC: 0.5359   - loss: 325.168 \nScore:  - val AUC: 0.5245   - loss: 32421.627 - train AUC: 0.5384   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8768   - loss: 32421.627\nCum CV train: 0.9020   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_138', 'var_138_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003946 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5207   - loss: 325.762  - train AUC: 0.5241   - loss: 325.679 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001276 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5204   - loss: 325.733  - train AUC: 0.5262   - loss: 325.609 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001131 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5123   - loss: 325.945  - train AUC: 0.5250   - loss: 325.656 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002339 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5193   - loss: 325.824  - train AUC: 0.5258   - loss: 325.629 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003997 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5189   - loss: 325.929  - train AUC: 0.5233   - loss: 325.671 \nScore:  - val AUC: 0.5172   - loss: 32421.627 - train AUC: 0.5265   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8771   - loss: 32421.627\nCum CV train: 0.9024   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_139', 'var_139_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001296 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5732   - loss: 321.230  - train AUC: 0.5777   - loss: 321.127 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003638 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5785   - loss: 321.273  - train AUC: 0.5781   - loss: 321.030 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002449 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5792   - loss: 321.159  - train AUC: 0.5773   - loss: 321.124 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001479 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5720   - loss: 321.651  - train AUC: 0.5781   - loss: 321.022 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002743 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5735   - loss: 321.309  - train AUC: 0.5792   - loss: 321.061 \nScore:  - val AUC: 0.5749   - loss: 32421.627 - train AUC: 0.5791   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8813   - loss: 32421.627\nCum CV train: 0.9058   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_140', 'var_140_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001128 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5171   - loss: 326.009  - train AUC: 0.5185   - loss: 325.933 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002391 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5094   - loss: 326.040  - train AUC: 0.5198   - loss: 325.926 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002489 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5070   - loss: 326.161  - train AUC: 0.5210   - loss: 325.878 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002696 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5152   - loss: 325.991  - train AUC: 0.5192   - loss: 325.901 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002867 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5125   - loss: 326.086  - train AUC: 0.5219   - loss: 325.869 \nScore:  - val AUC: 0.5116   - loss: 32421.627 - train AUC: 0.5217   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8814   - loss: 32421.627\nCum CV train: 0.9060   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_141', 'var_141_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001174 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5322   - loss: 325.088  - train AUC: 0.5377   - loss: 324.661 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002608 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5322   - loss: 324.807  - train AUC: 0.5404   - loss: 324.669 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001290 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5306   - loss: 325.046  - train AUC: 0.5388   - loss: 324.657 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001230 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5314   - loss: 324.868  - train AUC: 0.5395   - loss: 324.712 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001259 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1034\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5286   - loss: 325.290  - train AUC: 0.5375   - loss: 324.668 \nScore:  - val AUC: 0.5302   - loss: 32421.627 - train AUC: 0.5407   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8825   - loss: 32421.627\nCum CV train: 0.9070   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_142', 'var_142_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001369 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5197   - loss: 325.633  - train AUC: 0.5301   - loss: 325.499 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002942 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5292   - loss: 325.686  - train AUC: 0.5233   - loss: 325.627 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003937 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5205   - loss: 325.678  - train AUC: 0.5278   - loss: 325.480 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001179 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5176   - loss: 325.823  - train AUC: 0.5299   - loss: 325.438 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002569 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5139   - loss: 325.937  - train AUC: 0.5272   - loss: 325.533 \nScore:  - val AUC: 0.5195   - loss: 32421.627 - train AUC: 0.5296   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8828   - loss: 32421.627\nCum CV train: 0.9075   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_143', 'var_143_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002463 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1041\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5069   - loss: 325.901  - train AUC: 0.5189   - loss: 325.897 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002413 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1041\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5097   - loss: 325.968  - train AUC: 0.5188   - loss: 325.900 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002294 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1041\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.4999   - loss: 326.259  - train AUC: 0.5172   - loss: 325.828 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003598 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1041\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5097   - loss: 325.989  - train AUC: 0.5146   - loss: 325.896 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002426 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1041\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5048   - loss: 326.124  - train AUC: 0.5193   - loss: 325.795 \nScore:  - val AUC: 0.5048   - loss: 32421.627 - train AUC: 0.5222   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8829   - loss: 32421.627\nCum CV train: 0.9077   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_144', 'var_144_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003492 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1054\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5153   - loss: 325.805  - train AUC: 0.5282   - loss: 325.547 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002530 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1054\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5243   - loss: 325.967  - train AUC: 0.5238   - loss: 325.611 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002087 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1054\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5243   - loss: 325.645  - train AUC: 0.5261   - loss: 325.648 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002771 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1054\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5142   - loss: 325.862  - train AUC: 0.5283   - loss: 325.569 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002805 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1054\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5238   - loss: 325.898  - train AUC: 0.5259   - loss: 325.581 \nScore:  - val AUC: 0.5188   - loss: 32421.627 - train AUC: 0.5286   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8833   - loss: 32421.627\nCum CV train: 0.9081   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_145', 'var_145_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002721 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5306   - loss: 325.039  - train AUC: 0.5314   - loss: 325.057 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.004036 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5232   - loss: 325.303  - train AUC: 0.5334   - loss: 324.965 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001300 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5240   - loss: 325.181  - train AUC: 0.5358   - loss: 324.904 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002610 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5261   - loss: 325.124  - train AUC: 0.5316   - loss: 325.029 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002661 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 526\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5296   - loss: 325.157  - train AUC: 0.5319   - loss: 324.975 \nScore:  - val AUC: 0.5263   - loss: 32421.627 - train AUC: 0.5340   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8840   - loss: 32421.627\nCum CV train: 0.9089   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_146', 'var_146_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002955 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 532\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5556   - loss: 323.182  - train AUC: 0.5620   - loss: 322.664 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001086 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 532\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5531   - loss: 322.955  - train AUC: 0.5622   - loss: 322.784 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002384 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 532\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5610   - loss: 322.866  - train AUC: 0.5596   - loss: 322.827 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002386 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 532\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5541   - loss: 323.209  - train AUC: 0.5630   - loss: 322.612 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002026 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 532\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5635   - loss: 322.660  - train AUC: 0.5624   - loss: 322.688 \nScore:  - val AUC: 0.5573   - loss: 32421.627 - train AUC: 0.5627   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8867   - loss: 32421.627\nCum CV train: 0.9111   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_147', 'var_147_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002439 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5392   - loss: 324.059  - train AUC: 0.5447   - loss: 323.867 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002374 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5443   - loss: 323.828  - train AUC: 0.5425   - loss: 323.984 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001109 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5411   - loss: 324.041  - train AUC: 0.5453   - loss: 323.848 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002536 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5403   - loss: 324.368  - train AUC: 0.5438   - loss: 323.814 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002421 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5385   - loss: 324.096  - train AUC: 0.5448   - loss: 323.866 \nScore:  - val AUC: 0.5399   - loss: 32421.627 - train AUC: 0.5455   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8884   - loss: 32421.627\nCum CV train: 0.9124   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_148', 'var_148_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002318 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 594\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5613   - loss: 323.477  - train AUC: 0.5568   - loss: 323.916 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002297 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 594\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5482   - loss: 324.132  - train AUC: 0.5551   - loss: 323.851 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002613 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 594\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5589   - loss: 323.506  - train AUC: 0.5588   - loss: 323.862 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002186 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 594\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5447   - loss: 324.470  - train AUC: 0.5590   - loss: 323.718 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002563 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 594\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5400   - loss: 324.824  - train AUC: 0.5613   - loss: 323.565 \nScore:  - val AUC: 0.5497   - loss: 32421.627 - train AUC: 0.5605   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8899   - loss: 32421.627\nCum CV train: 0.9137   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_149', 'var_149_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002537 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5539   - loss: 324.265  - train AUC: 0.5549   - loss: 324.249 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001327 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5501   - loss: 324.365  - train AUC: 0.5545   - loss: 324.284 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002325 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5484   - loss: 324.588  - train AUC: 0.5547   - loss: 324.307 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002593 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5485   - loss: 324.669  - train AUC: 0.5529   - loss: 324.344 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001251 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5500   - loss: 324.748  - train AUC: 0.5535   - loss: 324.327 \nScore:  - val AUC: 0.5496   - loss: 32421.627 - train AUC: 0.5553   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8913   - loss: 32421.627\nCum CV train: 0.9149   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_150', 'var_150_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002588 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5350   - loss: 325.259  - train AUC: 0.5341   - loss: 325.252 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002276 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5213   - loss: 325.533  - train AUC: 0.5318   - loss: 325.320 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002258 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5206   - loss: 325.603  - train AUC: 0.5308   - loss: 325.328 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002535 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5238   - loss: 325.493  - train AUC: 0.5312   - loss: 325.377 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002627 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5239   - loss: 325.687  - train AUC: 0.5293   - loss: 325.321 \nScore:  - val AUC: 0.5248   - loss: 32421.627 - train AUC: 0.5338   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8918   - loss: 32421.627\nCum CV train: 0.9155   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_151', 'var_151_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002354 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5203   - loss: 325.524  - train AUC: 0.5299   - loss: 325.224 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002580 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5220   - loss: 325.300  - train AUC: 0.5292   - loss: 325.310 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002331 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5295   - loss: 325.206  - train AUC: 0.5285   - loss: 325.232 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001309 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5247   - loss: 325.600  - train AUC: 0.5281   - loss: 325.216 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002626 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5220   - loss: 325.379  - train AUC: 0.5316   - loss: 325.161 \nScore:  - val AUC: 0.5231   - loss: 32421.627 - train AUC: 0.5313   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8923   - loss: 32421.627\nCum CV train: 0.9160   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_152', 'var_152_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002380 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.4990   - loss: 326.149  - train AUC: 0.5193   - loss: 325.846 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002503 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5091   - loss: 325.854  - train AUC: 0.5167   - loss: 325.904 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001112 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5062   - loss: 325.974  - train AUC: 0.5183   - loss: 325.884 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001165 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5085   - loss: 326.130  - train AUC: 0.5212   - loss: 325.758 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001155 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5082   - loss: 326.055  - train AUC: 0.5181   - loss: 325.827 \nScore:  - val AUC: 0.5063   - loss: 32421.627 - train AUC: 0.5226   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8924   - loss: 32421.627\nCum CV train: 0.9162   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_153', 'var_153_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002669 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5068   - loss: 326.085  - train AUC: 0.5284   - loss: 325.806 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002342 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5067   - loss: 326.082  - train AUC: 0.5122   - loss: 326.069 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002520 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5029   - loss: 326.191  - train AUC: 0.5273   - loss: 325.787 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002424 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5104   - loss: 326.120  - train AUC: 0.5175   - loss: 326.024 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001123 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.4937   - loss: 326.336  - train AUC: 0.5247   - loss: 325.871 \nScore:  - val AUC: 0.5035   - loss: 32421.627 - train AUC: 0.5311   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8924   - loss: 32421.627\nCum CV train: 0.9163   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_154', 'var_154_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001402 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5465   - loss: 324.121  - train AUC: 0.5470   - loss: 323.827 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001131 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5452   - loss: 323.827  - train AUC: 0.5474   - loss: 323.946 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003851 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5395   - loss: 324.058  - train AUC: 0.5506   - loss: 323.798 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001170 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5462   - loss: 324.024  - train AUC: 0.5471   - loss: 323.884 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002376 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5427   - loss: 324.397  - train AUC: 0.5461   - loss: 323.863 \nScore:  - val AUC: 0.5439   - loss: 32421.627 - train AUC: 0.5491   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8937   - loss: 32421.627\nCum CV train: 0.9174   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_155', 'var_155_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001238 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5356   - loss: 324.549  - train AUC: 0.5397   - loss: 324.328 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002646 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5341   - loss: 324.890  - train AUC: 0.5394   - loss: 324.263 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002565 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5302   - loss: 324.611  - train AUC: 0.5408   - loss: 324.357 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002358 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5379   - loss: 324.411  - train AUC: 0.5396   - loss: 324.382 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001270 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 523\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5263   - loss: 324.736  - train AUC: 0.5400   - loss: 324.406 \nScore:  - val AUC: 0.5324   - loss: 32421.627 - train AUC: 0.5415   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8949   - loss: 32421.627\nCum CV train: 0.9184   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_156', 'var_156_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003203 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 539\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5097   - loss: 325.887  - train AUC: 0.5303   - loss: 325.539 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002336 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 539\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5167   - loss: 325.951  - train AUC: 0.5305   - loss: 325.511 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002368 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 539\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5197   - loss: 325.834  - train AUC: 0.5280   - loss: 325.631 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002424 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 539\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5133   - loss: 325.997  - train AUC: 0.5282   - loss: 325.527 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003240 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 539\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5130   - loss: 325.958  - train AUC: 0.5310   - loss: 325.558 \nScore:  - val AUC: 0.5139   - loss: 32421.627 - train AUC: 0.5327   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8951   - loss: 32421.627\nCum CV train: 0.9189   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_157', 'var_157_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001294 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5356   - loss: 324.705  - train AUC: 0.5354   - loss: 324.911 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001336 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5310   - loss: 324.977  - train AUC: 0.5345   - loss: 324.935 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002564 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5249   - loss: 325.410  - train AUC: 0.5351   - loss: 324.839 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001158 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5267   - loss: 325.008  - train AUC: 0.5349   - loss: 324.919 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002538 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5279   - loss: 325.152  - train AUC: 0.5347   - loss: 324.851 \nScore:  - val AUC: 0.5288   - loss: 32421.627 - train AUC: 0.5358   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8960   - loss: 32421.627\nCum CV train: 0.9196   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_159', 'var_159_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002684 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5178   - loss: 325.993  - train AUC: 0.5203   - loss: 325.848 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001444 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5008   - loss: 326.275  - train AUC: 0.5258   - loss: 325.754 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002494 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5093   - loss: 326.154  - train AUC: 0.5253   - loss: 325.782 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002844 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5100   - loss: 326.083  - train AUC: 0.5240   - loss: 325.802 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002539 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5160   - loss: 325.980  - train AUC: 0.5235   - loss: 325.862 \nScore:  - val AUC: 0.5097   - loss: 32421.627 - train AUC: 0.5272   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8960   - loss: 32421.627\nCum CV train: 0.9198   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_160', 'var_160_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001163 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5018   - loss: 326.082  - train AUC: 0.5147   - loss: 326.016 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002429 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5053   - loss: 326.105  - train AUC: 0.5083   - loss: 326.130 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002560 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.4988   - loss: 326.167  - train AUC: 0.5169   - loss: 325.921 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002345 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5111   - loss: 326.024  - train AUC: 0.5157   - loss: 325.970 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002305 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5095   - loss: 326.071  - train AUC: 0.5138   - loss: 326.014 \nScore:  - val AUC: 0.5053   - loss: 32421.627 - train AUC: 0.5184   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8960   - loss: 32421.627\nCum CV train: 0.9199   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_162', 'var_162_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002163 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1046\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5289   - loss: 325.421  - train AUC: 0.5399   - loss: 324.858 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002540 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1046\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5280   - loss: 325.134  - train AUC: 0.5389   - loss: 324.952 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002358 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1046\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5294   - loss: 325.241  - train AUC: 0.5398   - loss: 324.907 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002434 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1046\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5326   - loss: 325.356  - train AUC: 0.5385   - loss: 324.860 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002889 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1046\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5339   - loss: 325.095  - train AUC: 0.5363   - loss: 324.997 \nScore:  - val AUC: 0.5299   - loss: 32421.627 - train AUC: 0.5412   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8967   - loss: 32421.627\nCum CV train: 0.9205   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_163', 'var_163_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.004002 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5314   - loss: 324.860  - train AUC: 0.5395   - loss: 324.631 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001232 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5320   - loss: 324.747  - train AUC: 0.5403   - loss: 324.649 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001225 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5377   - loss: 324.482  - train AUC: 0.5417   - loss: 324.621 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002422 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5206   - loss: 325.708  - train AUC: 0.5426   - loss: 324.432 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002628 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5441   - loss: 324.763  - train AUC: 0.5387   - loss: 324.609 \nScore:  - val AUC: 0.5321   - loss: 32421.627 - train AUC: 0.5421   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8976   - loss: 32421.627\nCum CV train: 0.9214   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_164', 'var_164_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001196 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5486   - loss: 322.871  - train AUC: 0.5430   - loss: 323.880 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001362 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5395   - loss: 323.628  - train AUC: 0.5468   - loss: 323.669 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002689 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5387   - loss: 324.045  - train AUC: 0.5458   - loss: 323.576 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002451 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5374   - loss: 324.149  - train AUC: 0.5453   - loss: 323.595 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002820 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 525\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5332   - loss: 324.456  - train AUC: 0.5452   - loss: 323.569 \nScore:  - val AUC: 0.5385   - loss: 32421.627 - train AUC: 0.5473   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.8990   - loss: 32421.627\nCum CV train: 0.9225   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_165', 'var_165_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000918 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5492   - loss: 323.782  - train AUC: 0.5572   - loss: 323.198 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002590 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5538   - loss: 323.662  - train AUC: 0.5575   - loss: 323.109 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003973 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5554   - loss: 323.477  - train AUC: 0.5567   - loss: 323.225 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002634 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5510   - loss: 323.330  - train AUC: 0.5575   - loss: 323.235 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002468 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 269\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5534   - loss: 323.240  - train AUC: 0.5585   - loss: 323.181 \nScore:  - val AUC: 0.5521   - loss: 32421.627 - train AUC: 0.5585   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9010   - loss: 32421.627\nCum CV train: 0.9241   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_166', 'var_166_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002600 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 563\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5501   - loss: 323.829  - train AUC: 0.5587   - loss: 323.237 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.004510 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 563\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5536   - loss: 323.556  - train AUC: 0.5561   - loss: 323.374 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002414 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 563\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5532   - loss: 323.394  - train AUC: 0.5593   - loss: 323.309 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002641 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 563\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5481   - loss: 324.116  - train AUC: 0.5592   - loss: 323.183 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002204 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 563\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5532   - loss: 323.173  - train AUC: 0.5575   - loss: 323.433 \nScore:  - val AUC: 0.5511   - loss: 32421.627 - train AUC: 0.5603   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9027   - loss: 32421.627\nCum CV train: 0.9256   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_167', 'var_167_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002328 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5298   - loss: 325.228  - train AUC: 0.5372   - loss: 324.906 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002514 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5358   - loss: 325.084  - train AUC: 0.5351   - loss: 324.954 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001080 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5213   - loss: 325.468  - train AUC: 0.5374   - loss: 324.931 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002419 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5284   - loss: 325.195  - train AUC: 0.5368   - loss: 324.946 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001127 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 522\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5347   - loss: 324.823  - train AUC: 0.5354   - loss: 325.037 \nScore:  - val AUC: 0.5295   - loss: 32421.627 - train AUC: 0.5374   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9034   - loss: 32421.627\nCum CV train: 0.9262   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_168', 'var_168_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002593 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5123   - loss: 325.939  - train AUC: 0.5223   - loss: 325.728 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003486 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5157   - loss: 325.859  - train AUC: 0.5230   - loss: 325.774 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002410 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5200   - loss: 325.735  - train AUC: 0.5247   - loss: 325.733 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002350 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5133   - loss: 325.851  - train AUC: 0.5232   - loss: 325.765 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002779 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 270\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5129   - loss: 325.997  - train AUC: 0.5233   - loss: 325.719 \nScore:  - val AUC: 0.5144   - loss: 32421.627 - train AUC: 0.5257   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9037   - loss: 32421.627\nCum CV train: 0.9265   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_169', 'var_169_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002724 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1078\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5521   - loss: 324.193  - train AUC: 0.5504   - loss: 324.524 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002411 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1078\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5457   - loss: 324.783  - train AUC: 0.5522   - loss: 324.350 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002318 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1078\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5479   - loss: 324.778  - train AUC: 0.5522   - loss: 324.331 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002455 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1078\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5428   - loss: 324.847  - train AUC: 0.5535   - loss: 324.307 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002320 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1078\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5457   - loss: 324.575  - train AUC: 0.5504   - loss: 324.489 \nScore:  - val AUC: 0.5461   - loss: 32421.627 - train AUC: 0.5540   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9048   - loss: 32421.627\nCum CV train: 0.9274   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_170', 'var_170_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001203 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5401   - loss: 324.070  - train AUC: 0.5491   - loss: 323.493 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002392 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5431   - loss: 324.111  - train AUC: 0.5475   - loss: 323.577 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002632 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5408   - loss: 323.749  - train AUC: 0.5495   - loss: 323.526 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002753 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5407   - loss: 323.634  - train AUC: 0.5469   - loss: 323.662 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002777 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5400   - loss: 323.911  - train AUC: 0.5490   - loss: 323.558 \nScore:  - val AUC: 0.5402   - loss: 32421.627 - train AUC: 0.5500   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9062   - loss: 32421.627\nCum CV train: 0.9286   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_171', 'var_171_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.004217 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5057   - loss: 325.961  - train AUC: 0.5271   - loss: 325.615 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002432 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5223   - loss: 325.656  - train AUC: 0.5239   - loss: 325.713 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003588 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5125   - loss: 325.939  - train AUC: 0.5247   - loss: 325.650 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002604 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5185   - loss: 325.795  - train AUC: 0.5254   - loss: 325.662 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002419 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5041   - loss: 326.142  - train AUC: 0.5267   - loss: 325.605 \nScore:  - val AUC: 0.5115   - loss: 32421.627 - train AUC: 0.5292   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9063   - loss: 32421.627\nCum CV train: 0.9289   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_172', 'var_172_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002562 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5373   - loss: 324.903  - train AUC: 0.5457   - loss: 324.439 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001215 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5416   - loss: 324.759  - train AUC: 0.5425   - loss: 324.591 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001058 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5439   - loss: 324.584  - train AUC: 0.5431   - loss: 324.536 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001350 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5420   - loss: 324.389  - train AUC: 0.5435   - loss: 324.584 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002436 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 524\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5382   - loss: 324.877  - train AUC: 0.5446   - loss: 324.480 \nScore:  - val AUC: 0.5403   - loss: 32421.627 - train AUC: 0.5447   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9074   - loss: 32421.627\nCum CV train: 0.9298   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_173', 'var_173_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002422 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5385   - loss: 324.704  - train AUC: 0.5424   - loss: 324.508 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001277 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5411   - loss: 324.742  - train AUC: 0.5429   - loss: 324.409 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003614 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5385   - loss: 324.755  - train AUC: 0.5437   - loss: 324.437 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001121 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5366   - loss: 324.423  - train AUC: 0.5443   - loss: 324.506 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002539 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5390   - loss: 324.642  - train AUC: 0.5440   - loss: 324.453 \nScore:  - val AUC: 0.5385   - loss: 32421.627 - train AUC: 0.5446   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9083   - loss: 32421.627\nCum CV train: 0.9306   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_174', 'var_174_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002340 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5484   - loss: 322.183  - train AUC: 0.5587   - loss: 322.452 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001163 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5461   - loss: 323.552  - train AUC: 0.5595   - loss: 322.050 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001105 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5557   - loss: 322.164  - train AUC: 0.5566   - loss: 322.434 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001200 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5551   - loss: 322.371  - train AUC: 0.5569   - loss: 322.369 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001305 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5604   - loss: 322.632  - train AUC: 0.5555   - loss: 322.270 \nScore:  - val AUC: 0.5529   - loss: 32421.627 - train AUC: 0.5585   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9102   - loss: 32421.627\nCum CV train: 0.9321   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_175', 'var_175_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002917 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5212   - loss: 325.643  - train AUC: 0.5284   - loss: 325.584 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002031 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5245   - loss: 325.609  - train AUC: 0.5258   - loss: 325.585 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001090 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5200   - loss: 325.750  - train AUC: 0.5271   - loss: 325.546 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002607 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5154   - loss: 325.888  - train AUC: 0.5278   - loss: 325.502 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002244 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 274\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5281   - loss: 325.610  - train AUC: 0.5252   - loss: 325.557 \nScore:  - val AUC: 0.5210   - loss: 32421.627 - train AUC: 0.5285   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9105   - loss: 32421.627\nCum CV train: 0.9324   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_176', 'var_176_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001427 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5082   - loss: 326.028  - train AUC: 0.5223   - loss: 325.883 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002411 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5083   - loss: 326.149  - train AUC: 0.5256   - loss: 325.809 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002468 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.4995   - loss: 326.200  - train AUC: 0.5216   - loss: 325.884 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002389 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5071   - loss: 326.137  - train AUC: 0.5227   - loss: 325.876 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002553 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1035\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5003   - loss: 326.239  - train AUC: 0.5224   - loss: 325.838 \nScore:  - val AUC: 0.5043   - loss: 32421.627 - train AUC: 0.5283   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9106   - loss: 32421.627\nCum CV train: 0.9326   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_177', 'var_177_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002256 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5327   - loss: 324.880  - train AUC: 0.5413   - loss: 324.145 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002075 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5338   - loss: 324.200  - train AUC: 0.5429   - loss: 324.262 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002444 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5352   - loss: 324.435  - train AUC: 0.5431   - loss: 324.143 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002674 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5336   - loss: 324.546  - train AUC: 0.5401   - loss: 324.272 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001136 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5334   - loss: 324.389  - train AUC: 0.5406   - loss: 324.309 \nScore:  - val AUC: 0.5337   - loss: 32421.627 - train AUC: 0.5431   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9117   - loss: 32421.627\nCum CV train: 0.9335   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_178', 'var_178_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001217 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5227   - loss: 325.785  - train AUC: 0.5296   - loss: 325.478 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002451 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5172   - loss: 325.747  - train AUC: 0.5303   - loss: 325.485 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002662 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5180   - loss: 325.857  - train AUC: 0.5278   - loss: 325.583 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002586 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5240   - loss: 325.629  - train AUC: 0.5293   - loss: 325.492 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002792 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5175   - loss: 325.889  - train AUC: 0.5296   - loss: 325.449 \nScore:  - val AUC: 0.5194   - loss: 32421.627 - train AUC: 0.5313   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9119   - loss: 32421.627\nCum CV train: 0.9338   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_179', 'var_179_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002475 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5522   - loss: 322.942  - train AUC: 0.5500   - loss: 323.401 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002455 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5436   - loss: 323.532  - train AUC: 0.5528   - loss: 323.150 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002067 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5383   - loss: 323.583  - train AUC: 0.5530   - loss: 323.236 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002379 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5449   - loss: 323.687  - train AUC: 0.5512   - loss: 323.237 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002683 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 273\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5425   - loss: 323.887  - train AUC: 0.5520   - loss: 323.118 \nScore:  - val AUC: 0.5438   - loss: 32421.627 - train AUC: 0.5537   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9131   - loss: 32421.627\nCum CV train: 0.9347   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_180', 'var_180_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001234 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5341   - loss: 324.970  - train AUC: 0.5330   - loss: 324.907 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001223 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5285   - loss: 325.052  - train AUC: 0.5352   - loss: 324.896 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002405 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5237   - loss: 325.324  - train AUC: 0.5338   - loss: 324.878 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002563 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5297   - loss: 324.900  - train AUC: 0.5320   - loss: 324.986 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002200 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5271   - loss: 325.172  - train AUC: 0.5318   - loss: 324.899 \nScore:  - val AUC: 0.5280   - loss: 32421.627 - train AUC: 0.5347   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9138   - loss: 32421.627\nCum CV train: 0.9353   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_181', 'var_181_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001280 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5131   - loss: 325.956  - train AUC: 0.5265   - loss: 325.725 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002320 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5095   - loss: 326.031  - train AUC: 0.5265   - loss: 325.757 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002735 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5147   - loss: 326.000  - train AUC: 0.5261   - loss: 325.749 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002615 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5182   - loss: 325.929  - train AUC: 0.5264   - loss: 325.758 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002251 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 287\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5115   - loss: 326.136  - train AUC: 0.5250   - loss: 325.722 \nScore:  - val AUC: 0.5132   - loss: 32421.627 - train AUC: 0.5283   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9139   - loss: 32421.627\nCum CV train: 0.9355   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_182', 'var_182_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.000866 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.4991   - loss: 326.047  - train AUC: 0.5127   - loss: 326.044 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002245 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5054   - loss: 326.062  - train AUC: 0.5153   - loss: 325.957 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002438 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5040   - loss: 326.101  - train AUC: 0.5165   - loss: 325.957 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002394 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5058   - loss: 326.073  - train AUC: 0.5128   - loss: 325.995 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002552 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 266\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5099   - loss: 326.017  - train AUC: 0.5210   - loss: 325.859 \nScore:  - val AUC: 0.5048   - loss: 32421.627 - train AUC: 0.5195   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9139   - loss: 32421.627\nCum CV train: 0.9356   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_183', 'var_183_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001117 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.4956   - loss: 326.216  - train AUC: 0.5166   - loss: 326.018 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003898 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100494 -> initscore=-2.191750\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5038   - loss: 326.096  - train AUC: 0.5045   - loss: 326.110 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002489 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5021   - loss: 326.114  - train AUC: 0.5171   - loss: 325.961 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001420 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5061   - loss: 326.123  - train AUC: 0.5080   - loss: 326.089 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002542 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 268\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.100487 -> initscore=-2.191820\n[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements\n - val AUC: 0.5068   - loss: 326.147  - train AUC: 0.5057   - loss: 326.079 \nScore:  - val AUC: 0.5017   - loss: 32421.627 - train AUC: 0.5179   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9140   - loss: 32421.627\nCum CV train: 0.9357   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_184', 'var_184_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001082 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5561   - loss: 323.392  - train AUC: 0.5512   - loss: 323.827 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002942 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5506   - loss: 323.745  - train AUC: 0.5503   - loss: 323.758 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002518 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5426   - loss: 324.220  - train AUC: 0.5547   - loss: 323.543 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002255 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5466   - loss: 323.900  - train AUC: 0.5508   - loss: 323.753 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002474 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5460   - loss: 324.099  - train AUC: 0.5532   - loss: 323.624 \nScore:  - val AUC: 0.5478   - loss: 32421.627 - train AUC: 0.5534   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9154   - loss: 32421.627\nCum CV train: 0.9368   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_186', 'var_186_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002513 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5294   - loss: 325.380  - train AUC: 0.5358   - loss: 325.249 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001034 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5284   - loss: 325.450  - train AUC: 0.5331   - loss: 325.343 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002182 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5331   - loss: 325.383  - train AUC: 0.5329   - loss: 325.308 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002476 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5305   - loss: 325.492  - train AUC: 0.5346   - loss: 325.328 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001145 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 528\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5221   - loss: 325.754  - train AUC: 0.5351   - loss: 325.277 \nScore:  - val AUC: 0.5279   - loss: 32421.627 - train AUC: 0.5359   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9157   - loss: 32421.627\nCum CV train: 0.9371   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_187', 'var_187_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001169 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5139   - loss: 325.573  - train AUC: 0.5273   - loss: 325.467 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002576 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5096   - loss: 325.618  - train AUC: 0.5236   - loss: 325.570 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001301 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5161   - loss: 325.720  - train AUC: 0.5235   - loss: 325.498 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001088 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5137   - loss: 325.691  - train AUC: 0.5257   - loss: 325.468 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002596 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 264\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5110   - loss: 325.978  - train AUC: 0.5238   - loss: 325.466 \nScore:  - val AUC: 0.5125   - loss: 32421.627 - train AUC: 0.5274   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9160   - loss: 32421.627\nCum CV train: 0.9375   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_188', 'var_188_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001135 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5333   - loss: 324.524  - train AUC: 0.5352   - loss: 324.751 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002433 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5342   - loss: 324.711  - train AUC: 0.5411   - loss: 324.527 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001133 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5311   - loss: 324.922  - train AUC: 0.5362   - loss: 324.683 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002614 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5267   - loss: 325.253  - train AUC: 0.5372   - loss: 324.608 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002426 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1037\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5317   - loss: 324.820  - train AUC: 0.5379   - loss: 324.640 \nScore:  - val AUC: 0.5309   - loss: 32421.627 - train AUC: 0.5388   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9169   - loss: 32421.627\nCum CV train: 0.9382   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_189', 'var_189_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002232 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 283\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5094   - loss: 326.102  - train AUC: 0.5188   - loss: 325.890 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.004081 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 283\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5103   - loss: 326.059  - train AUC: 0.5194   - loss: 325.939 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002283 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 283\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5072   - loss: 326.178  - train AUC: 0.5178   - loss: 325.878 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.003772 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 283\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5102   - loss: 326.194  - train AUC: 0.5185   - loss: 325.909 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002128 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 283\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5049   - loss: 326.173  - train AUC: 0.5221   - loss: 325.867 \nScore:  - val AUC: 0.5072   - loss: 32421.627 - train AUC: 0.5226   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9169   - loss: 32421.627\nCum CV train: 0.9383   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_190', 'var_190_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001253 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5589   - loss: 322.712  - train AUC: 0.5555   - loss: 323.391 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001297 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5604   - loss: 323.123  - train AUC: 0.5547   - loss: 323.372 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002565 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5480   - loss: 323.602  - train AUC: 0.5585   - loss: 323.187 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002498 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5413   - loss: 324.229  - train AUC: 0.5597   - loss: 323.128 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001128 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 527\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5551   - loss: 324.014  - train AUC: 0.5566   - loss: 323.128 \nScore:  - val AUC: 0.5521   - loss: 32421.627 - train AUC: 0.5577   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9184   - loss: 32421.627\nCum CV train: 0.9396   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_191', 'var_191_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002634 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5446   - loss: 324.058  - train AUC: 0.5488   - loss: 324.030 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001137 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5433   - loss: 324.047  - train AUC: 0.5500   - loss: 323.995 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001224 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5464   - loss: 324.216  - train AUC: 0.5467   - loss: 324.018 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002895 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5459   - loss: 324.035  - train AUC: 0.5489   - loss: 323.956 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001213 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5427   - loss: 324.712  - train AUC: 0.5475   - loss: 323.939 \nScore:  - val AUC: 0.5442   - loss: 32421.627 - train AUC: 0.5496   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9194   - loss: 32421.627\nCum CV train: 0.9404   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_192', 'var_192_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002728 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 536\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5405   - loss: 325.079  - train AUC: 0.5500   - loss: 324.754 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002971 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 536\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5505   - loss: 324.753  - train AUC: 0.5469   - loss: 324.902 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002446 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 536\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5411   - loss: 325.135  - train AUC: 0.5504   - loss: 324.755 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002480 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 536\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5413   - loss: 325.178  - train AUC: 0.5497   - loss: 324.789 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002477 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 536\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5444   - loss: 325.117  - train AUC: 0.5492   - loss: 324.786 \nScore:  - val AUC: 0.5429   - loss: 32421.627 - train AUC: 0.5508   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9201   - loss: 32421.627\nCum CV train: 0.9410   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_193', 'var_193_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001234 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5126   - loss: 325.946  - train AUC: 0.5267   - loss: 325.669 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002727 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5163   - loss: 325.884  - train AUC: 0.5225   - loss: 325.765 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001413 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5144   - loss: 325.935  - train AUC: 0.5227   - loss: 325.732 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002440 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5106   - loss: 325.983  - train AUC: 0.5239   - loss: 325.702 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001278 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1036\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5196   - loss: 325.858  - train AUC: 0.5228   - loss: 325.719 \nScore:  - val AUC: 0.5139   - loss: 32421.627 - train AUC: 0.5267   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9202   - loss: 32421.627\nCum CV train: 0.9412   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_194', 'var_194_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001280 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5193   - loss: 325.797  - train AUC: 0.5298   - loss: 325.454 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001321 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5138   - loss: 325.828  - train AUC: 0.5287   - loss: 325.518 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002594 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5210   - loss: 325.560  - train AUC: 0.5279   - loss: 325.565 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001197 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5207   - loss: 325.809  - train AUC: 0.5271   - loss: 325.518 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002595 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 271\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5196   - loss: 325.713  - train AUC: 0.5271   - loss: 325.500 \nScore:  - val AUC: 0.5184   - loss: 32421.627 - train AUC: 0.5297   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9204   - loss: 32421.627\nCum CV train: 0.9415   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_195', 'var_195_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002605 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 278\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5190   - loss: 325.623  - train AUC: 0.5340   - loss: 325.313 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002412 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 278\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5307   - loss: 325.419  - train AUC: 0.5340   - loss: 325.298 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002519 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 278\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5241   - loss: 325.658  - train AUC: 0.5342   - loss: 325.291 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002534 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 278\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5328   - loss: 325.207  - train AUC: 0.5340   - loss: 325.332 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002123 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 278\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5291   - loss: 325.649  - train AUC: 0.5332   - loss: 325.302 \nScore:  - val AUC: 0.5268   - loss: 32421.627 - train AUC: 0.5349   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9208   - loss: 32421.627\nCum CV train: 0.9418   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_196', 'var_196_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002500 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5233   - loss: 325.413  - train AUC: 0.5300   - loss: 325.311 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001254 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5201   - loss: 325.388  - train AUC: 0.5320   - loss: 325.222 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002321 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5139   - loss: 325.942  - train AUC: 0.5320   - loss: 325.154 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002552 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5179   - loss: 325.644  - train AUC: 0.5294   - loss: 325.248 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001351 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 267\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n[LightGBM] [Warning] No further splits with positive gain, best gain: -inf\n - val AUC: 0.5233   - loss: 325.349  - train AUC: 0.5308   - loss: 325.335 \nScore:  - val AUC: 0.5197   - loss: 32421.627 - train AUC: 0.5327   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9210   - loss: 32421.627\nCum CV train: 0.9421   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_197', 'var_197_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001008 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 285\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5360   - loss: 324.996  - train AUC: 0.5348   - loss: 325.020 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002220 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 285\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5297   - loss: 325.212  - train AUC: 0.5355   - loss: 325.013 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002652 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 285\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5257   - loss: 325.398  - train AUC: 0.5377   - loss: 324.888 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002476 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 285\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5275   - loss: 325.341  - train AUC: 0.5358   - loss: 324.978 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002400 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 285\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5346   - loss: 325.044  - train AUC: 0.5347   - loss: 325.048 \nScore:  - val AUC: 0.5302   - loss: 32421.627 - train AUC: 0.5366   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9216   - loss: 32421.627\nCum CV train: 0.9426   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_198', 'var_198_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002484 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1039\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5537   - loss: 323.405  - train AUC: 0.5543   - loss: 323.266 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001226 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1039\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5538   - loss: 323.177  - train AUC: 0.5529   - loss: 323.404 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002596 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1039\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5435   - loss: 323.661  - train AUC: 0.5559   - loss: 323.286 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001218 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 1039\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5478   - loss: 323.827  - train AUC: 0.5566   - loss: 323.151 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002676 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 1039\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5509   - loss: 323.544  - train AUC: 0.5550   - loss: 323.261 \nScore:  - val AUC: 0.5492   - loss: 32421.627 - train AUC: 0.5567   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9230   - loss: 32421.627\nCum CV train: 0.9437   - loss: 32421.627\n**************************************************\n\nTraining on: ['var_199', 'var_199_count']\n\nsetting:  4\nFold: 1[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002438 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5314   - loss: 325.352  - train AUC: 0.5295   - loss: 325.513 \nFold: 2[LightGBM] [Info] Number of positive: 16079, number of negative: 143921\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002748 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5249   - loss: 325.499  - train AUC: 0.5308   - loss: 325.507 \nFold: 3[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001307 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5238   - loss: 325.671  - train AUC: 0.5298   - loss: 325.447 \nFold: 4[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.001135 seconds.\nYou can set `force_row_wise=true` to remove the overhead.\nAnd if memory is not enough, you can set `force_col_wise=true`.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5166   - loss: 325.850  - train AUC: 0.5321   - loss: 325.405 \nFold: 5[LightGBM] [Info] Number of positive: 16078, number of negative: 143922\n[LightGBM] [Info] Auto-choosing col-wise multi-threading, the overhead of testing was 0.002380 seconds.\nYou can set `force_col_wise=true` to remove the overhead.\n[LightGBM] [Info] Total Bins 265\n[LightGBM] [Info] Number of data points in the train set: 160000, number of used features: 2\n - val AUC: 0.5227   - loss: 325.740  - train AUC: 0.5312   - loss: 325.412 \nScore:  - val AUC: 0.5235   - loss: 32421.627 - train AUC: 0.5333   - loss: 32421.627\n\nbest setting:  4\nCum CV val  : 0.9233   - loss: 32421.627\nCum CV train: 0.9440   - loss: 32421.627\n**************************************************\n\n","output_type":"stream"}],"execution_count":15},{"cell_type":"markdown","source":"# 得到集成在train和val上的效果","metadata":{}},{"cell_type":"code","source":"preds_oof_cum = np.zeros(preds_oof.shape[0])\npreds_train_cum = np.zeros(preds_train.shape[0])\nfor i in range(len(features)):\n    preds_oof_cum += preds_oof[:,i]\n    preds_train_cum += preds_train[:,i]\n    print(\"var_{} Cum val: {:<8.5f}\".format(i,roc_auc_score(target_train, preds_oof_cum)), end=\"\")\n    print(\" - train: {:<8.5f}\".format(roc_auc_score(target_train, preds_train_cum)))","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T14:30:34.494883Z","iopub.execute_input":"2025-09-19T14:30:34.495196Z","iopub.status.idle":"2025-09-19T14:31:11.023708Z","shell.execute_reply.started":"2025-09-19T14:30:34.495167Z","shell.execute_reply":"2025-09-19T14:31:11.022837Z"}},"outputs":[{"name":"stdout","text":"var_0 Cum val: 0.54774  - train: 0.55891 \nvar_1 Cum val: 0.57384  - train: 0.58248 \nvar_2 Cum val: 0.59655  - train: 0.60574 \nvar_3 Cum val: 0.59778  - train: 0.60831 \nvar_4 Cum val: 0.59839  - train: 0.61025 \nvar_5 Cum val: 0.60777  - train: 0.61998 \nvar_6 Cum val: 0.62808  - train: 0.63917 \nvar_7 Cum val: 0.63028  - train: 0.64221 \nvar_8 Cum val: 0.64102  - train: 0.65318 \nvar_9 Cum val: 0.64346  - train: 0.65661 \nvar_10 Cum val: 0.66046  - train: 0.67287 \nvar_11 Cum val: 0.67165  - train: 0.68431 \nvar_12 Cum val: 0.67186  - train: 0.68480 \nvar_13 Cum val: 0.67277  - train: 0.68637 \nvar_14 Cum val: 0.67303  - train: 0.68739 \nvar_15 Cum val: 0.68081  - train: 0.69539 \nvar_16 Cum val: 0.68168  - train: 0.69706 \nvar_17 Cum val: 0.68346  - train: 0.69942 \nvar_18 Cum val: 0.69425  - train: 0.71003 \nvar_19 Cum val: 0.70364  - train: 0.71907 \nvar_20 Cum val: 0.70490  - train: 0.72107 \nvar_21 Cum val: 0.70694  - train: 0.72364 \nvar_22 Cum val: 0.70707  - train: 0.72475 \nvar_23 Cum val: 0.71803  - train: 0.73498 \nvar_24 Cum val: 0.71927  - train: 0.73709 \nvar_25 Cum val: 0.71929  - train: 0.73727 \nvar_26 Cum val: 0.72041  - train: 0.73906 \nvar_27 Cum val: 0.72333  - train: 0.74225 \nvar_28 Cum val: 0.72923  - train: 0.74795 \nvar_29 Cum val: 0.73427  - train: 0.75290 \nvar_30 Cum val: 0.73743  - train: 0.75633 \nvar_31 Cum val: 0.74063  - train: 0.75969 \nvar_32 Cum val: 0.74082  - train: 0.76036 \nvar_33 Cum val: 0.74073  - train: 0.76093 \nvar_34 Cum val: 0.74692  - train: 0.76660 \nvar_35 Cum val: 0.74693  - train: 0.76662 \nvar_36 Cum val: 0.74694  - train: 0.76729 \nvar_37 Cum val: 0.74846  - train: 0.76905 \nvar_38 Cum val: 0.75521  - train: 0.77539 \nvar_39 Cum val: 0.75643  - train: 0.77696 \nvar_40 Cum val: 0.75650  - train: 0.77737 \nvar_41 Cum val: 0.75649  - train: 0.77780 \nvar_42 Cum val: 0.75871  - train: 0.78008 \nvar_43 Cum val: 0.76145  - train: 0.78283 \nvar_44 Cum val: 0.76171  - train: 0.78348 \nvar_45 Cum val: 0.76379  - train: 0.78586 \nvar_46 Cum val: 0.76569  - train: 0.78791 \nvar_47 Cum val: 0.77250  - train: 0.79429 \nvar_48 Cum val: 0.77295  - train: 0.79512 \nvar_49 Cum val: 0.77372  - train: 0.79616 \nvar_50 Cum val: 0.77609  - train: 0.79871 \nvar_51 Cum val: 0.77630  - train: 0.79967 \nvar_52 Cum val: 0.77743  - train: 0.80127 \nvar_53 Cum val: 0.77763  - train: 0.80184 \nvar_54 Cum val: 0.77790  - train: 0.80240 \nvar_55 Cum val: 0.77802  - train: 0.80285 \nvar_56 Cum val: 0.77822  - train: 0.80360 \nvar_57 Cum val: 0.77882  - train: 0.80456 \nvar_58 Cum val: 0.77910  - train: 0.80520 \nvar_59 Cum val: 0.77926  - train: 0.80578 \nvar_60 Cum val: 0.78001  - train: 0.80682 \nvar_61 Cum val: 0.78310  - train: 0.80980 \nvar_62 Cum val: 0.78332  - train: 0.81057 \nvar_63 Cum val: 0.78376  - train: 0.81128 \nvar_64 Cum val: 0.78551  - train: 0.81314 \nvar_65 Cum val: 0.78656  - train: 0.81444 \nvar_66 Cum val: 0.78671  - train: 0.81494 \nvar_67 Cum val: 0.78686  - train: 0.81517 \nvar_68 Cum val: 0.78767  - train: 0.81616 \nvar_69 Cum val: 0.79076  - train: 0.81915 \nvar_70 Cum val: 0.79650  - train: 0.82411 \nvar_71 Cum val: 0.79689  - train: 0.82493 \nvar_72 Cum val: 0.80084  - train: 0.82834 \nvar_73 Cum val: 0.80084  - train: 0.82864 \nvar_74 Cum val: 0.80642  - train: 0.83373 \nvar_75 Cum val: 0.81481  - train: 0.84084 \nvar_76 Cum val: 0.81625  - train: 0.84256 \nvar_77 Cum val: 0.81730  - train: 0.84370 \nvar_78 Cum val: 0.81747  - train: 0.84405 \nvar_79 Cum val: 0.81836  - train: 0.84504 \nvar_80 Cum val: 0.82141  - train: 0.84776 \nvar_81 Cum val: 0.82280  - train: 0.84912 \nvar_82 Cum val: 0.82333  - train: 0.84994 \nvar_83 Cum val: 0.82501  - train: 0.85161 \nvar_84 Cum val: 0.82632  - train: 0.85287 \nvar_85 Cum val: 0.82805  - train: 0.85454 \nvar_86 Cum val: 0.83110  - train: 0.85727 \nvar_87 Cum val: 0.83217  - train: 0.85846 \nvar_88 Cum val: 0.83473  - train: 0.86072 \nvar_89 Cum val: 0.83621  - train: 0.86220 \nvar_90 Cum val: 0.83687  - train: 0.86297 \nvar_91 Cum val: 0.84039  - train: 0.86605 \nvar_92 Cum val: 0.84043  - train: 0.86621 \nvar_93 Cum val: 0.84116  - train: 0.86721 \nvar_94 Cum val: 0.84163  - train: 0.86791 \nvar_95 Cum val: 0.84215  - train: 0.86859 \nvar_96 Cum val: 0.84320  - train: 0.86968 \nvar_97 Cum val: 0.84518  - train: 0.87147 \nvar_98 Cum val: 0.84779  - train: 0.87378 \nvar_99 Cum val: 0.85117  - train: 0.87664 \nvar_100 Cum val: 0.85483  - train: 0.87970 \nvar_101 Cum val: 0.85549  - train: 0.88041 \nvar_102 Cum val: 0.85623  - train: 0.88127 \nvar_103 Cum val: 0.85657  - train: 0.88174 \nvar_104 Cum val: 0.85726  - train: 0.88252 \nvar_105 Cum val: 0.85888  - train: 0.88402 \nvar_106 Cum val: 0.85927  - train: 0.88461 \nvar_107 Cum val: 0.86081  - train: 0.88605 \nvar_108 Cum val: 0.86211  - train: 0.88727 \nvar_109 Cum val: 0.86234  - train: 0.88760 \nvar_110 Cum val: 0.86399  - train: 0.88928 \nvar_111 Cum val: 0.86576  - train: 0.89094 \nvar_112 Cum val: 0.86749  - train: 0.89244 \nvar_113 Cum val: 0.86801  - train: 0.89304 \nvar_114 Cum val: 0.86964  - train: 0.89453 \nvar_115 Cum val: 0.87039  - train: 0.89536 \nvar_116 Cum val: 0.87045  - train: 0.89559 \nvar_117 Cum val: 0.87148  - train: 0.89663 \nvar_118 Cum val: 0.87221  - train: 0.89748 \nvar_119 Cum val: 0.87271  - train: 0.89811 \nvar_120 Cum val: 0.87480  - train: 0.89991 \nvar_121 Cum val: 0.87508  - train: 0.90036 \nvar_122 Cum val: 0.87602  - train: 0.90121 \nvar_123 Cum val: 0.87682  - train: 0.90203 \nvar_124 Cum val: 0.87710  - train: 0.90238 \nvar_125 Cum val: 0.88135  - train: 0.90581 \nvar_126 Cum val: 0.88143  - train: 0.90600 \nvar_127 Cum val: 0.88245  - train: 0.90703 \nvar_128 Cum val: 0.88283  - train: 0.90748 \nvar_129 Cum val: 0.88292  - train: 0.90772 \nvar_130 Cum val: 0.88325  - train: 0.90814 \nvar_131 Cum val: 0.88405  - train: 0.90888 \nvar_132 Cum val: 0.88672  - train: 0.91108 \nvar_133 Cum val: 0.88835  - train: 0.91243 \nvar_134 Cum val: 0.88987  - train: 0.91366 \nvar_135 Cum val: 0.89126  - train: 0.91491 \nvar_136 Cum val: 0.89185  - train: 0.91550 \nvar_137 Cum val: 0.89234  - train: 0.91599 \nvar_138 Cum val: 0.89243  - train: 0.91618 \nvar_139 Cum val: 0.89242  - train: 0.91635 \nvar_140 Cum val: 0.89373  - train: 0.91736 \nvar_141 Cum val: 0.89492  - train: 0.91842 \nvar_142 Cum val: 0.89512  - train: 0.91885 \nvar_143 Cum val: 0.89595  - train: 0.91956 \nvar_144 Cum val: 0.89600  - train: 0.91978 \nvar_145 Cum val: 0.89604  - train: 0.91988 \nvar_146 Cum val: 0.89669  - train: 0.92053 \nvar_147 Cum val: 0.89756  - train: 0.92139 \nvar_148 Cum val: 0.89904  - train: 0.92252 \nvar_149 Cum val: 0.90095  - train: 0.92413 \nvar_150 Cum val: 0.90274  - train: 0.92560 \nvar_151 Cum val: 0.90344  - train: 0.92622 \nvar_152 Cum val: 0.90369  - train: 0.92649 \nvar_153 Cum val: 0.90479  - train: 0.92744 \nvar_154 Cum val: 0.90618  - train: 0.92861 \nvar_155 Cum val: 0.90635  - train: 0.92889 \nvar_156 Cum val: 0.90738  - train: 0.92977 \nvar_157 Cum val: 0.90833  - train: 0.93058 \nvar_158 Cum val: 0.91022  - train: 0.93205 \nvar_159 Cum val: 0.91055  - train: 0.93239 \nvar_160 Cum val: 0.91056  - train: 0.93255 \nvar_161 Cum val: 0.91166  - train: 0.93346 \nvar_162 Cum val: 0.91191  - train: 0.93378 \nvar_163 Cum val: 0.91311  - train: 0.93473 \nvar_164 Cum val: 0.91380  - train: 0.93531 \nvar_165 Cum val: 0.91388  - train: 0.93551 \nvar_166 Cum val: 0.91393  - train: 0.93559 \nvar_167 Cum val: 0.91396  - train: 0.93567 \nvar_168 Cum val: 0.91535  - train: 0.93679 \nvar_169 Cum val: 0.91566  - train: 0.93711 \nvar_170 Cum val: 0.91603  - train: 0.93752 \nvar_171 Cum val: 0.91690  - train: 0.93821 \nvar_172 Cum val: 0.91691  - train: 0.93832 \nvar_173 Cum val: 0.91840  - train: 0.93956 \nvar_174 Cum val: 0.91944  - train: 0.94040 \nvar_175 Cum val: 0.92008  - train: 0.94100 \nvar_176 Cum val: 0.92021  - train: 0.94121 \nvar_177 Cum val: 0.92037  - train: 0.94145 \nvar_178 Cum val: 0.92076  - train: 0.94181 \nvar_179 Cum val: 0.92103  - train: 0.94211 \nvar_180 Cum val: 0.92157  - train: 0.94258 \nvar_181 Cum val: 0.92300  - train: 0.94368 \nvar_182 Cum val: 0.92333  - train: 0.94400 \n","output_type":"stream"}],"execution_count":16},{"cell_type":"markdown","source":"# 评估集成的效果","metadata":{}},{"cell_type":"code","source":"preds_train_cum = (preds_train_cum - preds_train_cum.min()) / (preds_train_cum.max() - preds_train_cum.min())\npreds_oof_cum=(preds_oof_cum - preds_oof_cum.min()) / (preds_oof_cum.max() - preds_oof_cum.min())","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T14:31:11.024916Z","iopub.execute_input":"2025-09-19T14:31:11.025488Z","iopub.status.idle":"2025-09-19T14:31:11.032478Z","shell.execute_reply.started":"2025-09-19T14:31:11.025444Z","shell.execute_reply":"2025-09-19T14:31:11.031739Z"}},"outputs":[],"execution_count":17},{"cell_type":"code","source":"# 1. 评估训练集\ntrain_auc = evaluate_binary_classifier(\n    y_true=target_train,  # 真实的训练标签\n    y_pred=(preds_train_cum > 0.5).astype(int),  # 二值化预测值（标签）\n    y_proba=preds_train_cum,  # 概率值\n    model_name=\"lightgbm_e\",  # 模型名称\n    title_suffix=\"(Training Set)\",  # 标题后缀\n    save_results=True\n)\n\n# 2. 评估验证集\nval_auc = evaluate_binary_classifier(\n    y_true=target_train,  # 真实的验证标签\n    y_pred=(preds_oof_cum > 0.5).astype(int),  # 二值化预测值（标签）\n    y_proba=preds_oof_cum,  # 概率值\n    model_name=\"lightgbm_e\",  # 模型名称\n    title_suffix=\"(Validation Set)\",  # 标题后缀\n    save_results=True\n)","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T14:31:11.034152Z","iopub.execute_input":"2025-09-19T14:31:11.034512Z","iopub.status.idle":"2025-09-19T14:31:15.060107Z","shell.execute_reply.started":"2025-09-19T14:31:11.034490Z","shell.execute_reply":"2025-09-19T14:31:15.059026Z"}},"outputs":[{"name":"stdout","text":"\n=== (Training Set) ===\nAccuracy: 0.9255\nAUC: 0.9440\n\n分类报告:\n              precision    recall  f1-score   support\n\n           0       0.92      1.00      0.96    179902\n           1       0.95      0.27      0.42     20098\n\n    accuracy                           0.93    200000\n   macro avg       0.94      0.64      0.69    200000\nweighted avg       0.93      0.93      0.91    200000\n\n","output_type":"stream"},{"output_type":"display_data","data":{"text/plain":"<Figure size 600x400 with 1 Axes>","image/png":"iVBORw0KGgoAAAANSUhEUgAAAjYAAAGJCAYAAACZwnkIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8pXeV/AAAACXBIWXMAAA9hAAAPYQGoP6dpAABb5klEQVR4nO3de1zO9/8/8MdVdBAdlEqkcpiU45IWhmhdrLGGOW5yDCtUc2pziM348N2cx8eGbBizTTOHSM4rpwjlsJAwyrEuhUq9fn/49f54u67OcXF53G+3943r/Xq+X+/n9e66up6936/361IIIQSIiIiIdICethMgIiIiqiwsbIiIiEhnsLAhIiIincHChoiIiHQGCxsiIiLSGSxsiIiISGewsCEiIiKdwcKGiIiIdAYLGyIiItIZLGyoXCIiIqBQKHDlyhVpXadOndCpU6dy9depUyc0bdq0cpIrwb59+6BQKPDbb7+9lP29DJ999hnee++9l7pPTa+B0ir8Gezbt6/S83qVnD17FlWqVEFiYqK2U3mtVeR3S1kpFAqEh4dLj8PDw6FQKHDnzp2Xsn9HR0cMHjz4pexLV7GwodfKjRs3EB4ejoSEBG2n8spISUnBjz/+iC+++ALA0w8BhUJR4vLsL+83zZkzZ9C7d284ODjAyMgIderUwXvvvYfFixeXq7/169djwYIFautdXFzg6+uLadOmldhHaX5mr2IxGBsbi/DwcGRkZJQqfvDgwbLnU716ddSvXx+9e/fG77//joKCAq3k9TK9yrnpgiraToB0x65du174Pm7cuIEZM2bA0dERLVu2fOH7ex0sXLgQTk5O8PLyAgB8+eWXGD58uNR+7NgxLFq0CF988QWaNGkirW/evHmF9vvpp5+iX79+MDQ0LPO2HTp0wKNHj2BgYFChHMojNjYWXl5eqFevHkaMGAFbW1tcu3YNhw8fxsKFCzFmzJgy97l+/XokJiYiODhYrW3UqFF4//33cenSJTRo0KDIPn7++WfZ459++gnR0dFq65/9Gb4KYmNjMWPGDAwePBjm5ual2sbQ0BA//vgjAODRo0dITU3FX3/9hd69e6NTp074888/YWpqKsWX53dLefIqzKdKlRf70VhcbhcuXICeHs85VAQLG6o02viQetPl5eVh3bp1GDVqlLTu+UtSRkZGWLRoEd57771iT+dnZ2fDxMSk1PvW19eHvr5+mXMGAD09PRgZGZVr24qaNWsWzMzMcOzYMbUPlVu3blX6/ry9vWFhYYE1a9Zg5syZRcZ98sknsseHDx9GdHS02vryEELg8ePHMDY2rnBflaFKlSpqz+vrr7/GnDlzEBYWhhEjRmDjxo1S24v+3VJQUIDc3FwYGRlp7XVZqDx/KJAcy0KqNJqug6empqJHjx4wMTGBtbU1QkJCsHPnziJPqZ89exZeXl6oVq0a6tSpg7lz50pt+/btg7u7OwBgyJAh0qnsiIgIKWbp0qWoX78+jI2N0aZNGxw8eLDI6/P5+fn44osvYGtrCxMTE/To0QPXrl1Te05NmzbF6dOn0bFjR1SrVg0NGzaUxufs378fHh4eMDY2RuPGjbF79+4yH7eCggIsWLAArq6uMDIygo2NDUaOHIn79++XuO2hQ4dw584deHt7l2mfheMGzp49iwEDBsDCwgLt27cHAJw+fRqDBw9G/fr1YWRkBFtbWwwdOhR3796V9aFpjI2joyM++OADHDp0CG3atIGRkRHq16+Pn376SbatpjE2hce6uNdAobK+rp516dIluLq6avwr3traWm3d2rVr4ebmBmNjY9SsWRP9+vWTvU46deqEbdu2ITU1VXpNOjo6Su1Vq1aVzkJU1OrVq9G5c2dYW1vD0NAQLi4uWLZsmVpc4c9h586daN26NYyNjfHf//4XQNmO3ZEjR9C1a1eYmZmhWrVq6NixI/7++2+pPTw8HBMmTAAAODk5Sc+/POOuAGDy5Mnw8fHBpk2b8M8//0jrNb2HFy9eDFdXV1SrVg0WFhZo3bo11q9fX6q8FAoFgoKCsG7dOri6usLQ0BBRUVFSm6bLtHfu3EGfPn1gamoKS0tLjBs3Do8fP5bar1y5ovb7qNCzfZaUm6YxNpcvX8bHH3+MmjVrolq1anjnnXewbds2WUzhe+rXX3/FrFmzULduXRgZGaFLly64ePFikcdcF/GMDb0w2dnZ6Ny5M27evIlx48bB1tYW69evx969ezXG379/H127dkXPnj3Rp08f/Pbbb5g0aRKaNWuGbt26oUmTJpg5cyamTZuGgIAAvPvuuwCAtm3bAgCWLVuGoKAgvPvuuwgJCcGVK1fg5+cHCwsL1K1bV21/s2bNgkKhwKRJk3Dr1i0sWLAA3t7eSEhIkP1le//+fXzwwQfo168fPv74Yyxbtgz9+vXDunXrEBwcjFGjRmHAgAGYN28eevfujWvXrqFGjRqlPk4jR45EREQEhgwZgrFjxyIlJQVLlizByZMn8ffff6Nq1apFbhsbGwuFQoFWrVqVen/P+vjjj9GoUSN88803EEIAAKKjo3H58mUMGTIEtra2SEpKwooVK5CUlITDhw9DoVAU2+fFixfRu3dvDBs2DP7+/li1ahUGDx4MNzc3uLq6FrttSa8BoOyvq+c5ODggLi4OiYmJJQ5YnzVrFqZOnYo+ffpg+PDhuH37NhYvXowOHTrg5MmTMDc3x5dffonMzExcv34d8+fPBwBUr15d1o+bmxv+/PNPqFQq2SWWslq2bBlcXV3Ro0cPVKlSBX/99Rc+++wzFBQUIDAwUBZ74cIF9O/fHyNHjsSIESPQuHHjMh27PXv2oFu3bnBzc8P06dOhp6cnFVYHDx5EmzZt0LNnT/zzzz/45ZdfMH/+fFhZWQEAatWqVe7n+Omnn2LXrl2Ijo7GW2+9pTHmhx9+wNixY9G7d2+pwDh9+jSOHDmCAQMGlCqvPXv24Ndff0VQUBCsrKxkxagmffr0gaOjI2bPno3Dhw9j0aJFuH//vlrRXpKyHrP09HS0bdsWDx8+xNixY2FpaYk1a9agR48e+O233/DRRx/J4ufMmQM9PT2MHz8emZmZmDt3LgYOHIgjR46UKc/XmiAqh9WrVwsAIiUlRVrXsWNH0bFjR+nxt99+KwCIyMhIad2jR4+Es7OzACD27t0r2xaA+Omnn6R1OTk5wtbWVvTq1Utad+zYMQFArF69WpZPTk6OsLS0FO7u7iIvL09aHxERIQDI8tq7d68AIOrUqSNUKpW0/tdffxUAxMKFC9XyWr9+vbTu/PnzAoDQ09MThw8fltbv3LlTY27FOXjwoAAg1q1bJ1sfFRWlcf3zPvnkE2FpaVlszKZNm9SO9/Tp0wUA0b9/f7X4hw8fqq375ZdfBABx4MABaZ2m14CDg4Na3K1bt4ShoaH4/PPPpXWFP4PyvAbK8rrSZNeuXUJfX1/o6+sLT09PMXHiRLFz506Rm5sri7ty5YrQ19cXs2bNkq0/c+aMqFKlimy9r6+vcHBwKHKf69evFwDEkSNHis3tWYGBgeL5X9GafjZKpVLUr19ftq7w5xAVFSVbX9pjV1BQIBo1aiSUSqUoKCiQ7d/JyUm899570rp58+apvQ6K4+/vL0xMTIpsP3nypAAgQkJCpHXP/2758MMPhaura7H7KS6vwvdvUlKSxrbp06dLjwvfKz169JDFffbZZwKAOHXqlBBCiJSUlCLf/8/3WVxuDg4Owt/fX3ocHBwsAIiDBw9K6x48eCCcnJyEo6OjyM/PF0L87z3VpEkTkZOTI8UuXLhQABBnzpxR25eu4qUoemGioqJQp04d9OjRQ1pnZGSEESNGaIyvXr267Lq7gYEB2rRpg8uXL5e4r+PHj+Pu3bsYMWKEbODfwIEDYWFhoXGbQYMGyc6s9O7dG7Vr18b27dvV8urXr5/0uHHjxjA3N0eTJk3g4eEhrS/8f2nyLbRp0yaYmZnhvffew507d6TFzc0N1atXL/EsxN27d4t8fqXx7NicQs+erXr8+DHu3LmDd955BwBw4sSJEvt0cXGRzqYBT/8Sbdy4camOS2leA2V9XT3vvffeQ1xcHHr06IFTp05h7ty5UCqVqFOnDrZs2SLF/fHHHygoKECfPn1kPxtbW1s0atSo1GeIAEg/o4reMvzszyYzMxN37txBx44dcfnyZWRmZspinZycoFQqZetKe+wSEhKQnJyMAQMG4O7du9Jzz87ORpcuXXDgwIFKu3vpeYVnux48eFBkjLm5Oa5fv45jx46Vez8dO3aEi4tLqeOfPyNWOMj8+d8XlW379u1o06aNdKkYeHqMAgICcOXKFZw9e1YWP2TIENmYpML3Yll+L73ueCmKXpjU1FQ0aNBA7dJFw4YNNcbXrVtXLdbCwgKnT58u1b409V2lSpUiTzE3atRI9lihUKBhw4Zq4wM05WVmZgZ7e3u1dQBKNTamUHJyMjIzMzWO7QBKN5hV/P9LSOXh5OSktu7evXuYMWMGNmzYoLb/5z88NalXr57aOgsLi1Idl9K8Bsr6utLE3d0df/zxB3Jzc3Hq1Cls3rwZ8+fPR+/evZGQkAAXFxckJydDCKH2OilU3CXC5xX+jEq6jFeSv//+G9OnT0dcXBwePnwoa8vMzJReg4Dmn21pj11ycjIAwN/fv8hcMjMzK1RUFyUrKwsAir2cO2nSJOzevRtt2rRBw4YN4ePjgwEDBqBdu3al3o+m41Oc518HDRo0gJ6eXrnHE5VWamqq7A+oQoV3x6WmpsouqT7//iv8GZXl99LrjoUNvTKKusOmIh/claGovCoj34KCAlhbW2PdunUa20saq2BpaVmhX1ia7pLp06cPYmNjMWHCBLRs2RLVq1dHQUEBunbtWqq/0ityXF72a8DAwADu7u5wd3fHW2+9hSFDhmDTpk2YPn06CgoKoFAosGPHDo15PT+OpjiFP6PC8RTlcenSJXTp0gXOzs747rvvYG9vDwMDA2zfvh3z589X+9lU5A6owr7mzZtX5LQKZXn+ZVE4mWFxhWqTJk1w4cIFbN26FVFRUfj999/x/fffY9q0aZgxY0ap9lPRO8SeLw6LKlrz8/MrtJ+yelV/j75MLGzohXFwcMDZs2chhJC96SsyQr+oXx4ODg5S34XzuQDAkydPcOXKFY1zthT+VVpICIGLFy9WeH6XsmjQoAF2796Ndu3alesXrbOzM9atW6f213p53b9/HzExMZgxY4ZsUrnnj5U2vYjXFQC0bt0aAHDz5k0AT382Qgg4OTkVOYi1UElnYlJSUqCnp1diP8X566+/kJOTgy1btsj+Ki/LJbHSHrvC+XZMTU1LvOOuomehnvfzzz9DoVCUOJO2iYkJ+vbti759+yI3Nxc9e/bErFmzEBYWBiMjo0rPKzk5WXaW5+LFiygoKJDOCBeeGXl+0r3Cs8nPKktuDg4OuHDhgtr68+fPS+0kxzE29MIolUr8+++/snELjx8/xg8//FDuPgvnWXn+l0fr1q1haWmJH374AU+ePJHWr1u3rsgzGj/99JPsOv5vv/2GmzdvSnffvAx9+vRBfn4+vvrqK7W2J0+elDgzqaenJ4QQiI+Pr5R8Cv/ae/6vO02z6mpLRV9Xe/fu1fjXa+FYicaNGwN4eveKvr4+ZsyYoRYvhJDd/m5iYlLsZbr4+Hi4urpWqPjU9LPJzMzE6tWrS91HaY+dm5sbGjRogP/7v/+TLg096/bt29L/i3pPlsecOXOwa9cu9O3bt8hLgADUph4wMDCAi4sLhBDIy8ur9LyAp1NJPKtwlurC3xempqawsrLCgQMHZHHff/+9Wl9lye3999/H0aNHERcXJ63Lzs7GihUr4OjoWKZxQm8KnrGhF2bkyJFYsmQJ+vfvj3HjxqF27dpYt26dNAFWef6iatCgAczNzbF8+XLUqFEDJiYm8PDwgJOTE8LDwzFmzBh07twZffr0wZUrVxAREaFxTAEA1KxZE+3bt8eQIUOQnp6OBQsWoGHDhqUehFoZOnbsiJEjR2L27NlISEiAj48PqlatiuTkZGzatAkLFy5E7969i9y+ffv2sLS0xO7du9G5c+cK52NqaooOHTpg7ty5yMvLQ506dbBr1y6kpKRUuO/KUtHX1ZgxY/Dw4UN89NFHcHZ2Rm5uLmJjY7Fx40Y4OjpiyJAhAJ6+1r7++muEhYVJUwfUqFEDKSkp2Lx5MwICAjB+/HgATwuBjRs3IjQ0FO7u7qhevTq6d+8O4Okkivv378dnn31Woeft4+MDAwMDdO/eHSNHjkRWVhZ++OEHWFtbS2eZSlLaY6enp4cff/wR3bp1g6urK4YMGYI6derg33//xd69e2Fqaoq//vpLeu7A0xmv+/Xrh6pVq6J79+7FTvb45MkTrF27FsDTwio1NRVbtmzB6dOn4eXlhRUrVpR4LGxtbdGuXTvY2Njg3LlzWLJkCXx9faWxOeXJqzgpKSno0aMHunbtiri4OKxduxYDBgxAixYtpJjhw4djzpw5GD58OFq3bo0DBw7I5uMpVJbcJk+ejF9++QXdunXD2LFjUbNmTaxZswYpKSn4/fffOUuxJi/3JizSFaW53VsIIS5fvix8fX2FsbGxqFWrlvj888/F77//LgDIbpXu2LGjxts3/f391W6j/fPPP4WLi4uoUqWK2u2VixYtEg4ODsLQ0FC0adNG/P3338LNzU107dpViim8LfKXX34RYWFhwtraWhgbGwtfX1+Rmpoq21dReTk4OAhfX1+19QBEYGCgpkNWrBUrVgg3NzdhbGwsatSoIZo1ayYmTpwobty4UeK2Y8eOFQ0bNiyyvbjbvW/fvq0Wf/36dfHRRx8Jc3NzYWZmJj7++GNx48YNtVtWi7rdW9Nxef61UdTt3qV9DZT2daXJjh07xNChQ4Wzs7OoXr26MDAwEA0bNhRjxowR6enpavG///67aN++vTAxMREmJibC2dlZBAYGigsXLkgxWVlZYsCAAcLc3FwAkOW7Y8cOAUAkJycXm9fzNN3uvWXLFtG8eXNhZGQkHB0dxX/+8x+xatWqUv8chCjbsTt58qTo2bOnsLS0FIaGhsLBwUH06dNHxMTEyOK++uorUadOHaGnp1fird/+/v4CgLRUq1ZNODo6il69eonffvtNun35Wc+/fv773/+KDh06SHk1aNBATJgwQWRmZpYqr+Lep8+/zgvfK2fPnhW9e/cWNWrUEBYWFiIoKEg8evRItu3Dhw/FsGHDhJmZmahRo4bo06ePuHXrllqfxeX2/O3eQghx6dIl0bt3b2Fubi6MjIxEmzZtxNatW2Uxhe+pTZs2ydYXdxu6rlII8QaNKKJXwoIFCxASEoLr16+jTp06L3RfBQUFqFWrFnr27FmhS2CvssuXL8PZ2Rk7duxAly5dtJ2O1rzM11VZ+Pn5QaFQYPPmzdpOpUiv6rEjKg8WNvRCPXr0SG1elFatWiE/P1/jKdqKePz4MQwNDWWXIgpn9F27di0GDhxYqft7lYwePRoXL15EdHS0tlN5KV7m66oizp07h2bNmiEhIaHEWY5fltfl2BGVF8fY0AvVs2dP1KtXDy1btkRmZibWrl2L8+fPF3l7c0UcPnwYISEh+Pjjj2FpaYkTJ05g5cqVaNq0KT7++ONK319x8vPzZQMsNalevXql3TKr6fuCdNnLfF1VRJMmTWSD2V8Fr8uxIyo37V4JI103f/584erqKkxMTISRkZF4++23xYYNG17IvlJSUkT37t2FjY2NqFq1qrCxsRFDhgzROG7iRSu8rl3c8vw1dyq9l/m60jU8dqTreCmK6AV4/PgxDh06VGxM/fr1Ub9+/ZeUERHRm4GFDREREekM3gBPREREOoODh1+igoIC3LhxAzVq1Kj06b6JiIh0mRACDx48gJ2dXbETE7KweYlu3Lih9o3QREREVHrXrl1D3bp1i2xnYfMSFU71fe3aNZiammo5GyIioteHSqWCvb299FlaJG3ekvXNN9+I1q1bi+rVq4tatWqJDz/8UJw/f14W8+jRI/HZZ5+JmjVrChMTE9GzZ0+RlpYmi0lNTRXvv/++NEX4+PHjRV5enixm7969olWrVsLAwEA0aNBA4/TSS5YskU3Hf+TIkTLnUpzMzEwBQG3abyIiIipeaT9DtTp4eP/+/QgMDMThw4cRHR2NvLw8+Pj4IDs7W4oJCQnBX3/9hU2bNmH//v24ceMGevbsKbXn5+fD19dX+iK7NWvWICIiAtOmTZNiUlJS4OvrCy8vLyQkJCA4OBjDhw/Hzp07pZjCL7CbPn06Tpw4gRYtWkCpVOLWrVulzoWIiIi07CUVWqVS+GVh+/fvF0IIkZGRIapWrSr7Uq9z584JACIuLk4IIcT27duFnp6e7MzJsmXLhKmpqcjJyRFCCDFx4kS1L9fr27evUCqV0uM2bdrIvhQtPz9f2NnZidmzZ5c6l5LwjI1uycnJEQ0aNBB///23tlN5YXbs2CFatGih8YsJiYheptfijM3zMjMzAQA1a9YEAMTHxyMvLw/e3t5SjLOzM+rVq4e4uDgAQFxcHJo1awYbGxspRqlUQqVSISkpSYp5to/CmMI+cnNzER8fL4vR09ODt7e3FFOaXJ6Xk5MDlUolW94EBw4cQPfu3WFnZweFQoHIyEi1mPT0dAwePBh2dnaoVq0aunbtiuTkZI39CSHQrVs3tb5OnTqF/v37w97eHsbGxmjSpAkWLlwo23bw4MFQKBRqi6urqxTj6OioMSYwMLDY57l8+XI4OTmhbdu20roTJ07gvffeg7m5OSwtLREQEICsrCypPSIiQuO+FAqF7Ozg82bNmoW2bduiWrVqMDc3Lzavu3fvom7dulAoFMjIyJDWnzx5Eq1atUL16tXRvXt33Lt3T2p78uQJ3NzccPToUVlfXbt2RdWqVTndPhG9Nl6ZwqagoADBwcFo166d9GVxaWlpMDAwUPtFbmNjg7S0NCnm2aKmsL2wrbgYlUqFR48e4c6dO8jPz9cY82wfJeXyvNmzZ8PMzExa3pQ7orKzs9GiRQssXbpUY7sQAn5+frh8+TL+/PNPnDx5Eg4ODvD29pZdhiy0YMECjbfHx8fHw9raGmvXrkVSUhK+/PJLhIWFYcmSJVLMwoULcfPmTWm5du0aatasKfvuqGPHjsliCr9IsrjvlxJCYMmSJRg2bJi07saNG/D29kbDhg1x5MgRREVFISkpCYMHD5Zi+vbtK9vXzZs3oVQq0bFjR1hbWxe5v9zcXHz88ccYPXp0kTGFhg0bhubNm6utHz58ODp37owTJ04gMzMT33zzjdT27bffol27dmjTpo3adoMHD8aiRYtK3C8R0SvhZZw+Ko1Ro0YJBwcHce3aNWndunXrhIGBgVqsu7u7mDhxohBCiBEjRggfHx9Ze3Z2tgAgtm/fLoQQolGjRuKbb76RxWzbtk0AEA8fPhT//vuvACBiY2NlMRMmTBBt2rQpdS7Pe/z4scjMzJSWa9euvXGXogCIzZs3y9ZduHBBABCJiYnSuvz8fFGrVi3xww8/yGJPnjwp6tSpI27evKmxr+d99tlnwsvLq8j2zZs3C4VCIa5cuVJkzLhx40SDBg1EQUFBkTHHjh0Tenp6QqVSSev++9//Cmtra9llm9OnTwsAIjk5WWM/t27dElWrVhU//fRTcU9Lsnr1amFmZlZk+/fffy86duwoYmJiBABx//59qc3Y2FicO3dOinv//feFEEJcunRJNGrUSPZcnpWamioAiIsXL5YqRyKiF+G1uhQVFBSErVu3Yu/evbJ7021tbZGbmys7nQ48vYxha2srxaSnp6u1F7YVF2NqagpjY2NYWVlBX19fY8yzfZSUy/MMDQ1hamoqW+jpJToAMDIyktbp6enB0NBQ9v1KDx8+xIABA7B06dIij/HzMjMzpUuZmqxcuRLe3t5wcHDQ2J6bm4u1a9di6NChxU6iePDgQbz11luy2w5zcnJgYGAgmzjK2NgYAIr83qiffvoJ1apVQ+/evYt9XqVx9uxZzJw5Ez/99JPGyatatGiB6OhoPHnyBDExMdJZnVGjRmHu3LlF3kJZr1492NjY4ODBgxXOkYjoRdNqYSOEQFBQEDZv3ow9e/bAyclJ1u7m5oaqVasiJiZGWnfhwgVcvXoVnp6eAABPT0+cOXNGNj4hOjoapqamcHFxkWKe7aMwprAPAwMDuLm5yWIKCgoQExMjxZQmFyqdwrFJYWFhuH//PnJzc/Gf//wH169fx82bN6W4kJAQtG3bFh9++GGp+o2NjcXGjRsREBCgsf3GjRvYsWMHhg8fXmQfkZGRyMjIkF0+0iQ1NRV2dnaydZ07d0ZaWhrmzZuH3Nxc3L9/H5MnTwYA2fN61sqVKzFgwACpACqvnJwc9O/fH/PmzUO9evU0xvz444/47bff0KBBAxgYGCAsLAw///wzqlWrBnd3dyiVSjRs2BBTpkxR29bOzg6pqakVypGI6KV4OSeQNBs9erQwMzMT+/btEzdv3pSWhw8fSjGjRo0S9erVE3v27BHHjx8Xnp6ewtPTU2p/8uSJaNq0qfDx8REJCQkiKipK1KpVS4SFhUkxly9fFtWqVRMTJkwQ586dE0uXLhX6+voiKipKitmwYYMwNDQUERER4uzZsyIgIECYm5vL7rYqKZeSvIl3RaGIy0fHjx8XLVq0EACEvr6+UCqVolu3bqJr165CCCH+/PNP0bBhQ/HgwYMS+xJCiDNnzggrKyvx1VdfFZnLN998IywtLaW75TTx8fERH3zwQYnPKyAgQLqU86x169YJGxsboa+vLwwMDMT48eOFjY2NmDNnjlpsbGysACCOHz9e4v4KFXUpKiQkRPTt21d6vHfvXrVLUc+7c+eOcHJyEteuXRMfffSRCA8PF1lZWaJJkyZiy5Ytsti2bdsWecmViOhlKO1nqFYLGwAal2cnzyucFM/CwkJUq1ZNfPTRR+LmzZuyfq5cuSK6desmjI2NhZWVlfj88881TtDXsmVLYWBgIOrXr69xgr7FixeLevXqCQMDA9GmTRtx+PBhWXtpcikOCxt1GRkZ4tatW0KIp7fcf/bZZ0KIp+NcFAqF0NfXlxYAQk9PT3Ts2FHWR1JSkrC2thZffPFFkfspKCgQDRs2FMHBwUXGXLlyRejp6YnIyMgSn9cXX3xRbFGblpYmHjx4ILKysoSenp749ddf1WKGDh0qWrZsWeK+nlVUYdOiRQuhp6cnHSs9PT2paJw2bZrGvgYNGiQWLlwohBDCwsJCJCUlCSGEGD9+vAgNDZXFOjs7i3nz5pUpVyKiyvRaFDZvGhY2Rfvnn3+Enp6e2LlzpxBCiJs3b4ozZ87IFgBi4cKF4vLly9J2iYmJwtraWkyYMKHY/gvPYJw5c6bImOnTpwtbW1u1oliTTZs2CQsLi2IHGAshxMqVK0W1atXUzpw8ePBAVK9eXSxevLjEfT2rqMLm4sWLsmO1atUqaUB8enq6Wvzu3buFu7u7NNDZ1NRUJCQkCCGeFpXjxo2TYh89eiSqVq0qdu/eXaZciYgqU2k/Q/ldUVTpsrKycPHiRelxSkoKEhISULNmTWn8x6ZNm1CrVi3Uq1cPZ86cwbhx4+Dn5wcfHx8ATwdraxowXK9ePWksVmJiIjp37gylUonQ0FDptnt9fX3UqlVLtt3KlSvh4eEhTSXwvIKCAqxevRr+/v6oUqXkt4WXlxeysrKQlJQk63PJkiVo27YtqlevjujoaEyYMAFz5sxRmyZg48aNePLkCT755BO1vo8ePYpBgwYhJiYGderUAQBcvXoV9+7dw9WrV5Gfn4+EhAQAQMOGDVG9enU0aNBA1sedO3cAAE2aNFHb9+PHjxEUFIRffvlFGmTcrl07LF26FIGBgfj999/x3XffSfGHDx+GoaEhx5IR0WtBIYQQ2k7iTaFSqWBmZobMzMwXcofUmFX7Kr3P8rh+PgGR80LU1ju3VcJ72NPBtKd2/46TURvxUHUfJmaWaNzWB+7dP4V+lapF9rtkmBfeD/wK9d9uDwA48mcEjm1ZoxZXw9IG/nM3SI9zHmZh9ee98W6/ILh2/EBj31cTj2HL/IkYOOsnWNiWbr6hqOUzYFrLDm17jZDWRf/4Da6cPoK8nEewsLVHK2VfOLf1Udv2t2+CYGplC58A9YG6hcdv0H9+ganV0+Ju98o5OB+7Uy3Wb8J81HVuWWQfIxb/BcNq1WVtsb//gIInT9C+7//mxMlI/xe7fvgaGWnX8JZHF3QcOA6K/1/07P3pWwAKeA0KLdVxedEWD+2k7RSISAtK+xnKwuYlelMKmzfFnWuX8Od3E/Dp7HUwMKrYXU2vqkcPMrH2y0HoO3U5TGvV1nY6AFjYEL2pSvsZ+krMY0P0OrKyb4C2vQKguqP5Vm5doLqbhk6fjHtlihoiopJwjA1RBTRp31XbKbxQNo6NYePYWNtpEBGVGs/YEBERkc5gYUNEREQ6g4UNERER6QwWNkRERKQzWNgQERGRzmBhQ0RERDqDhQ0RERHpDBY2REREpDNY2BAREZHOYGFDREREOoOFDREREekMFjZERESkM1jYEBERkc5gYUNEREQ6g4UNERER6QwWNkRERKQzWNgQERGRzmBhQ0RERDqDhQ0RERHpDBY2REREpDNY2BAREZHOYGFDREREOkOrhc2BAwfQvXt32NnZQaFQIDIyUtauUCg0LvPmzZNiHB0d1drnzJkj6+f06dN49913YWRkBHt7e8ydO1ctl02bNsHZ2RlGRkZo1qwZtm/fLmsXQmDatGmoXbs2jI2N4e3tjeTk5Mo7GERERFRhWi1ssrOz0aJFCyxdulRj+82bN2XLqlWroFAo0KtXL1nczJkzZXFjxoyR2lQqFXx8fODg4ID4+HjMmzcP4eHhWLFihRQTGxuL/v37Y9iwYTh58iT8/Pzg5+eHxMREKWbu3LlYtGgRli9fjiNHjsDExARKpRKPHz+u5KNCRERE5VVFmzvv1q0bunXrVmS7ra2t7PGff/4JLy8v1K9fX7a+Ro0aarGF1q1bh9zcXKxatQoGBgZwdXVFQkICvvvuOwQEBAAAFi5ciK5du2LChAkAgK+++grR0dFYsmQJli9fDiEEFixYgClTpuDDDz8EAPz000+wsbFBZGQk+vXrp3HfOTk5yMnJkR6rVKoSjggRERFVxGszxiY9PR3btm3DsGHD1NrmzJkDS0tLtGrVCvPmzcOTJ0+ktri4OHTo0AEGBgbSOqVSiQsXLuD+/ftSjLe3t6xPpVKJuLg4AEBKSgrS0tJkMWZmZvDw8JBiNJk9ezbMzMykxd7evnxPnoiIiErltSls1qxZgxo1aqBnz56y9WPHjsWGDRuwd+9ejBw5Et988w0mTpwotaelpcHGxka2TeHjtLS0YmOebX92O00xmoSFhSEzM1Narl27VpanTERERGWk1UtRZbFq1SoMHDgQRkZGsvWhoaHS/5s3bw4DAwOMHDkSs2fPhqGh4ctOU8bQ0FDrORAREb1JXoszNgcPHsSFCxcwfPjwEmM9PDzw5MkTXLlyBcDTcTrp6emymMLHheNyiop5tv3Z7TTFEBERkfa9FoXNypUr4ebmhhYtWpQYm5CQAD09PVhbWwMAPD09ceDAAeTl5Ukx0dHRaNy4MSwsLKSYmJgYWT/R0dHw9PQEADg5OcHW1lYWo1KpcOTIESmGiIiItE+rl6KysrJw8eJF6XFKSgoSEhJQs2ZN1KtXD8DTAmLTpk349ttv1baPi4vDkSNH4OXlhRo1aiAuLg4hISH45JNPpKJlwIABmDFjBoYNG4ZJkyYhMTERCxcuxPz586V+xo0bh44dO+Lbb7+Fr68vNmzYgOPHj0u3hCsUCgQHB+Prr79Go0aN4OTkhKlTp8LOzg5+fn4v8AgRERFRWWi1sDl+/Di8vLykx4XjZfz9/REREQEA2LBhA4QQ6N+/v9r2hoaG2LBhA8LDw5GTkwMnJyeEhITIxt2YmZlh165dCAwMhJubG6ysrDBt2jTpVm8AaNu2LdavX48pU6bgiy++QKNGjRAZGYmmTZtKMRMnTkR2djYCAgKQkZGB9u3bIyoqSm3MDxEREWmPQgghtJ3Em0KlUsHMzAyZmZkwNTWt9P7HrNpX6X0SvWoWD+2k7RSISAtK+xn6WoyxISIiIioNFjZERESkM1jYEBERkc5gYUNEREQ6g4UNERER6QwWNkRERKQzWNgQERGRzmBhQ0RERDqDhQ0RERHpDBY2REREpDNY2BAREZHOYGFDREREOoOFDREREekMFjZERESkM1jYEBERkc5gYUNEREQ6g4UNERER6QwWNkRERKQzWNgQERGRzmBhQ0RERDqDhQ0RERHpDBY2REREpDNY2BAREZHOYGFDREREOoOFDREREekMFjZERESkM7Ra2Bw4cADdu3eHnZ0dFAoFIiMjZe2DBw+GQqGQLV27dpXF3Lt3DwMHDoSpqSnMzc0xbNgwZGVlyWJOnz6Nd999F0ZGRrC3t8fcuXPVctm0aROcnZ1hZGSEZs2aYfv27bJ2IQSmTZuG2rVrw9jYGN7e3khOTq6cA0FERESVQquFTXZ2Nlq0aIGlS5cWGdO1a1fcvHlTWn755RdZ+8CBA5GUlITo6Ghs3boVBw4cQEBAgNSuUqng4+MDBwcHxMfHY968eQgPD8eKFSukmNjYWPTv3x/Dhg3DyZMn4efnBz8/PyQmJkoxc+fOxaJFi7B8+XIcOXIEJiYmUCqVePz4cSUeESIiIqoIhRBCaDsJAFAoFNi8eTP8/PykdYMHD0ZGRobamZxC586dg4uLC44dO4bWrVsDAKKiovD+++/j+vXrsLOzw7Jly/Dll18iLS0NBgYGAIDJkycjMjIS58+fBwD07dsX2dnZ2Lp1q9T3O++8g5YtW2L58uUQQsDOzg6ff/45xo8fDwDIzMyEjY0NIiIi0K9fv1I9R5VKBTMzM2RmZsLU1LSsh6hEY1btq/Q+iV41i4d20nYKRKQFpf0MfeXH2Ozbtw/W1tZo3LgxRo8ejbt370ptcXFxMDc3l4oaAPD29oaenh6OHDkixXTo0EEqagBAqVTiwoULuH//vhTj7e0t269SqURcXBwAICUlBWlpabIYMzMzeHh4SDGa5OTkQKVSyRYiIiJ6cV7pwqZr16746aefEBMTg//85z/Yv38/unXrhvz8fABAWloarK2tZdtUqVIFNWvWRFpamhRjY2Mjiyl8XFLMs+3PbqcpRpPZs2fDzMxMWuzt7cv0/ImIiKhsqmg7geI8e4mnWbNmaN68ORo0aIB9+/ahS5cuWsysdMLCwhAaGio9VqlULG6IiIheoFf6jM3z6tevDysrK1y8eBEAYGtri1u3bslinjx5gnv37sHW1laKSU9Pl8UUPi4p5tn2Z7fTFKOJoaEhTE1NZQsRERG9OK9VYXP9+nXcvXsXtWvXBgB4enoiIyMD8fHxUsyePXtQUFAADw8PKebAgQPIy8uTYqKjo9G4cWNYWFhIMTExMbJ9RUdHw9PTEwDg5OQEW1tbWYxKpcKRI0ekGCIiItI+rRY2WVlZSEhIQEJCAoCng3QTEhJw9epVZGVlYcKECTh8+DCuXLmCmJgYfPjhh2jYsCGUSiUAoEmTJujatStGjBiBo0eP4u+//0ZQUBD69esHOzs7AMCAAQNgYGCAYcOGISkpCRs3bsTChQtll4jGjRuHqKgofPvttzh//jzCw8Nx/PhxBAUFAXh6x1ZwcDC+/vprbNmyBWfOnMGgQYNgZ2cnu4uLiIiItEurY2yOHz8OLy8v6XFhseHv749ly5bh9OnTWLNmDTIyMmBnZwcfHx989dVXMDQ0lLZZt24dgoKC0KVLF+jp6aFXr15YtGiR1G5mZoZdu3YhMDAQbm5usLKywrRp02Rz3bRt2xbr16/HlClT8MUXX6BRo0aIjIxE06ZNpZiJEyciOzsbAQEByMjIQPv27REVFQUjI6MXeYiIiIioDF6ZeWzeBJzHhqjiOI8N0ZtJZ+axISIiIiotFjZERESkM1jYEBERkc5gYUNEREQ6g4UNERER6QwWNkRERKQzWNgQERGRzmBhQ0RERDqDhQ0RERHpDBY2REREpDNY2BAREZHOYGFDREREOoOFDREREekMFjZERESkM1jYEBERkc5gYUNEREQ6g4UNERER6QwWNkRERKQzWNgQERGRzmBhQ0RERDqDhQ0RERHpDBY2REREpDNY2BAREZHOYGFDREREOoOFDREREekMFjZERESkM1jYEBERkc7QamFz4MABdO/eHXZ2dlAoFIiMjJTa8vLyMGnSJDRr1gwmJiaws7PDoEGDcOPGDVkfjo6OUCgUsmXOnDmymNOnT+Pdd9+FkZER7O3tMXfuXLVcNm3aBGdnZxgZGaFZs2bYvn27rF0IgWnTpqF27dowNjaGt7c3kpOTK+9gEBERUYVptbDJzs5GixYtsHTpUrW2hw8f4sSJE5g6dSpOnDiBP/74AxcuXECPHj3UYmfOnImbN29Ky5gxY6Q2lUoFHx8fODg4ID4+HvPmzUN4eDhWrFghxcTGxqJ///4YNmwYTp48CT8/P/j5+SExMVGKmTt3LhYtWoTly5fjyJEjMDExgVKpxOPHjyv5qBAREVF5KYQQQttJAIBCocDmzZvh5+dXZMyxY8fQpk0bpKamol69egCenrEJDg5GcHCwxm2WLVuGL7/8EmlpaTAwMAAATJ48GZGRkTh//jwAoG/fvsjOzsbWrVul7d555x20bNkSy5cvhxACdnZ2+PzzzzF+/HgAQGZmJmxsbBAREYF+/fqV6jmqVCqYmZkhMzMTpqampdqmLMas2lfpfRK9ahYP7aTtFIhIC0r7GfpajbHJzMyEQqGAubm5bP2cOXNgaWmJVq1aYd68eXjy5InUFhcXhw4dOkhFDQAolUpcuHAB9+/fl2K8vb1lfSqVSsTFxQEAUlJSkJaWJosxMzODh4eHFKNJTk4OVCqVbCEiIqIXp4q2Eyitx48fY9KkSejfv7+sUhs7dizefvtt1KxZE7GxsQgLC8PNmzfx3XffAQDS0tLg5OQk68vGxkZqs7CwQFpamrTu2Zi0tDQp7tntNMVoMnv2bMyYMaOcz5iIiIjK6rUobPLy8tCnTx8IIbBs2TJZW2hoqPT/5s2bw8DAACNHjsTs2bNhaGj4slOVCQsLk+WnUqlgb2+vxYyIiIh02yt/KaqwqElNTUV0dHSJY1M8PDzw5MkTXLlyBQBga2uL9PR0WUzhY1tb22Jjnm1/djtNMZoYGhrC1NRUthAREdGL80oXNoVFTXJyMnbv3g1LS8sSt0lISICenh6sra0BAJ6enjhw4ADy8vKkmOjoaDRu3BgWFhZSTExMjKyf6OhoeHp6AgCcnJxga2sri1GpVDhy5IgUQ0RERNqn1UtRWVlZuHjxovQ4JSUFCQkJqFmzJmrXro3evXvjxIkT2Lp1K/Lz86XxLDVr1oSBgQHi4uJw5MgReHl5oUaNGoiLi0NISAg++eQTqWgZMGAAZsyYgWHDhmHSpElITEzEwoULMX/+fGm/48aNQ8eOHfHtt9/C19cXGzZswPHjx6VbwhUKBYKDg/H111+jUaNGcHJywtSpU2FnZ1fsXVxERET0cmm1sDl+/Di8vLykx4XjUfz9/REeHo4tW7YAAFq2bCnbbu/evejUqRMMDQ2xYcMGhIeHIycnB05OTggJCZGNazEzM8OuXbsQGBgINzc3WFlZYdq0aQgICJBi2rZti/Xr12PKlCn44osv0KhRI0RGRqJp06ZSzMSJE5GdnY2AgABkZGSgffv2iIqKgpGR0Ys4NERERFQOr8w8Nm8CzmNDVHGcx4bozaST89gQERERFYeFDREREekMFjZERESkM1jYEBERkc5gYUNEREQ6g4UNERER6QwWNkRERKQzWNgQERGRzmBhQ0RERDqDhQ0RERHpjHIVNvXr18fdu3fV1mdkZKB+/foVToqIiIioPMpV2Fy5cgX5+flq63NycvDvv/9WOCkiIiKi8ijTt3sXfts2AOzcuRNmZmbS4/z8fMTExMDR0bHSkiMiIiIqizIVNn5+fgAAhUIBf39/WVvVqlXh6OiIb7/9ttKSIyIiIiqLMhU2BQUFAAAnJyccO3YMVlZWLyQpIiIiovIoU2FTKCUlpbLzICIiIqqwchU2ABATE4OYmBjcunVLOpNTaNWqVRVOjIiIiKisylXYzJgxAzNnzkTr1q1Ru3ZtKBSKys6LiIiIqMzKVdgsX74cERER+PTTTys7HyIiIqJyK9c8Nrm5uWjbtm1l50JERERUIeUqbIYPH47169dXdi5EREREFVKuS1GPHz/GihUrsHv3bjRv3hxVq1aVtX/33XeVkhwRERFRWZSrsDl9+jRatmwJAEhMTJS1cSAxERERaUu5Cpu9e/dWdh5EREREFVauMTZEREREr6JynbHx8vIq9pLTnj17yp0QERERUXmVq7ApHF9TKC8vDwkJCUhMTFT7ckwiIiKil6Vcl6Lmz58vW5YsWYJDhw4hODhY7Q6p4hw4cADdu3eHnZ0dFAoFIiMjZe1CCEybNg21a9eGsbExvL29kZycLIu5d+8eBg4cCFNTU5ibm2PYsGHIysqSxZw+fRrvvvsujIyMYG9vj7lz56rlsmnTJjg7O8PIyAjNmjXD9u3by5wLERERaVeljrH55JNPyvQ9UdnZ2WjRogWWLl2qsX3u3LlYtGgRli9fjiNHjsDExARKpRKPHz+WYgYOHIikpCRER0dj69atOHDgAAICAqR2lUoFHx8fODg4ID4+HvPmzUN4eDhWrFghxcTGxqJ///4YNmwYTp48CT8/P/j5+cnu+CpNLkRERKRdCiGEqKzOfv75Z0yaNAk3btwoeyIKBTZv3gw/Pz8AT8+Q2NnZ4fPPP8f48eMBAJmZmbCxsUFERAT69euHc+fOwcXFBceOHUPr1q0BAFFRUXj//fdx/fp12NnZYdmyZfjyyy+RlpYGAwMDAMDkyZMRGRmJ8+fPAwD69u2L7OxsbN26VcrnnXfeQcuWLbF8+fJS5VIaKpUKZmZmyMzMhKmpaZmPUUnGrNpX6X0SvWoWD+2k7RSISAtK+xlarjM2PXv2lC0fffQR3nnnHQwZMgQjR44sd9LPSklJQVpaGry9vaV1ZmZm8PDwQFxcHAAgLi4O5ubmUlEDAN7e3tDT08ORI0ekmA4dOkhFDQAolUpcuHAB9+/fl2Ke3U9hTOF+SpOLJjk5OVCpVLKFiIiIXpxyDR42MzOTPdbT00Pjxo0xc+ZM+Pj4VEpiaWlpAAAbGxvZehsbG6ktLS0N1tbWsvYqVaqgZs2ashgnJye1PgrbLCwskJaWVuJ+SspFk9mzZ2PGjBklP1kiIiKqFOUqbFavXl3ZeeiksLAwhIaGSo9VKhXs7e21mBEREZFuK1dhUyg+Ph7nzp0DALi6uqJVq1aVkhQA2NraAgDS09NRu3ZtaX16erp0u7mtrS1u3bol2+7Jkye4d++etL2trS3S09NlMYWPS4p5tr2kXDQxNDSEoaFhqZ4vERERVVy5xtjcunULnTt3hru7O8aOHYuxY8fCzc0NXbp0we3btyslMScnJ9ja2iImJkZap1KpcOTIEXh6egIAPD09kZGRgfj4eClmz549KCgogIeHhxRz4MAB5OXlSTHR0dFo3LgxLCwspJhn91MYU7if0uRCRERE2leuwmbMmDF48OABkpKScO/ePdy7dw+JiYlQqVQYO3ZsqfvJyspCQkICEhISADwdpJuQkICrV69CoVAgODgYX3/9NbZs2YIzZ85g0KBBsLOzk+6catKkCbp27YoRI0bg6NGj+PvvvxEUFIR+/frBzs4OADBgwAAYGBhg2LBhSEpKwsaNG7Fw4ULZJaJx48YhKioK3377Lc6fP4/w8HAcP34cQUFBAFCqXIiIiEj7ynUpKioqCrt370aTJk2kdS4uLli6dGmZBg8fP34cXl5e0uPCYsPf3x8RERGYOHEisrOzERAQgIyMDLRv3x5RUVEwMjKStlm3bh2CgoLQpUsX6OnpoVevXli0aJHUbmZmhl27diEwMBBubm6wsrLCtGnTZHPdtG3bFuvXr8eUKVPwxRdfoFGjRoiMjETTpk2lmNLkQkRERNpVrnlsatSogYMHD6qNLzl58iQ6duzI25qLwHlsiCqO89gQvZle6Dw2nTt3xrhx42QT8f37778ICQlBly5dytMlERERUYWVq7BZsmQJVCoVHB0d0aBBAzRo0ABOTk5QqVRYvHhxZedIREREVCrlGmNjb2+PEydOYPfu3dLXEjRp0kRt9l4iIiKil6lMZ2z27NkDFxcXqFQqKBQKvPfeexgzZgzGjBkDd3d3uLq64uDBgy8qVyIiIqJilamwWbBgAUaMGKFx0I6ZmRlGjhyJ7777rtKSIyIiIiqLMhU2p06dQteuXYts9/HxkU2WR0RERPQylamwSU9PR9WqVYtsr1KlSqXNPExERERUVmUqbOrUqYPExMQi20+fPi37LiUiIiKil6lMhc3777+PqVOn4vHjx2ptjx49wvTp0/HBBx9UWnJEREREZVGm272nTJmCP/74A2+99RaCgoLQuHFjAMD58+exdOlS5Ofn48svv3whiRIRERGVpEyFjY2NDWJjYzF69GiEhYWh8NsYFAoFlEolli5dChsbmxeSKBEREVFJyjxBn4ODA7Zv34779+/j4sWLEEKgUaNGsLCweBH5EREREZVauWYeBgALCwu4u7tXZi5EREREFVKu74oiIiIiehWxsCEiIiKdwcKGiIiIdAYLGyIiItIZLGyIiIhIZ7CwISIiIp3BwoaIiIh0BgsbIiIi0hksbIiIiEhnsLAhIiIincHChoiIiHQGCxsiIiLSGSxsiIiISGewsCEiIiKd8coXNo6OjlAoFGpLYGAgAKBTp05qbaNGjZL1cfXqVfj6+qJatWqwtrbGhAkT8OTJE1nMvn378Pbbb8PQ0BANGzZERESEWi5Lly6Fo6MjjIyM4OHhgaNHj76w501ERERl98oXNseOHcPNmzelJTo6GgDw8ccfSzEjRoyQxcydO1dqy8/Ph6+vL3JzcxEbG4s1a9YgIiIC06ZNk2JSUlLg6+sLLy8vJCQkIDg4GMOHD8fOnTulmI0bNyI0NBTTp0/HiRMn0KJFCyiVSty6deslHAUiIiIqDYUQQmg7ibIIDg7G1q1bkZycDIVCgU6dOqFly5ZYsGCBxvgdO3bggw8+wI0bN2BjYwMAWL58OSZNmoTbt2/DwMAAkyZNwrZt25CYmCht169fP2RkZCAqKgoA4OHhAXd3dyxZsgQAUFBQAHt7e4wZMwaTJ0/WuO+cnBzk5ORIj1UqFezt7ZGZmQlTU9PKOBwyY1btq/Q+iV41i4d20nYKRKQFKpUKZmZmJX6GvvJnbJ6Vm5uLtWvXYujQoVAoFNL6devWwcrKCk2bNkVYWBgePnwotcXFxaFZs2ZSUQMASqUSKpUKSUlJUoy3t7dsX0qlEnFxcdJ+4+PjZTF6enrw9vaWYjSZPXs2zMzMpMXe3r5iB4CIiIiKVUXbCZRFZGQkMjIyMHjwYGndgAED4ODgADs7O5w+fRqTJk3ChQsX8McffwAA0tLSZEUNAOlxWlpasTEqlQqPHj3C/fv3kZ+frzHm/PnzReYbFhaG0NBQ6XHhGRsiIiJ6MV6rwmblypXo1q0b7OzspHUBAQHS/5s1a4batWujS5cuuHTpEho0aKCNNCWGhoYwNDTUag5ERERvktfmUlRqaip2796N4cOHFxvn4eEBALh48SIAwNbWFunp6bKYwse2trbFxpiamsLY2BhWVlbQ19fXGFPYBxEREWnfa1PYrF69GtbW1vD19S02LiEhAQBQu3ZtAICnpyfOnDkju3spOjoapqamcHFxkWJiYmJk/URHR8PT0xMAYGBgADc3N1lMQUEBYmJipBgiIiLSvteisCkoKMDq1avh7++PKlX+d/Xs0qVL+OqrrxAfH48rV65gy5YtGDRoEDp06IDmzZsDAHx8fODi4oJPP/0Up06dws6dOzFlyhQEBgZKl4lGjRqFy5cvY+LEiTh//jy+//57/PrrrwgJCZH2FRoaih9++AFr1qzBuXPnMHr0aGRnZ2PIkCEv92AQERFRkV6LMTa7d+/G1atXMXToUNl6AwMD7N69GwsWLEB2djbs7e3Rq1cvTJkyRYrR19fH1q1bMXr0aHh6esLExAT+/v6YOXOmFOPk5IRt27YhJCQECxcuRN26dfHjjz9CqVRKMX379sXt27cxbdo0pKWloWXLloiKilIbUExERETa89rNY/M6K+09+OXFeWzoTcB5bIjeTDo5jw0RERFRcVjYEBERkc5gYUNEREQ6g4UNERER6QwWNkRERKQzWNgQERGRzmBhQ0RERDqDhQ0RERHpDBY2REREpDNY2BAREZHOYGFDREREOoOFDREREekMFjZERESkM1jYEBERkc5gYUNEREQ6g4UNERER6QwWNkRERKQzWNgQERGRzmBhQ0RERDqDhQ0RERHpDBY2REREpDNY2BAREZHOYGFDREREOoOFDREREekMFjZERESkM1jYEBERkc54pQub8PBwKBQK2eLs7Cy1P378GIGBgbC0tET16tXRq1cvpKeny/q4evUqfH19Ua1aNVhbW2PChAl48uSJLGbfvn14++23YWhoiIYNGyIiIkItl6VLl8LR0RFGRkbw8PDA0aNHX8hzJiIiovJ7pQsbAHB1dcXNmzel5dChQ1JbSEgI/vrrL2zatAn79+/HjRs30LNnT6k9Pz8fvr6+yM3NRWxsLNasWYOIiAhMmzZNiklJSYGvry+8vLyQkJCA4OBgDB8+HDt37pRiNm7ciNDQUEyfPh0nTpxAixYtoFQqcevWrZdzEIiIiKhUFEIIoe0kihIeHo7IyEgkJCSotWVmZqJWrVpYv349evfuDQA4f/48mjRpgri4OLzzzjvYsWMHPvjgA9y4cQM2NjYAgOXLl2PSpEm4ffs2DAwMMGnSJGzbtg2JiYlS3/369UNGRgaioqIAAB4eHnB3d8eSJUsAAAUFBbC3t8eYMWMwefLkUj8flUoFMzMzZGZmwtTUtLyHpUhjVu2r9D6JXjWLh3bSdgpEpAWl/Qx95c/YJCcnw87ODvXr18fAgQNx9epVAEB8fDzy8vLg7e0txTo7O6NevXqIi4sDAMTFxaFZs2ZSUQMASqUSKpUKSUlJUsyzfRTGFPaRm5uL+Ph4WYyenh68vb2lmKLk5ORApVLJFiIiInpxXunCxsPDAxEREYiKisKyZcuQkpKCd999Fw8ePEBaWhoMDAxgbm4u28bGxgZpaWkAgLS0NFlRU9he2FZcjEqlwqNHj3Dnzh3k5+drjCnsoyizZ8+GmZmZtNjb25f5GBAREVHpVdF2AsXp1q2b9P/mzZvDw8MDDg4O+PXXX2FsbKzFzEonLCwMoaGh0mOVSsXihoiI6AV6pc/YPM/c3BxvvfUWLl68CFtbW+Tm5iIjI0MWk56eDltbWwCAra2t2l1ShY9LijE1NYWxsTGsrKygr6+vMaawj6IYGhrC1NRUthAREdGL81oVNllZWbh06RJq164NNzc3VK1aFTExMVL7hQsXcPXqVXh6egIAPD09cebMGdndS9HR0TA1NYWLi4sU82wfhTGFfRgYGMDNzU0WU1BQgJiYGCmGiIiIXg2vdGEzfvx47N+/H1euXEFsbCw++ugj6Ovro3///jAzM8OwYcMQGhqKvXv3Ij4+HkOGDIGnpyfeeecdAICPjw9cXFzw6aef4tSpU9i5cyemTJmCwMBAGBoaAgBGjRqFy5cvY+LEiTh//jy+//57/PrrrwgJCZHyCA0NxQ8//IA1a9bg3LlzGD16NLKzszFkyBCtHBciIiLS7JUeY3P9+nX0798fd+/eRa1atdC+fXscPnwYtWrVAgDMnz8fenp66NWrF3JycqBUKvH9999L2+vr62Pr1q0YPXo0PD09YWJiAn9/f8ycOVOKcXJywrZt2xASEoKFCxeibt26+PHHH6FUKqWYvn374vbt25g2bRrS0tLQsmVLREVFqQ0oJiIiIu16peex0TWcx4ao4jiPDdGbSWfmsSEiIiIqLRY2REREpDNY2BAREZHOYGFDREREOoOFDREREekMFjZERESkM1jYEBERkc5gYUNEREQ6g4UNERER6QwWNkRERKQzWNgQERGRzmBhQ0RERDqDhQ0RERHpDBY2REREpDNY2BAREZHOYGFDREREOoOFDREREekMFjZERESkM1jYEBERkc5gYUNEREQ6g4UNERER6QwWNkRERKQzWNgQERGRzmBhQ0RERDqDhQ0RERHpDBY2REREpDNY2BAREZHOeKULm9mzZ8Pd3R01atSAtbU1/Pz8cOHCBVlMp06doFAoZMuoUaNkMVevXoWvry+qVasGa2trTJgwAU+ePJHF7Nu3D2+//TYMDQ3RsGFDREREqOWzdOlSODo6wsjICB4eHjh69GilP2ciIiIqv1e6sNm/fz8CAwNx+PBhREdHIy8vDz4+PsjOzpbFjRgxAjdv3pSWuXPnSm35+fnw9fVFbm4uYmNjsWbNGkRERGDatGlSTEpKCnx9feHl5YWEhAQEBwdj+PDh2LlzpxSzceNGhIaGYvr06Thx4gRatGgBpVKJW7duvfgDQURERKWiEEIIbSdRWrdv34a1tTX279+PDh06AHh6xqZly5ZYsGCBxm127NiBDz74ADdu3ICNjQ0AYPny5Zg0aRJu374NAwMDTJo0Cdu2bUNiYqK0Xb9+/ZCRkYGoqCgAgIeHB9zd3bFkyRIAQEFBAezt7TFmzBhMnjy5VPmrVCqYmZkhMzMTpqam5T0MRRqzal+l90n0qlk8tJO2UyAiLSjtZ+grfcbmeZmZmQCAmjVrytavW7cOVlZWaNq0KcLCwvDw4UOpLS4uDs2aNZOKGgBQKpVQqVRISkqSYry9vWV9KpVKxMXFAQByc3MRHx8vi9HT04O3t7cUo0lOTg5UKpVsISIiohenirYTKK2CggIEBwejXbt2aNq0qbR+wIABcHBwgJ2dHU6fPo1JkybhwoUL+OOPPwAAaWlpsqIGgPQ4LS2t2BiVSoVHjx7h/v37yM/P1xhz/vz5InOePXs2ZsyYUf4nTURERGXy2hQ2gYGBSExMxKFDh2TrAwICpP83a9YMtWvXRpcuXXDp0iU0aNDgZacpExYWhtDQUOmxSqWCvb29FjMiIiLSba9FYRMUFIStW7fiwIEDqFu3brGxHh4eAICLFy+iQYMGsLW1Vbt7KT09HQBga2sr/Vu47tkYU1NTGBsbQ19fH/r6+hpjCvvQxNDQEIaGhqV7kkRERFRhr/QYGyEEgoKCsHnzZuzZswdOTk4lbpOQkAAAqF27NgDA09MTZ86ckd29FB0dDVNTU7i4uEgxMTExsn6io6Ph6ekJADAwMICbm5sspqCgADExMVIMERERad8rfcYmMDAQ69evx59//okaNWpIY2LMzMxgbGyMS5cuYf369Xj//fdhaWmJ06dPIyQkBB06dEDz5s0BAD4+PnBxccGnn36KuXPnIi0tDVOmTEFgYKB0NmXUqFFYsmQJJk6ciKFDh2LPnj349ddfsW3bNimX0NBQ+Pv7o3Xr1mjTpg0WLFiA7OxsDBky5OUfGCIiItLolS5sli1bBuDpLd3PWr16NQYPHgwDAwPs3r1bKjLs7e3Rq1cvTJkyRYrV19fH1q1bMXr0aHh6esLExAT+/v6YOXOmFOPk5IRt27YhJCQECxcuRN26dfHjjz9CqVRKMX379sXt27cxbdo0pKWloWXLloiKilIbUExERETa81rNY/O64zw2RBXHeWyI3kw6OY8NERERUXFY2BAREZHOYGFDREREOoOFDREREekMFjZERESkM1jYEBERkc5gYUNEREQ6g4UNERER6QwWNkRERKQzWNgQERGRzmBhQ0RERDqDhQ0RERHpDBY2RESkEz799FN888032k6j3N555x38/vvv2k7jtcfChoiIShQeHg6FQiFbnJ2dpfYrV66otRcumzZtAgCcOnUK/fv3h729PYyNjdGkSRMsXLhQtp9Dhw6hXbt2sLS0hLGxMZydnTF//vwS8zt16hS2b9+OsWPHAgDy8vIwadIkNGvWDCYmJrCzs8OgQYNw48aNYvvJz8/H1KlT4eTkBGNjYzRo0ABfffUVhBCyuHPnzqFHjx4wMzODiYkJ3N3dcfXqVak9NDQUNWvWhL29PdatWyfbdtOmTejevbvavqdMmYLJkyejoKCgxOdLRaui7QSIiOj14Orqit27d0uPq1T530eIvb09bt68KYtfsWIF5s2bh27dugEA4uPjYW1tjbVr18Le3h6xsbEICAiAvr4+goKCAAAmJiYICgpC8+bNYWJigkOHDmHkyJEwMTFBQEBAkbktXrwYH3/8MapXrw4AePjwIU6cOIGpU6eiRYsWuH//PsaNG4cePXrg+PHjRfbzn//8B8uWLcOaNWvg6uqK48ePY8iQITAzM5OKpkuXLqF9+/YYNmwYZsyYAVNTUyQlJcHIyAgA8Ndff2H9+vXYtWsXkpOTMXToUCiVSlhZWSEzMxNffvml7DgW6tatG4YPH44dO3bA19e32J8FFU0hni9D6YVRqVQwMzNDZmYmTE1NK73/Mav2VXqfRK+axUM7aTuFN1J4eDgiIyORkJBQ6m1atWqFt99+GytXriwyJjAwEOfOncOePXuKjOnZsydMTEzw888/a2zPz8+HpaUl1q1bV2xBcOzYMbRp0wapqamoV6+expgPPvgANjY2spx79eoFY2NjrF27FgDQr18/VK1atch85s6dixMnTmDDhg0AABsbG2zduhXu7u4YOXIknJ2dERISonHboUOHIi8vr8i+32Sl/QzlpSgiIiqV5ORk2NnZoX79+hg4cKDs0svz4uPjkZCQgGHDhhXbZ2ZmJmrWrFlk+8mTJxEbG4uOHTsWGXP69GlkZmaidevWJe5LoVDA3Ny8yJi2bdsiJiYG//zzD4Cnl7gOHToknXUqKCjAtm3b8NZbb0GpVMLa2hoeHh6IjIyU+mjRogWOHz+O+/fvIz4+Ho8ePULDhg1x6NAhnDhxQjrzo0mbNm1w8ODBYp8HFY+FDRERlcjDwwMRERGIiorCsmXLkJKSgnfffRcPHjzQGL9y5Uo0adIEbdu2LbLP2NhYbNy4UeMlprp168LQ0BCtW7dGYGAghg8fXmQ/qamp0NfXh7W1dZExjx8/xqRJk9C/f/9i/9qfPHky+vXrB2dnZ1StWhWtWrVCcHAwBg4cCAC4desWsrKyMGfOHHTt2hW7du3CRx99hJ49e2L//v0AAKVSiU8++QTu7u4YPHgw1qxZAxMTE4wePRrLly/HsmXL0LhxY7Rr1w5JSUmy/dvZ2eHatWscZ1MBHGNDREQlKjxjAQDNmzeHh4cHHBwc8Ouvv6qdlXn06BHWr1+PqVOnFtlfYmIiPvzwQ0yfPh0+Pj5q7QcPHkRWVhYOHz6MyZMno2HDhujfv7/Gvh49egRDQ0MoFAqN7Xl5eejTpw+EEFi2bFmxz/PXX3/FunXrsH79eri6uiIhIQHBwcGws7ODv7+/VHB8+OGH0uWkli1bIjY2FsuXL5fOLIWHhyM8PFzqd8aMGfD29kbVqlXx9ddf48yZM9i6dSsGDRqE+Ph4Kc7Y2BgFBQXIycmBsbFxsbmSZixsiIiozMzNzfHWW2/h4sWLam2//fYbHj58iEGDBmnc9uzZs+jSpQsCAgIwZcoUjTFOTk4AgGbNmiE9PR3h4eFFFjZWVlZ4+PAhcnNzYWBgIGsrLGpSU1OxZ8+eEsc3TpgwQTprU7j/1NRUzJ49G/7+/rCyskKVKlXg4uIi265JkyY4dOiQxj7Pnz+PtWvX4uTJk1i1ahU6dOiAWrVqoU+fPhg6dCgePHiAGjVqAADu3bsHExMTFjUVwEtRRERUZllZWbh06RJq166t1rZy5Ur06NEDtWrVUmtLSkqCl5cX/P39MWvWrFLtq/AMRlFatmwJ4GnB9KzCoiY5ORm7d++GpaVlift6+PAh9PTkH436+vrSmRoDAwO4u7vjwoULsph//vkHDg4Oav0JITBy5Eh89913qF69OvLz85GXlyflBzwd/FwoMTERrVq1KjFPKhrP2BARUYnGjx+P7t27w8HBATdu3MD06dOhr6+vdhbl4sWLOHDgALZv367WR2JiIjp37gylUonQ0FCkpaUBeFo4FBZBS5cuRb169aQ5cg4cOID/+7//K3bAba1atfD222/j0KFDUpGTl5eH3r1748SJE9i6dSvy8/Ol/dWsWVM6s9OlSxd89NFH0u3m3bt3x6xZs1CvXj24urri5MmT+O677zB06FBpfxMmTEDfvn3RoUMHeHl5ISoqCn/99Rf27dunltuPP/6IWrVqSfPWtGvXDuHh4Th8+DB27NgBFxcX2WDmgwcParw0R6XH271fIt7uTVRxr/Pt3p0/m63tFMotKfoXZNy4grzHD2FgbAKz2g6o38YHxmbysyCXDu9EenICPD+ZAIVCfuYj5dhuXDmuflu3UQ1zeH4yEQBw/UwsbiQdxaMH96HQ04OxqSXsmrSGnWsbtf6e9W/iYaT9cxJuPUcDAB6p7uPwunkaY1v2GA6LOvUBAHFr58K28dtwcvcGADzJzUHK0WjcTjmLvEdZMDAxhU3D5nBs3Rl6+v87F3Dz3HGkntyPnKxMVDOvBUf3LqjlJL88lfvwAeL/WIa3PxoFQ5P//c5POR6D66djYWBcHU0694apjT0AICcrE3Hr/g/vDBwPo+pmRT7XV92e78NeSL+l/QxlYfMSsbAhqjgWNqRJ/pM8HPnlO7i+1x9mtprnqHnVXYqLQl7OIzh3+kjbqVSItgsbXooiIqLXnn6VqmjS+WPkPc7WdirlVtXYBPYt2mk7jdceCxsiItIJhZeXXlf1Wr6r7RR0Au+KIiIiIp3BwqaMli5dCkdHRxgZGcHDwwNHjx7VdkpERET0/7GwKYONGzciNDQU06dPx4kTJ9CiRQsolUrcunVL26kRERERWNiUyXfffYcRI0ZgyJAhcHFxwfLly1GtWjWsWrVK26kREREROHi41HJzcxEfH4+wsP/dxqanpwdvb2/ExcVp3CYnJ0c2W2ZmZiaAp7esvZAcH72+dwMQldaLev+8DE9yH2s7BaIX7kW9Rwv7LWmWGhY2pXTnzh3k5+fDxsZGtt7Gxgbnz5/XuM3s2bMxY8YMtfX29vYvJEeiN8GKIG1nQETFMVs584X2/+DBA5iZFT2BIQubFygsLAyhoaHS44KCAty7dw+WlpZFfgstvT5UKhXs7e1x7dq1FzLhIhFVDN+jukUIgQcPHsDOzq7YOBY2pWRlZQV9fX2kp6fL1qenp8PW1lbjNoaGhjA0NJSte/Y7QUg3mJqa8pcm0SuM71HdUdyZmkIcPFxKBgYGcHNzQ0xMjLSuoKAAMTEx8PT01GJmREREVIhnbMogNDQU/v7+aN26Ndq0aYMFCxYgOzsbQ4YM0XZqREREBBY2ZdK3b1/cvn0b06ZNQ1paGlq2bImoqCi1AcX0ZjA0NMT06dPVLjcS0auB79E3E7/dm4iIiHQGx9gQERGRzmBhQ0RERDqDhQ0RERHpDBY2REREpDNY2BCV09KlS+Ho6AgjIyN4eHjg6NGj2k6JiAAcOHAA3bt3h52dHRQKBSIjI7WdEr1ELGyIymHjxo0IDQ3F9OnTceLECbRo0QJKpRK3bt3SdmpEb7zs7Gy0aNECS5cu1XYqpAW83ZuoHDw8PODu7o4lS5YAeDoLtb29PcaMGYPJkydrOTsiKqRQKLB582b4+flpOxV6SXjGhqiMcnNzER8fD29vb2mdnp4evL29ERcXp8XMiIiIhQ1RGd25cwf5+flqM07b2NggLS1NS1kRERHAwoaIiIh0CAsbojKysrKCvr4+0tPTZevT09Nha2urpayIiAhgYUNUZgYGBnBzc0NMTIy0rqCgADExMfD09NRiZkRExG/3JiqH0NBQ+Pv7o3Xr1mjTpg0WLFiA7OxsDBkyRNupEb3xsrKycPHiRelxSkoKEhISULNmTdSrV0+LmdHLwNu9icppyZIlmDdvHtLS0tCyZUssWrQIHh4e2k6L6I23b98+eHl5qa339/dHRETEy0+IXioWNkRERKQzOMaGiIiIdAYLGyIiItIZLGyIiIhIZ7CwISIiIp3BwoaIiIh0BgsbIiIi0hksbIiIiEhnsLAhIiIincHChojeKAqFApGRkdpOg4heEBY2RKRT0tLSMGbMGNSvXx+Ghoawt7dH9+7dZV9aSkS6i1+CSUQ648qVK2jXrh3Mzc0xb948NGvWDHl5edi5cycCAwNx/vx5badIRC8Yz9gQkc747LPPoFAocPToUfTq1QtvvfUWXF1dERoaisOHD2vcZtKkSXjrrbdQrVo11K9fH1OnTkVeXp7UfurUKXh5eaFGjRowNTWFm5sbjh8/DgBITU1F9+7dYWFhARMTE7i6umL79u0v5bkSkWY8Y0NEOuHevXuIiorCrFmzYGJiotZubm6ucbsaNWogIiICdnZ2OHPmDEaMGIEaNWpg4sSJAICBAweiVatWWLZsGfT19ZGQkICqVasCAAIDA5Gbm4sDBw7AxMQEZ8+eRfXq1V/YcySikrGwISKdcPHiRQgh4OzsXKbtpkyZIv3f0dER48ePx4YNG6TC5urVq5gwYYLUb6NGjaT4q1evolevXmjWrBkAoH79+hV9GkRUQbwURUQ6QQhRru02btyIdu3awdbWFtWrV8eUKVNw9epVqT00NBTDhw+Ht7c35syZg0uXLkltY8eOxddff4127dph+vTpOH36dIWfBxFVDAsbItIJjRo1gkKhKNMA4bi4OAwcOBDvv/8+tm7dipMnT+LLL79Ebm6uFBMeHo6kpCT4+vpiz549cHFxwebNmwEAw4cPx+XLl/Hpp5/izJkzaN26NRYvXlzpz42ISk8hyvtnDhHRK6Zbt244c+YMLly4oDbOJiMjA+bm5lAoFNi8eTP8/Pzw7bff4vvvv5edhRk+fDh+++03ZGRkaNxH//79kZ2djS1btqi1hYWFYdu2bTxzQ6RFPGNDRDpj6dKlyM/PR5s2bfD7778jOTkZ586dw6JFi+Dp6akW36hRI1y9ehUbNmzApUuXsGjRIulsDAA8evQIQUFB2LdvH1JTU/H333/j2LFjaNKkCQAgODgYO3fuREpKCk6cOIG9e/dKbUSkHRw8TEQ6o379+jhx4gRmzZqFzz//HDdv3kStWrXg5uaGZcuWqcX36NEDISEhCAoKQk5ODnx9fTF16lSEh4cDAPT19XH37l0MGjQI6enpsLKyQs+ePTFjxgwAQH5+PgIDA3H9+nWYmpqia9eumD9//st8ykT0HF6KIiIiIp3BS1FERESkM1jYEBERkc5gYUNEREQ6g4UNERER6QwWNkRERKQzWNgQERGRzmBhQ0RERDqDhQ0RERHpDBY2REREpDNY2BAREZHOYGFDREREOuP/AQ/7Fs8PpGJ0AAAAAElFTkSuQmCC\n"},"metadata":{}},{"output_type":"display_data","data":{"text/plain":"<Figure size 640x480 with 0 Axes>"},"metadata":{}},{"output_type":"display_data","data":{"text/plain":"<Figure size 800x600 with 2 Axes>","image/png":"iVBORw0KGgoAAAANSUhEUgAAAqMAAAIjCAYAAAA3LxKwAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8pXeV/AAAACXBIWXMAAA9hAAAPYQGoP6dpAABwU0lEQVR4nO3deVxN+f8H8NetdEt7SYslIRSNkEnWMRoZWcJ8pywjhGHKliXGFoNMxi4aZskYZmyjGcugEZoZIZFiyBbZbpZUU7Tont8ffp1xVRRdh+7r+X3cx2Pu+bzP53zOla937/P5fK5MEAQBREREREQS0JJ6AERERESkuZiMEhEREZFkmIwSERERkWSYjBIRERGRZJiMEhEREZFkmIwSERERkWSYjBIRERGRZJiMEhEREZFkmIwSERERkWSYjBK9pIsXL6Jr164wMTGBTCZDVFRUpfZ/9epVyGQyREZGVmq/b7P33nsP7733XqX2ef36dejp6eHvv/+u1H5fpF69ehgyZMhLnauOz+FN5Ovri48//ljqYRCRmjEZpbfa5cuX8emnn6J+/frQ09ODsbEx2rVrh+XLl+PRo0dqvbafnx+Sk5Mxf/58bNiwAa6urmq93us0ZMgQyGQyGBsbl/o5Xrx4ETKZDDKZDF999VWF+7916xZCQkKQmJhYCaN9NXPnzoWbmxvatWuHQ4cOiff1opemKigowPLly9GiRQsYGxvD1NQUTZs2xciRI3H+/PkK9/e8n4Xg4GBs374dp0+froSRE9GbSkfqARC9rN27d+N///sf5HI5Bg8ejGbNmqGgoAB//fUXJk+ejLNnz2Lt2rVqufajR48QFxeH6dOnIzAwUC3XsLOzw6NHj1CtWjW19P8iOjo6ePjwIXbu3FmiOrVx40bo6ekhLy/vpfq+desW5syZg3r16sHFxaXc5+3fv/+lrleWu3fvYv369Vi/fj0AwNHRERs2bFCJmTZtGgwNDTF9+vRKvXZKSgq0tF6uHlDZn0NF9OvXD7///jv69++PESNGoLCwEOfPn8euXbvQtm1bNGnSpEL9Pe9noUWLFnB1dcXixYvxww8/VOJdENGbhMkovZVSU1Ph6+sLOzs7xMTEwMbGRmwLCAjApUuXsHv3brVd/+7duwAAU1NTtV1DJpNBT09Pbf2/iFwuR7t27fDTTz+VSEY3bdoELy8vbN++/bWM5eHDh6hevTp0dXUrtd8ff/wROjo66NmzJwDAysoKgwYNUolZuHAhatSoUeL405RKJQoKCir05yWXy19u0EClfw7lFR8fj127dmH+/Pn4/PPPVdpWrVqFzMzMSr/mxx9/jNmzZ2P16tUwNDSs9P6JSHp8TE9vpbCwMOTk5ODbb79VSUSLNWzYEOPGjRPfP378GF988QUaNGgAuVyOevXq4fPPP0d+fr7KefXq1UOPHj3w119/4d1334Wenh7q16+vUpUJCQmBnZ0dAGDy5MmQyWSoV68egCePt4v/+2khISElHu1GR0ejffv2MDU1haGhIRo3bqzyD3xZc0ZjYmLQoUMHGBgYwNTUFL1798a5c+dKvd6lS5cwZMgQmJqawsTEBEOHDsXDhw/L/mCfMWDAAPz+++8qSUZ8fDwuXryIAQMGlIjPyMjApEmT4OzsDENDQxgbG+PDDz9Uecx66NAhtG7dGgAwdOhQ8bF38X2+9957aNasGRISEtCxY0dUr15d/FyenSvp5+cHPT29Evfv6ekJMzMz3Lp167n3FxUVBTc3twonOTKZDIGBgdi4cSOaNm0KuVyOvXv3AgC++uortG3bFhYWFtDX10erVq2wbdu2En08O2c0MjISMpkMf//9N4KCgmBpaQkDAwP06dNH/OWn2LOfQ/H0gi1btmD+/PmoXbs29PT00KVLF1y6dKnEtcPDw1G/fn3o6+vj3XffxZ9//lmueaiXL18GALRr165Em7a2NiwsLFSO3bx5E8OGDYOVlRXkcjmaNm2K7777TmXcz/tZAIAPPvgAubm5iI6Ofu7YiOjtxWSU3ko7d+5E/fr10bZt23LFDx8+HLNmzULLli2xdOlSdOrUCaGhofD19S0Re+nSJXz00Uf44IMPsHjxYpiZmWHIkCE4e/YsAKBv375YunQpAKB///7YsGEDli1bVqHxnz17Fj169EB+fj7mzp2LxYsXo1evXi9cRPPHH3/A09MTd+7cQUhICIKCgnDkyBG0a9cOV69eLRH/8ccf499//0VoaCg+/vhjREZGYs6cOeUeZ9++fSGTyfDLL7+IxzZt2oQmTZqgZcuWJeKvXLmCqKgo9OjRA0uWLMHkyZORnJyMTp06iYmho6Mj5s6dCwAYOXIkNmzYgA0bNqBjx45iP/fv38eHH34IFxcXLFu2DJ07dy51fMuXL4elpSX8/PxQVFQEAPj666+xf/9+rFy5Era2tmXeW2FhIeLj40u9j/KIiYnBhAkT4OPjg+XLl4u/hBTPp5w7dy4WLFgAHR0d/O9//yt3pX7MmDE4ffo0Zs+ejdGjR2Pnzp3lngqycOFC7NixA5MmTcK0adNw9OhRDBw4UCVmzZo1CAwMRO3atREWFoYOHTrA29sbN27ceGH/xb+Ebdy4EY8fP35ubHp6Otq0aYM//vgDgYGBWL58ORo2bAh/f3/x70t5fhacnJygr6//2heYEdFrJBC9ZbKysgQAQu/evcsVn5iYKAAQhg8frnJ80qRJAgAhJiZGPGZnZycAEGJjY8Vjd+7cEeRyuTBx4kTxWGpqqgBAWLRokUqffn5+gp2dXYkxzJ49W3j6r9vSpUsFAMLdu3fLHHfxNb7//nvxmIuLi1CzZk3h/v374rHTp08LWlpawuDBg0tcb9iwYSp99unTR7CwsCjzmk/fh4GBgSAIgvDRRx8JXbp0EQRBEIqKigRra2thzpw5pX4GeXl5QlFRUYn7kMvlwty5c8Vj8fHxJe6tWKdOnQQAQkRERKltnTp1Ujm2b98+AYAwb9484cqVK4KhoaHg7e39wnu8dOmSAEBYuXLlc+OaNm1a4poABC0tLeHs2bMl4h8+fKjyvqCgQGjWrJnw/vvvqxy3s7MT/Pz8xPfff/+9AEDw8PAQlEqleHzChAmCtra2kJmZKR579nM4ePCgAEBwdHQU8vPzxePLly8XAAjJycmCIAhCfn6+YGFhIbRu3VooLCwU4yIjIwUAJe7zWUqlUvzzsbKyEvr37y+Eh4cL165dKxHr7+8v2NjYCPfu3VM57uvrK5iYmIif0/N+Foo1atRI+PDDD587NiJ6e7EySm+d7OxsAICRkVG54vfs2QMACAoKUjk+ceJEAChRsXJyckKHDh3E95aWlmjcuDGuXLny0mN+VvFc019//RVKpbJc59y+fRuJiYkYMmQIzM3NxePvvPMOPvjgA/E+nzZq1CiV9x06dMD9+/fFz7A8BgwYgEOHDkGhUCAmJgYKhaLUR/TAk3mQxYtyioqKcP/+fXEKwsmTJ8t9TblcjqFDh5YrtmvXrvj0008xd+5c9O3bF3p6evj6669feN79+/cBAGZmZuUe19M6deoEJyenEsf19fXF/37w4AGysrLQoUOHct//yJEjVaZ0dOjQAUVFRbh27doLzx06dKjKfNLin+Pin90TJ07g/v37GDFiBHR0/lsyMHDgwHJ9DjKZDPv27cO8efNgZmaGn376CQEBAbCzs4OPj484nUMQBGzfvh09e/aEIAi4d++e+PL09ERWVlaFfh7MzMxw7969cscT0duFySi9dYyNjQEA//77b7nir127Bi0tLTRs2FDluLW1NUxNTUv8I1+3bt0SfZiZmeHBgwcvOeKSfHx80K5dOwwfPhxWVlbw9fXFli1bnpuYFo+zcePGJdocHR1x79495Obmqhx/9l6KE46K3Ev37t1hZGSEzZs3Y+PGjWjdunWJz7KYUqnE0qVL4eDgALlcjho1asDS0hJJSUnIysoq9zVr1apVoUU6X331FczNzZGYmIgVK1agZs2a5T5XEIRyxz7N3t6+1OO7du1CmzZtoKenB3Nzc1haWmLNmjXlvv9X+TN70bnFP0PP/vnp6OiUOte5NHK5HNOnT8e5c+dw69Yt/PTTT2jTpg22bNkiTie4e/cuMjMzsXbtWlhaWqq8in/JuHPnTrmuBzz5M9Lk7bSIqjomo/TWMTY2hq2tLc6cOVOh88r7j5m2tnapx8uTtJR1jeL5jMX09fURGxuLP/74A5988gmSkpLg4+ODDz74oETsq3iVeykml8vRt29frF+/Hjt27CizKgoACxYsQFBQEDp27Igff/wR+/btQ3R0NJo2bVruCjCgWl0sj1OnTonJTXJycrnOKV5s87K/ZJQ2xj///BO9evWCnp4eVq9ejT179iA6OhoDBgwo92f+Kn9mlfHnXRE2Njbw9fVFbGwsHBwcsGXLFjx+/Fj8sx40aBCio6NLfZW2CKosDx48QI0aNdRyD0QkPW7tRG+lHj16YO3atYiLi4O7u/tzY+3s7KBUKnHx4kU4OjqKx9PT05GZmSkuyqgMZmZmpW5vU9ojVi0tLXTp0gVdunTBkiVLsGDBAkyfPh0HDx6Eh4dHqfcBPNmf8lnnz59HjRo1YGBg8Oo3UYoBAwbgu+++g5aWVqmLvopt27YNnTt3xrfffqtyPDMzUyWZqMwqV25uLoYOHQonJye0bdsWYWFh6NOnj7hKuyx169aFvr4+UlNTK20s27dvh56eHvbt26eyddP3339fadd4FcU/Q5cuXVJZFPb48WNcvXoV77zzzkv1W61aNbzzzju4ePEi7t27B0tLSxgZGaGoqKjUn+Wnvehn4fHjx7h+/Tp69er1UmMjojcfK6P0VpoyZQoMDAwwfPhwpKenl2i/fPkyli9fDuDJY2YAJVa8L1myBADg5eVVaeNq0KABsrKykJSUJB67ffs2duzYoRKXkZFR4tziDb+f3W6qmI2NDVxcXLB+/XqVhPfMmTPYv3+/eJ/q0LlzZ3zxxRdYtWoVrK2ty4zT1tYuUYXbunUrbt68qXKsOGmujH0pg4ODkZaWhvXr12PJkiWoV68e/Pz8yvwci1WrVg2urq44ceLEK4+hmLa2NmQymUp1++rVq5X+VbEvy9XVFRYWFli3bp3KaviNGzeWq0J88eJFpKWllTiemZmJuLg4mJmZwdLSEtra2ujXrx+2b99e6hOMp7eqetHPwj///IO8vLxy75xBRG8fVkbprdSgQQNs2rQJPj4+cHR0VPkGpiNHjmDr1q3iHo7NmzeHn58f1q5di8zMTHTq1AnHjx/H+vXr4e3tXea2QS/D19cXwcHB6NOnD8aOHYuHDx9izZo1aNSokcqCjblz5yI2NhZeXl6ws7PDnTt3sHr1atSuXRvt27cvs/9Fixbhww8/hLu7O/z9/fHo0SOsXLkSJiYmCAkJqbT7eJaWlhZmzJjxwrgePXpg7ty5GDp0KNq2bYvk5GRs3LgR9evXV4lr0KABTE1NERERASMjIxgYGMDNza3MeZhliYmJwerVqzF79mxxi6bvv/8e7733HmbOnImwsLDnnt+7d29Mnz4d2dnZ4lzkV+Hl5YUlS5agW7duGDBgAO7cuYPw8HA0bNhQ5RcUqejq6iIkJARjxozB+++/j48//hhXr15FZGQkGjRo8MIq5enTpzFgwAB8+OGH6NChA8zNzXHz5k2sX78et27dwrJly8SpAgsXLsTBgwfh5uaGESNGwMnJCRkZGTh58iT++OMP8ReyF/0sREdHo3r16vjggw/U++EQkXSkWsZPVBkuXLggjBgxQqhXr56gq6srGBkZCe3atRNWrlwp5OXliXGFhYXCnDlzBHt7e6FatWpCnTp1hGnTpqnECMKT7Xa8vLxKXOfZrXTK2tpJEARh//79QrNmzQRdXV2hcePGwo8//lhia6cDBw4IvXv3FmxtbQVdXV3B1tZW6N+/v3DhwoUS13h2y5s//vhDaNeunaCvry8YGxsLPXv2FP755x+VmOLrPbt1VPH2QampqWV+poKgurVTWcra2mnixImCjY2NoK+vL7Rr106Ii4srdUumX3/9VXBychJ0dHRU7rNTp05C06ZNS73m0/1kZ2cLdnZ2QsuWLVW2KRKEJ9shaWlpCXFxcc+9h/T0dEFHR0fYsGFDmTFlbe0UEBBQavy3334rODg4CHK5XGjSpInw/fffl/jzF4Syt3aKj49XiSvetungwYPisbK2dtq6davKuWX9DK1YsUKws7MT5HK58O677wp///230KpVK6Fbt25lfg6C8OTzWrhwodCpUyfBxsZG0NHREczMzIT3339f2LZtW6nxAQEBQp06dYRq1aoJ1tbWQpcuXYS1a9eqxJX1syAIguDm5iYMGjToueMiorebTBDUNLOdiOgt4O/vjwsXLuDPP/+UeiiSUSqVsLS0RN++fbFu3TqphyNKTExEy5YtcfLkyRLfW09EVQfnjBKRRps9ezbi4+M15ht+8vLySszr/eGHH5CRkfHCrwN93RYuXIiPPvqIiShRFcfKKBGRBjl06BAmTJiA//3vf7CwsMDJkyfx7bffwtHREQkJCRXa35WIqDJwARMRkQapV68e6tSpgxUrViAjIwPm5uYYPHgwFi5cyESUiCTByigRERERSYZzRomIiIhIMkxGiYiIiEgyTEaJiIiISDJVcgGTfotAqYdARGryIH6V1EMgIjXRkzArUWfu8OgU/3/reVgZJSIiIiLJVMnKKBEREVGFyFifkwqTUSIiIiKZTOoRaCz+GkBEREREkmFllIiIiIiP6SXDT56IiIiIJMPKKBERERHnjEqGlVEiIiIikgwro0REREScMyoZfvJEREREJBlWRomIiIg4Z1QyTEaJiIiI+JheMvzkiYiIiEgyrIwSERER8TG9ZFgZJSIiIiLJsDJKRERExDmjkuEnT0RERESSYWWUiIiIiHNGJcPKKBERERFJhpVRIiIiIs4ZlQyTUSIiIiI+ppcMfw0gIiIiIsmwMkpERETEx/SS4SdPRERERJJhZZSIiIiIlVHJ8JMnIiIiIsmwMkpERESkxdX0UmFllIiIiIgkw8ooEREREeeMSobJKBERERE3vZcMfw0gIiIiIsmwMkpERETEx/SS4SdPRERERJJhZZSIiIiIc0Ylw8ooEREREUmGlVEiIiIizhmVDD95IiIiIpIMK6NEREREnDMqGSajRERERHxMLxl+8kREREQkGVZGiYiIiPiYXjKsjBIRERGRZFgZJSIiIuKcUcnwkyciIiIiybAySkRERMQ5o5JhZZSIiIjoDRIbG4uePXvC1tYWMpkMUVFRJWLOnTuHXr16wcTEBAYGBmjdujXS0tLE9ry8PAQEBMDCwgKGhobo168f0tPTVfpIS0uDl5cXqlevjpo1a2Ly5Ml4/PixSsyhQ4fQsmVLyOVyNGzYEJGRkSXGEh4ejnr16kFPTw9ubm44fvx4he6XySgRERGRTEt9rwrKzc1F8+bNER4eXmr75cuX0b59ezRp0gSHDh1CUlISZs6cCT09PTFmwoQJ2LlzJ7Zu3YrDhw/j1q1b6Nu3r9heVFQELy8vFBQU4MiRI1i/fj0iIyMxa9YsMSY1NRVeXl7o3LkzEhMTMX78eAwfPhz79u0TYzZv3oygoCDMnj0bJ0+eRPPmzeHp6Yk7d+6U+35lgiAIFfmA3gb6LQKlHgIRqcmD+FVSD4GI1ERPwsmD+j1Xq63vRzs/e+lzZTIZduzYAW9vb/GYr68vqlWrhg0bNpR6TlZWFiwtLbFp0yZ89NFHAIDz58/D0dERcXFxaNOmDX7//Xf06NEDt27dgpWVFQAgIiICwcHBuHv3LnR1dREcHIzdu3fjzJkzKtfOzMzE3r17AQBubm5o3bo1Vq168v/NSqUSderUwZgxYzB16tRy3SMro0RERERqlJ+fj+zsbJVXfn7+S/WlVCqxe/duNGrUCJ6enqhZsybc3NxUHuUnJCSgsLAQHh4e4rEmTZqgbt26iIuLAwDExcXB2dlZTEQBwNPTE9nZ2Th79qwY83QfxTHFfRQUFCAhIUElRktLCx4eHmJMeTAZJSIiIpLJ1PYKDQ2FiYmJyis0NPSlhnnnzh3k5ORg4cKF6NatG/bv348+ffqgb9++OHz4MABAoVBAV1cXpqamKudaWVlBoVCIMU8nosXtxW3Pi8nOzsajR49w7949FBUVlRpT3Ed5cDU9ERERkRpNmzYNQUFBKsfkcvlL9aVUKgEAvXv3xoQJEwAALi4uOHLkCCIiItCpU6dXG6wEmIwSERERqXHTe7lc/tLJ57Nq1KgBHR0dODk5qRx3dHTEX3/9BQCwtrZGQUEBMjMzVaqj6enpsLa2FmOeXfVevNr+6ZhnV+Cnp6fD2NgY+vr60NbWhra2dqkxxX2UBx/TExEREb0ldHV10bp1a6SkpKgcv3DhAuzs7AAArVq1QrVq1XDgwAGxPSUlBWlpaXB3dwcAuLu7Izk5WWXVe3R0NIyNjcVE193dXaWP4pjiPnR1ddGqVSuVGKVSiQMHDogx5cHKKBEREdEbtOl9Tk4OLl26JL5PTU1FYmIizM3NUbduXUyePBk+Pj7o2LEjOnfujL1792Lnzp04dOgQAMDExAT+/v4ICgqCubk5jI2NMWbMGLi7u6NNmzYAgK5du8LJyQmffPIJwsLCoFAoMGPGDAQEBIhV3FGjRmHVqlWYMmUKhg0bhpiYGGzZsgW7d+8WxxYUFAQ/Pz+4urri3XffxbJly5Cbm4uhQ4eW+36ZjBIRERG9QU6cOIHOnTuL74vnm/r5+SEyMhJ9+vRBREQEQkNDMXbsWDRu3Bjbt29H+/btxXOWLl0KLS0t9OvXD/n5+fD09MTq1f9tX6WtrY1du3Zh9OjRcHd3h4GBAfz8/DB37lwxxt7eHrt378aECROwfPly1K5dG9988w08PT3FGB8fH9y9exezZs2CQqGAi4sL9u7dW2JR0/Nwn1Eieqtwn1GiqkvSfUb7fKO2vh/tGK62vqsCVkaJiIiI3qDH9JqGC5iIiIiISDKsjBIREZHGk7EyKhlWRomIiIhIMqyMEhERkcZjZVQ6rIwSERERkWRYGSUiIiJiYVQyrIwSERERkWRYGSUiIiKNxzmj0mEySkRERBqPyah0+JieiIiIiCTDyigRERFpPFZGpcPKKBERERFJhpVRIiIi0nisjEqHlVEiIiIikgwro0REREQsjEqGlVEiIiIikgwro0RERKTxOGdUOqyMEhEREZFkWBklIiIijcfKqHSYjBIREZHGYzIqHT6mJyIiIiLJsDJKREREGo+VUemwMkpEREREkmFllIiIiIiFUcmwMkpEREREkmFllIiIiDQe54xKh5VRIiIiIpIMK6NERESk8VgZlQ6TUSIiItJ4TEalw8f0RERERCQZVkaJiIiIWBiVDCujRERERCQZVkaJiIhI43HOqHRYGSUiIiIiybAySkRERBqPlVHpSJqMFhQUICoqCnFxcVAoFAAAa2trtG3bFr1794aurq6UwyMiIiIiNZPsMf2lS5fg6OgIPz8/nDp1CkqlEkqlEqdOncLgwYPRtGlTXLp0SarhERERkQaRyWRqe9HzSVYZHT16NJydnXHq1CkYGxurtGVnZ2Pw4MEICAjAvn37JBohERERaQomjdKRLBn9+++/cfz48RKJKAAYGxvjiy++gJubmwQjIyIiIqLXRbLH9Kamprh69WqZ7VevXoWpqelrGw8RERFpMJkaX/RckiWjw4cPx+DBg7F06VIkJSUhPT0d6enpSEpKwtKlSzFkyBCMHDlSquERERERSSI2NhY9e/aEra0tZDIZoqKiyowdNWoUZDIZli1bpnI8IyMDAwcOhLGxMUxNTeHv74+cnByVmKSkJHTo0AF6enqoU6cOwsLCSvS/detWNGnSBHp6enB2dsaePXtU2gVBwKxZs2BjYwN9fX14eHjg4sWLFbpfyZLRuXPnIjg4GIsWLYKLiwtsbW1ha2sLFxcXLFq0CMHBwQgJCZFqeERERKRB3qQFTLm5uWjevDnCw8OfG7djxw4cPXoUtra2JdoGDhyIs2fPIjo6Grt27UJsbKxKkS87Oxtdu3aFnZ0dEhISsGjRIoSEhGDt2rVizJEjR9C/f3/4+/vj1KlT8Pb2hre3N86cOSPGhIWFYcWKFYiIiMCxY8dgYGAAT09P5OXllft+ZYIgCOWOVpPU1FSVrZ3s7e1fqT/9FoGVMSwiegM9iF8l9RCISE30JNxwstboHWrr++aaPi99rkwmw44dO+Dt7a3a582bcHNzw759++Dl5YXx48dj/PjxAIBz587ByckJ8fHxcHV1BQDs3bsX3bt3x40bN2Bra4s1a9Zg+vTpUCgU4laaU6dORVRUFM6fPw8A8PHxQW5uLnbt2iVet02bNnBxcUFERAQEQYCtrS0mTpyISZMmAQCysrJgZWWFyMhI+Pr6luse34hvYLK3t4e7uzvc3d1fORElIiIiqih1Vkbz8/ORnZ2t8srPz3/psSqVSnzyySeYPHkymjZtWqI9Li4OpqamYiIKAB4eHtDS0sKxY8fEmI4dO6rs6e7p6YmUlBQ8ePBAjPHw8FDp29PTE3FxcQD+KyY+HWNiYgI3NzcxpjzeiGSUiIiIqKoKDQ2FiYmJyis0NPSl+/vyyy+ho6ODsWPHltquUChQs2ZNlWM6OjowNzcXn0QrFApYWVmpxBS/f1HM0+1Pn1daTHnw60CJiIhI46lzn9Fp06YhKChI5ZhcLn+pvhISErB8+XKcPHmyyuyNysooERERkRq3dpLL5TA2NlZ5vWwy+ueff+LOnTuoW7cudHR0oKOjg2vXrmHixImoV68egCfrb+7cuaNy3uPHj5GRkQFra2sxJj09XSWm+P2LYp5uf/q80mLKg8koERER0Vvik08+QVJSEhITE8WXra0tJk+eLH5rpbu7OzIzM5GQkCCeFxMTA6VSKX6hkLu7O2JjY1FYWCjGREdHo3HjxjAzMxNjDhw4oHL96OhouLu7A3iy5sfa2lolJjs7G8eOHRNjykPyx/R79+6FoaEh2rdvDwAIDw/HunXr4OTkhPDwcPEDISIiIlKXN+mRd05ODi5duiS+T01NRWJiIszNzVG3bl1YWFioxFerVg3W1tZo3LgxAMDR0RHdunXDiBEjEBERgcLCQgQGBsLX11fcBmrAgAGYM2cO/P39ERwcjDNnzmD58uVYunSp2O+4cePQqVMnLF68GF5eXvj5559x4sQJcfsnmUyG8ePHY968eXBwcIC9vT1mzpwJW1vbEqv/n0fyyujkyZORnZ0NAEhOTsbEiRPRvXt3pKamlphfQURERFTVnThxAi1atECLFi0AAEFBQWjRogVmzZpV7j42btyIJk2aoEuXLujevTvat2+vsoeoiYkJ9u/fj9TUVLRq1QoTJ07ErFmzVPYibdu2LTZt2oS1a9eiefPm2LZtG6KiotCsWTMxZsqUKRgzZgxGjhyJ1q1bIycnB3v37oWenl65xyr5PqOGhoY4c+YM6tWrh5CQEJw5cwbbtm3DyZMn0b179wqtxirGfUaJqi7uM0pUdUm5z6jd2J1q6/vaip5q67sqkLwyqquri4cPHwIA/vjjD3Tt2hUAYG5uLlZMiYiIiKhqknzOaPv27REUFIR27drh+PHj2Lx5MwDgwoULqF27tsSjo1fVrmUDTBjsgZZOdWFjaYKPJ6zFzkNJYvujU6VXuT5fugNLf3gyIdqlSW3MG+eNVk3roqhIQNSBRAQv3o7cRwUq5wzq6Yaxg96Hg11NZOfm4ZfoU5iwcAsAwMGuJlZO90WT+tYwMdTH7btZ2Pz7CcxfuwePHysBADo6Wpg8rCsG9XCDbU1TXLiWjhnLf0X0kXPq+GiINNK3677Ggej9SE29ArmeHlxcWmB80CTUs6+vEnc68RRWLl+K5OQkaGtpoXETR6xZ+6346G9swCiknD+PjIz7MDY2gZu7O8YHTULNmlalXZbohd6kOaOaRvJkdNWqVfjss8+wbds2rFmzBrVq1QIA/P777+jWrZvEo6NXZaAvR/KFm/jh1zhsXjKyRHs9j2kq77u2a4qI2QOw40AiAMDG0gS7I8Zg2/6TmLBwC4wN9LBocj+sm/sJBkz+Vjxv7KD3Me6T9/H50igcP3MVBvq6sLP9b4J34eMibNx1HInnryPr34dwblQb4TP7Q0tLhtmrnjyaCfmsJ/p7tcZnX2xCSmo6PmjriM2LR6DzkCU4nXJDDZ8OkeY5EX8cPv0HoqmzM4oeF2Hl8iUYNcIfv/y2G9WrVwfwJBH97NPhGDb8U0ydPhM62tpISTkPLa3/Hua1frcNho8chRqWlriTno4lX4Vh0oRx+GHjz1LdGhG9JMnnjKoD54y+mR6dWlWiMvqsLUtGwLC6HrqPWgkAGNa3HWZ95gX7D6aj+Ee1aUNbnNj6OZr2CsGV6/dgaqSPy/vmo9/4CBw6fqHc4/lyYl+0cqoLD/9lAIAr++fjy2/24estsWLMT18Nx6O8Agyb8cNL3DGpA+eMVi0ZGRno3MEd363/Ea1cWwMABvX/GG3c2yJw7Phy93Mo5gDGjw1A/KlkVKtWTU2jJXWTcs6o/fjdaus7dZmX2vquCiSfM3ry5EkkJyeL73/99Vd4e3vj888/R0FBwXPOpKqmprkRurVvhvVR/32frVxXB4WFRXj6d6ZH+U9+Ltq6NAAAdGnTBFpaMtjWNMWp7TNwae8X+PHLYahtZVrmterXqYEP2jriz4T/ts7QraaDvIJClbhHeQVo26JBZdweEZUi599/AQDGJiYAgPv37yM56TTMLSwweKAvOndsi2F+g3Ay4USZfWRlZmL37p1o7tKCiSi9PDVuek/PJ3ky+umnn+LChSfVrCtXrsDX1xfVq1fH1q1bMWXKlBeen5+fj+zsbJWXoCxS97BJDQb1dMO/D/MQFZMoHjt0PAVWFsaYMLgLqulow9RIH/PG9gYAWFs++cfLvnYNaGnJMGVYV0z+ajsGTP4WZibVsWtNIKrpaKtc42BkEB4cXYqzv4Xg75OXMXfNf78J/xF3DmMHvY8GdS0hk8nwvlsT9H7fBdY1jNV+70SaSKlUIuzLBXBp0RIODo0AADdvXAcARISvQt+P/ofVX38DR0cnjPQfgmvXrqqcv3TxIri5uqBjOzcobt/G8lWrX/ctEFElkDwZvXDhAlxcXAAAW7duRceOHbFp0yZERkZi+/btLzw/NDQUJiYmKq/H6QkvPI/ePIN7t8Hm308gv+CxeOzcFQVGzNqAsZ90QUbcElz9YwGu3rwPxb1sCMonC49kMhl0q+lgYtg2/BF3DseTr8JvWiQa1q2JTq0bqVzjk+Dv4D7gS/hN+x4fdmiKCYO7iG2TFm3D5bQ7OP3LTGQfX4alU/+HH347CqWyys1kIXojLJg3B5cvXkTYV/9tsq38/7/XH33sA+8+/eDo6ITJUz9HPXt7RP2i+m/CkGH+2LxtByLWfQctLS3MmBaMKjjzjF4TmUymthc9n+QLmARBEP/P548//kCPHj0AAHXq1MG9e/deeP60adNKbI5fs0Nw5Q+U1KpdiwZobG+NT6Z+X6Jt894T2Lz3BGqaGyH3UT4E4cmCpdQb9wEAintPtgA7f+W/PWnvPcjBvcwc1LFW/QavG+mZYqyWlhbCZ/THsg0HoFQKuPcgBx8HrYNcVwcWJga4dTcL88b2RurN+2q6ayLNtWDeXMQePoTv1v8Iq6e+w7qGpSUAoH4D1ekx9vUbQHH7lsoxMzNzmJmZo149e9Sv3wBdu3RC0ulENHdpof4bIKJKI3ky6urqinnz5sHDwwOHDx/GmjVrADz56isrqxdv0SGXyyGXy1WOybS0y4imN5WftzsS/klD8oWbZcbcyXgyt2xw7zbIKyjEgaPnAQBxiVcAAA71auLmnUwAgJlxddQwNUTa7Ywy+9PSkqGajja0tGQq1c/8gse4dTcLOjpa8O7igu3RJ1/19ojo/wmCgND5XyDmQDS+jdyA2rXrqLTXqlUbljVr4mpqqsrxa1evon2HjmX2W1zU4FoDelmsYEpH8mR02bJlGDhwIKKiojB9+nQ0bNgQALBt2za0bdtW4tHRqzLQ10WDOpbi+3q1LPBOo1p4kP0Q1xUPAABGBnro+0ELTF2yo9Q+Rvl0xNHTV5DzsABd2jTBgvHemLnyV2TlPAIAXEq7g50HT+OryR8hcN5PyM7Jw9wxvZByNR2HTzyZj+z7oSsKHxfhzKVbyC94jFZOdfHFmF7Ytj9B3Ge0dTM72NY0xemUG6hV0xTTP+0OLS0ZlkT+oc6PiEijLPhiDn7fswvLVq6GQXUD3Lt7FwBgaGQEPT09yGQyDBnqjzXhK9G4cRM0buKI337dgaupV7B46QoAQFLSaZxNTkaLlq1gbGKM62lpWL1yOerUqcuqKNFbSPJk9J133lFZTV9s0aJF0NZmhfNt19LJDvu/GSe+D5vUDwCw4bejGDn7RwDA/zxbQQYZtuwtfbWsazM7zBjlBcPquki5mo7A+T/hp93xKjH+MzcgbFJf/LJiNJRKAX8lXETvgHAx0XxcpETQkA/gYFcTMpkMabczsGZzLFb+GCP2IZdXw+yAHrCvVQM5D/Ox7++z8J/5g5j0EtGr27L5JwCA/5BPVI7PnReK3n36AgAGDR6C/PwCLAoLRVZWFho3boKIdd+hTt26AAB9PT0c+GM/1oSvxKNHD1HD0hLt2ndA2KefQVdX9/XeEFUZLIxKh/uMEtFbhfuMElVdUu4z2nDS72rr+9JXH6qt76pA8spoUVERli5dii1btiAtLa3EfJ+MjLLn/BERERFVBs4ZlY7kWzvNmTMHS5YsgY+PD7KyshAUFIS+fftCS0sLISEhUg+PiIiINIBMpr4XPZ/kyejGjRuxbt06TJw4ETo6Oujfvz+++eYbzJo1C0ePHpV6eERERESkRpInowqFAs7OzgAAQ0NDZGVlAQB69OiB3bvV9z2xRERERMW46b10JE9Ga9eujdu3bwMAGjRogP379wMA4uPjS+wfSkRERERVi+TJaJ8+fXDgwAEAwJgxYzBz5kw4ODhg8ODBGDZsmMSjIyIiIk3AOaPSkXw1/cKFC8X/9vHxQd26dREXFwcHBwf07NlTwpERERERkbpJnow+y93dHe7u7lIPg4iIiDSIlhZLmFKRJBn97bffyh3bq1cvNY6EiIiIiKQkSTLq7e1drjiZTIaioiL1DoaIiIg0Hud2SkeSZFSpVEpxWSIiIqJScQsm6Ui+mp6IiIiINJdkyWhMTAycnJyQnZ1doi0rKwtNmzZFbGysBCMjIiIiTcOtnaQjWTK6bNkyjBgxAsbGxiXaTExM8Omnn2Lp0qUSjIyIiIiIXhfJktHTp0+jW7duZbZ37doVCQkJr3FEREREpKn4daDSkSwZTU9PR7Vq1cps19HRwd27d1/jiIiIiIjodZMsGa1VqxbOnDlTZntSUhJsbGxe44iIiIhIU7EyKh3JktHu3btj5syZyMvLK9H26NEjzJ49Gz169JBgZERERET0ukj2daAzZszAL7/8gkaNGiEwMBCNGzcGAJw/fx7h4eEoKirC9OnTpRoeERERaRAWMKUjWTJqZWWFI0eOYPTo0Zg2bRoEQQDwpEzu6emJ8PBwWFlZSTU8IiIi0iB8nC4dyZJRALCzs8OePXvw4MEDXLp0CYIgwMHBAWZmZlIOi4iIiIheE0mT0WJmZmZo3bq11MMgIiIiDcXCqHT4daBEREREJJk3ojJKREREJCXOGZUOK6NEREREJBlWRomIiEjjsTAqHVZGiYiIiEgyrIwSERGRxuOcUemwMkpERET0BomNjUXPnj1ha2sLmUyGqKgosa2wsBDBwcFwdnaGgYEBbG1tMXjwYNy6dUulj4yMDAwcOBDGxsYwNTWFv78/cnJyVGKSkpLQoUMH6OnpoU6dOggLCysxlq1bt6JJkybQ09ODs7Mz9uzZo9IuCAJmzZoFGxsb6Ovrw8PDAxcvXqzQ/TIZJSIiIo0nk6nvVVG5ublo3rw5wsPDS7Q9fPgQJ0+exMyZM3Hy5En88ssvSElJQa9evVTiBg4ciLNnzyI6Ohq7du1CbGwsRo4cKbZnZ2eja9eusLOzQ0JCAhYtWoSQkBCsXbtWjDly5Aj69+8Pf39/nDp1Ct7e3vD29saZM2fEmLCwMKxYsQIRERE4duwYDAwM4Onpiby8vHLfr0wo/h7OKkS/RaDUQyAiNXkQv0rqIRCRmuhJOHnQLfSw2vo+Nq3TS58rk8mwY8cOeHt7lxkTHx+Pd999F9euXUPdunVx7tw5ODk5IT4+Hq6urgCAvXv3onv37rhx4wZsbW2xZs0aTJ8+HQqFArq6ugCAqVOnIioqCufPnwcA+Pj4IDc3F7t27RKv1aZNG7i4uCAiIgKCIMDW1hYTJ07EpEmTAABZWVmwsrJCZGQkfH19y3WPrIwSERERqVF+fj6ys7NVXvn5+ZXWf1ZWFmQyGUxNTQEAcXFxMDU1FRNRAPDw8ICWlhaOHTsmxnTs2FFMRAHA09MTKSkpePDggRjj4eGhci1PT0/ExcUBAFJTU6FQKFRiTExM4ObmJsaUB5NRIiIi0njqfEwfGhoKExMTlVdoaGiljDsvLw/BwcHo378/jI2NAQAKhQI1a9ZUidPR0YG5uTkUCoUYY2VlpRJT/P5FMU+3P31eaTHlwdX0RERERGo0bdo0BAUFqRyTy+Wv3G9hYSE+/vhjCIKANWvWvHJ/UmEySkRERBpPnVs7yeXySkk+n1aciF67dg0xMTFiVRQArK2tcefOHZX4x48fIyMjA9bW1mJMenq6Skzx+xfFPN1efMzGxkYlxsXFpdz3wsf0RERERG+R4kT04sWL+OOPP2BhYaHS7u7ujszMTCQkJIjHYmJioFQq4ebmJsbExsaisLBQjImOjkbjxo1hZmYmxhw4cECl7+joaLi7uwMA7O3tYW1trRKTnZ2NY8eOiTHlwWSUiIiINN6btLVTTk4OEhMTkZiYCODJQqHExESkpaWhsLAQH330EU6cOIGNGzeiqKgICoUCCoUCBQUFAABHR0d069YNI0aMwPHjx/H3338jMDAQvr6+sLW1BQAMGDAAurq68Pf3x9mzZ7F582YsX75cZTrBuHHjsHfvXixevBjnz59HSEgITpw4gcDAwP//zGQYP3485s2bh99++w3JyckYPHgwbG1tn7v6v8Rnz62diOhtwq2diKouKbd2ahsWq7a+j0zpWKH4Q4cOoXPnziWO+/n5ISQkBPb29qWed/DgQbz33nsAnmx6HxgYiJ07d0JLSwv9+vXDihUrYGhoKMYnJSUhICAA8fHxqFGjBsaMGYPg4GCVPrdu3YoZM2bg6tWrcHBwQFhYGLp37y62C4KA2bNnY+3atcjMzET79u2xevVqNGrUqNz3y2SUiN4qTEaJqi4pk9F2i/5UW99/T+6gtr6rAi5gIiIiIo3Hr6aXDueMEhEREZFkWBklIiIijafOrZ3o+VgZJSIiIiLJsDJKREREGo+VUemwMkpEREREkmFllIiIiDQeC6PSYWWUiIiIiCTDyigRERFpPM4ZlQ6TUSIiItJ4zEWlw8f0RERERCQZVkaJiIhI4/ExvXRYGSUiIiIiybAySkRERBqPhVHpsDJKRERERJJhZZSIiIg0nhZLo5JhZZSIiIiIJMPKKBEREWk8Fkalw2SUiIiINB63dpIOH9MTERERkWRYGSUiIiKNp8XCqGRYGSUiIiIiybAySkRERBqPc0alw8ooEREREUmGlVEiIiLSeCyMSoeVUSIiIiKSDCujREREpPFkYGlUKkxGiYiISONxayfp8DE9EREREUmGlVEiIiLSeNzaSTqsjBIRERGRZFgZJSIiIo3Hwqh0WBklIiIiIsmwMkpEREQaT4ulUcmwMkpEREREkmFllIiIiDQeC6PSYTJKREREGo9bO0mnXMloUlJSuTt85513XnowRERERKRZypWMuri4QCaTQRCEUtuL22QyGYqKiip1gERERETqxsKodMqVjKampqp7HERERESkgcqVjNrZ2al7HERERESS4dZO0nmprZ02bNiAdu3awdbWFteuXQMALFu2DL/++mulDo6IiIhI08TGxqJnz56wtbWFTCZDVFSUSrsgCJg1axZsbGygr68PDw8PXLx4USUmIyMDAwcOhLGxMUxNTeHv74+cnByVmKSkJHTo0AF6enqoU6cOwsLCSoxl69ataNKkCfT09ODs7Iw9e/ZUeCwvUuFkdM2aNQgKCkL37t2RmZkpzhE1NTXFsmXLKtodERERkeRkanxVVG5uLpo3b47w8PBS28PCwrBixQpERETg2LFjMDAwgKenJ/Ly8sSYgQMH4uzZs4iOjsauXbsQGxuLkSNHiu3Z2dno2rUr7OzskJCQgEWLFiEkJARr164VY44cOYL+/fvD398fp06dgre3N7y9vXHmzJkKjeVFZEJZq5LK4OTkhAULFsDb2xtGRkY4ffo06tevjzNnzuC9997DvXv3KtKdWui3CJR6CESkJg/iV0k9BCJSEz0JN5z0XX9KbX3/7Nfipc+VyWTYsWMHvL29ATypRNra2mLixImYNGkSACArKwtWVlaIjIyEr68vzp07BycnJ8THx8PV1RUAsHfvXnTv3h03btyAra0t1qxZg+nTp0OhUEBXVxcAMHXqVERFReH8+fMAAB8fH+Tm5mLXrl3ieNq0aQMXFxdERESUayzlUeHKaGpqKlq0KPmhyuVy5ObmVrQ7IiIiIsnJZDK1vfLz85Gdna3yys/Pf6lxpqamQqFQwMPDQzxmYmICNzc3xMXFAQDi4uJgamoqJqIA4OHhAS0tLRw7dkyM6dixo5iIAoCnpydSUlLw4MEDMebp6xTHFF+nPGMpjwono/b29khMTCxxfO/evXB0dKxod0RERESS05Kp7xUaGgoTExOVV2ho6EuNU6FQAACsrKxUjltZWYltCoUCNWvWVGnX0dGBubm5SkxpfTx9jbJinm5/0VjKo8IF8aCgIAQEBCAvLw+CIOD48eP46aefEBoaim+++aai3RERERFVadOmTUNQUJDKMblcLtFo3jwVTkaHDx8OfX19zJgxAw8fPsSAAQNga2uL5cuXl3tuABEREdGbRJ1fByqXyyst+bS2tgYApKenw8bGRjyenp4OFxcXMebOnTsq5z1+/BgZGRni+dbW1khPT1eJKX7/opin2180lvJ4qa2dBg4ciIsXLyInJwcKhQI3btyAv7//y3RFREREROVkb28Pa2trHDhwQDyWnZ2NY8eOwd3dHQDg7u6OzMxMJCQkiDExMTFQKpVwc3MTY2JjY1FYWCjGREdHo3HjxjAzMxNjnr5OcUzxdcozlvJ46XVrd+7cQUpKCoAnv01YWlq+bFdEREREknqT9rzPycnBpUuXxPepqalITEyEubk56tati/Hjx2PevHlwcHCAvb09Zs6cCVtbW3HFvaOjI7p164YRI0YgIiIChYWFCAwMhK+vL2xtbQEAAwYMwJw5c+Dv74/g4GCcOXMGy5cvx9KlS8Xrjhs3Dp06dcLixYvh5eWFn3/+GSdOnBC3f5LJZC8cS3lUOBn9999/8dlnn+Gnn36CUqkEAGhra8PHxwfh4eEwMTGpaJdERERE9P9OnDiBzp07i++L55v6+fkhMjISU6ZMQW5uLkaOHInMzEy0b98ee/fuhZ6ennjOxo0bERgYiC5dukBLSwv9+vXDihUrxHYTExPs378fAQEBaNWqFWrUqIFZs2ap7EXatm1bbNq0CTNmzMDnn38OBwcHREVFoVmzZmJMecbyIhXeZ9THxwenTp3CypUrxRJsXFwcxo0bBxcXF/z8888V6U4tuM8oUdXFfUaJqi4p9xkdvClJbX3/MOAdtfVdFVT4j33Xrl3Yt28f2rdvLx7z9PTEunXr0K1bt0odHBERERFVbRVORi0sLEp9FG9iYiJOeCUiIiJ6m2i9QXNGNU2FV9PPmDEDQUFBKpuZKhQKTJ48GTNnzqzUwRERERG9Dur8BiZ6vnJVRlu0aKHyYV68eBF169ZF3bp1AQBpaWmQy+W4e/cuPv30U/WMlIiIiIiqnHIloxVZnk9ERET0tmH9UjrlSkZnz56t7nEQERERkQaScBMFIiIiojeDFud2SqbCyWhRURGWLl2KLVu2IC0tDQUFBSrtGRkZlTY4IiIiIqraKryafs6cOViyZAl8fHyQlZWFoKAg9O3bF1paWggJCVHDEImIiIjUSyZT34uer8LJ6MaNG7Fu3TpMnDgROjo66N+/P7755hvMmjULR48eVccYiYiIiKiKqnAyqlAo4OzsDAAwNDREVlYWAKBHjx7YvXt35Y6OiIiI6DXgPqPSqXAyWrt2bdy+fRsA0KBBA+zfvx8AEB8fD7lcXrmjIyIiIqIqrcLJaJ8+fXDgwAEAwJgxYzBz5kw4ODhg8ODBGDZsWKUPkIiIiEjdOGdUOhVeTb9w4ULxv318fGBnZ4cjR47AwcEBPXv2rNTBEREREb0O3NpJOhWujD6rTZs2CAoKgpubGxYsWFAZYyIiIiIiDfHKyWix27dvY+bMmZXVHREREdFrw8f00qm0ZJSIiIiIqKL4daBERESk8bgFk3RYGSUiIiIiyZS7MhoUFPTc9rt3777yYCpL6uGlUg+BiNSk8LFS6iEQkZro6UhXI2N1TjrlTkZPnTr1wpiOHTu+0mCIiIiISLOUOxk9ePCgOsdBREREJBnOGZUOFzARERGRxtNiLioZTpEgIiIiIsmwMkpEREQaj5VR6bAySkRERESSYWWUiIiINB4XMEnnpSqjf/75JwYNGgR3d3fcvHkTALBhwwb89ddflTo4IiIiIqraKpyMbt++HZ6entDX18epU6eQn58PAMjKysKCBQsqfYBERERE6qYlU9+Lnq/Cyei8efMQERGBdevWoVq1auLxdu3a4eTJk5U6OCIiIiKq2io8ZzQlJaXUb1oyMTFBZmZmZYyJiIiI6LXilFHpVLgyam1tjUuXLpU4/tdff6F+/fqVMigiIiKi10lLJlPbi56vwsnoiBEjMG7cOBw7dgwymQy3bt3Cxo0bMWnSJIwePVodYyQiIiKiKqrCj+mnTp0KpVKJLl264OHDh+jYsSPkcjkmTZqEMWPGqGOMRERERGrFjdelIxMEQXiZEwsKCnDp0iXk5OTAyckJhoaGlT22l6bILpR6CESkJga62lIPgYjUxEhPupTw8z0X1Nb3gu6N1NZ3VfDSm97r6urCycmpMsdCREREJAlO7ZROhZPRzp07P/dbCmJiYl5pQERERESkOSqcjLq4uKi8LywsRGJiIs6cOQM/P7/KGhcRERHRa8NV79KpcDK6dOnSUo+HhIQgJyfnlQdERERERJqj0mYKDxo0CN99911ldUdERET02shk6nvR81VaMhoXFwc9Pb3K6o6IiIjotXlTvpu+qKgIM2fOhL29PfT19dGgQQN88cUXeHrzI0EQMGvWLNjY2EBfXx8eHh64ePGiSj8ZGRkYOHAgjI2NYWpqCn9//xJPsJOSktChQwfo6emhTp06CAsLKzGerVu3okmTJtDT04OzszP27NlTsRsqhwo/pu/bt6/Ke0EQcPv2bZw4cQIzZ86stIERERERaZovv/wSa9aswfr169G0aVOcOHECQ4cOhYmJCcaOHQsACAsLw4oVK7B+/XrY29tj5syZ8PT0xD///CMWBgcOHIjbt28jOjoahYWFGDp0KEaOHIlNmzYBALKzs9G1a1d4eHggIiICycnJGDZsGExNTTFy5EgAwJEjR9C/f3+EhoaiR48e2LRpE7y9vXHy5Ek0a9as0u65wvuMDh06VOW9lpYWLC0t8f7776Nr166VNrBXwX1Giaou7jNKVHVJuc/o3OiSX3VeWWZ90LDcsT169ICVlRW+/fZb8Vi/fv2gr6+PH3/8EYIgwNbWFhMnTsSkSZMAAFlZWbCyskJkZCR8fX1x7tw5ODk5IT4+Hq6urgCAvXv3onv37rhx4wZsbW2xZs0aTJ8+HQqFArq6ugCefLFRVFQUzp8/DwDw8fFBbm4udu3aJY6lTZs2cHFxQURExCt/LsUqVBktKirC0KFD4ezsDDMzs0obBBEREVFVlZ+fj/z8fJVjcrkccrm8RGzbtm2xdu1aXLhwAY0aNcLp06fx119/YcmSJQCA1NRUKBQKeHh4iOeYmJjAzc0NcXFx8PX1RVxcHExNTcVEFAA8PDygpaWFY8eOoU+fPoiLi0PHjh3FRBQAPD098eWXX+LBgwcwMzNDXFwcgoKCVMbn6emJqKioyvhYRBX6FURbWxtdu3ZFZmZmpQ6CiIiISErqXMAUGhoKExMTlVdoaGip45g6dSp8fX3RpEkTVKtWDS1atMD48eMxcOBAAIBCoQAAWFlZqZxnZWUltikUCtSsWVOlXUdHB+bm5ioxpfXx9DXKiilurywVnjParFkzXLlyBfb29pU6ECIiIqKqaNq0aSUqjKVVRQFgy5Yt2LhxIzZt2oSmTZsiMTER48ePh62tbZXdz73Cyei8efMwadIkfPHFF2jVqhUMDAxU2o2NjSttcERERESvQ0VXvVdEWY/kSzN58mSxOgoAzs7OuHbtGkJDQ+Hn5wdra2sAQHp6OmxsbMTz0tPTxS8msra2xp07d1T6ffz4MTIyMsTzra2tkZ6erhJT/P5FMcXtlaXcj+nnzp2L3NxcdO/eHadPn0avXr1Qu3ZtmJmZwczMDKamppxHSkRERPQKHj58CC0t1fRMW1sbSqUSAGBvbw9ra2scOHBAbM/OzsaxY8fg7u4OAHB3d0dmZiYSEhLEmJiYGCiVSri5uYkxsbGxKCz8b9F3dHQ0GjduLOZz7u7uKtcpjim+TmUpd2V0zpw5GDVqFA4ePFipAyAiIiKSmgxvxu70PXv2xPz581G3bl00bdoUp06dwpIlSzBs2DAAgEwmw/jx4zFv3jw4ODiIWzvZ2trC29sbAODo6Ihu3bphxIgRiIiIQGFhIQIDA+Hr6wtbW1sAwIABAzBnzhz4+/sjODgYZ86cwfLly1W+aXPcuHHo1KkTFi9eDC8vL/z88884ceIE1q5dW6n3XO6tnbS0tEqdEPsm4tZORFUXt3Yiqrqk3NppYcxltfU99f0G5Y79999/MXPmTOzYsQN37tyBra0t+vfvj1mzZokr3wVBwOzZs7F27VpkZmaiffv2WL16NRo1aiT2k5GRgcDAQOzcuRNaWlro168fVqxYAUNDQzEmKSkJAQEBiI+PR40aNTBmzBgEBwerjGfr1q2YMWMGrl69CgcHB4SFhaF79+6v+ImoqlAymp6eDktLy0odgDowGSWqupiMElVdTEY1U4UWMDVq1AiyF3zJakZGxisNiIiIiOh1U+cCJnq+CiWjc+bMgYmJibrGQkREREQapkLJqK+v71sxZ5SIiIioIl705JfUp9yTM/iHRERERESVrdyV0XKucyIiIiJ663DOqHTKnYwWb7ZKRERERFRZKvx1oERERERVDWcjSofJKBEREWk8LWajkpFud1kiIiIi0nisjBIREZHG4wIm6bAySkRERESSYWWUiIiINB6njEqHlVEiIiIikgwro0RERKTxtMDSqFRYGSUiIiIiybAySkRERBqPc0alw2SUiIiINB63dpIOH9MTERERkWRYGSUiIiKNx68DlQ4ro0REREQkGVZGiYiISOOxMCodVkaJiIiISDKsjBIREZHG45xR6bAySkRERESSYWWUiIiINB4Lo9JhMkpEREQaj4+KpcPPnoiIiIgkw8ooERERaTwZn9NLhpVRIiIiIpIMK6NERESk8VgXlQ4ro0REREQkGVZGiYiISONx03vpsDJKRERERJJhZZSIiIg0Huui0mEySkRERBqPT+mlw8f0RERERCQZVkaJiIhI43HTe+mwMkpEREREkmFllIiIiDQeq3PS4WdPRERERJJhZZSIiIg0HueMSoeVUSIiIqI3yM2bNzFo0CBYWFhAX18fzs7OOHHihNguCAJmzZoFGxsb6Ovrw8PDAxcvXlTpIyMjAwMHDoSxsTFMTU3h7++PnJwclZikpCR06NABenp6qFOnDsLCwkqMZevWrWjSpAn09PTg7OyMPXv2VPr9MhklIiIijSdT46siHjx4gHbt2qFatWr4/fff8c8//2Dx4sUwMzMTY8LCwrBixQpERETg2LFjMDAwgKenJ/Ly8sSYgQMH4uzZs4iOjsauXbsQGxuLkSNHiu3Z2dno2rUr7OzskJCQgEWLFiEkJARr164VY44cOYL+/fvD398fp06dgre3N7y9vXHmzJkK3tXzyQRBECq1xzeAIrtQ6iEQkZoY6GpLPQQiUhMjPelqZFsTb6mt7/+52JY7durUqfj777/x559/ltouCAJsbW0xceJETJo0CQCQlZUFKysrREZGwtfXF+fOnYOTkxPi4+Ph6uoKANi7dy+6d++OGzduwNbWFmvWrMH06dOhUCigq6srXjsqKgrnz58HAPj4+CA3Nxe7du0Sr9+mTRu4uLggIiLipT6L0rAySkRERBpPJpOp7ZWfn4/s7GyVV35+fqnj+O233+Dq6or//e9/qFmzJlq0aIF169aJ7ampqVAoFPDw8BCPmZiYwM3NDXFxcQCAuLg4mJqaiokoAHh4eEBLSwvHjh0TYzp27CgmogDg6emJlJQUPHjwQIx5+jrFMcXXqSxMRomIiEjjaanxFRoaChMTE5VXaGhoqeO4cuUK1qxZAwcHB+zbtw+jR4/G2LFjsX79egCAQqEAAFhZWamcZ2VlJbYpFArUrFlTpV1HRwfm5uYqMaX18fQ1yoopbq8sXE1PREREpEbTpk1DUFCQyjG5XF5qrFKphKurKxYsWAAAaNGiBc6cOYOIiAj4+fmpfaxSYGWUiIiINJ46H9PL5XIYGxurvMpKRm1sbODk5KRyzNHREWlpaQAAa2trAEB6erpKTHp6uthmbW2NO3fuqLQ/fvwYGRkZKjGl9fH0NcqKKW6vLExGiYiIiN4Q7dq1Q0pKisqxCxcuwM7ODgBgb28Pa2trHDhwQGzPzs7GsWPH4O7uDgBwd3dHZmYmEhISxJiYmBgolUq4ubmJMbGxsSgs/G/Rd3R0NBo3biyu3Hd3d1e5TnFM8XUqC5NRIiIi0nhvytZOEyZMwNGjR7FgwQJcunQJmzZtwtq1axEQEPBknDIZxo8fj3nz5uG3335DcnIyBg8eDFtbW3h7ewN4Uknt1q0bRowYgePHj+Pvv/9GYGAgfH19YWv7ZGX/gAEDoKurC39/f5w9exabN2/G8uXLVaYTjBs3Dnv37sXixYtx/vx5hISE4MSJEwgMDKzgXT0ft3YiorcKt3Yiqrqk3NopKqlyF+U8zfudij3W3rVrF6ZNm4aLFy/C3t4eQUFBGDFihNguCAJmz56NtWvXIjMzE+3bt8fq1avRqFEjMSYjIwOBgYHYuXMntLS00K9fP6xYsQKGhoZiTFJSEgICAhAfH48aNWpgzJgxCA4OVhnL1q1bMWPGDFy9ehUODg4ICwtD9+7dX/KTKB2TUSJ6qzAZJaq6pExGf01WXzLa27ly51hWNXxMT0RERESS4dZOREREpPG0Kjy7kyoLk1EiIiLSeDLmopLhY3oiIiIikgwro0RERKTxZHxMLxlWRomIiIhIMqyMEhERkcbjnFHpsDJKRERERJJhZZSIiIg0Hrd2ks4bWxlNT0/H3LlzpR4GEREREanRG5uMKhQKzJkzR+phEBERkQaQydT3oueT7DF9UlLSc9tTUlJe00iIiIhI0zFplI5kyaiLiwtkMhkEQSjRVnxcxp8MIiIioipNsmTU3NwcYWFh6NKlS6ntZ8+eRc+ePV/zqIiIiEgTcdN76UiWjLZq1Qq3bt2CnZ1dqe2ZmZmlVk2JiIiIqOqQLBkdNWoUcnNzy2yvW7cuvv/++9c4IiIiItJUWiyMSkYmVMHyoyK7UOohEJGaGOhqSz0EIlITIz3pNvk5cP6e2vru0qSG2vquCrjpPREREWk8zhmVzhu7zygRERERVX2sjBIREZHG426S0mEySkRERBqPj+mlw8f0RERERCQZyZPRvXv34q+//hLfh4eHw8XFBQMGDMCDBw8kHBkRERFpCi2Z+l70fJIno5MnT0Z2djYAIDk5GRMnTkT37t2RmpqKoKAgiUdHREREROok+ZzR1NRUODk5AQC2b9+OHj16YMGCBTh58iS6d+8u8eiIiIhIE3DOqHQkr4zq6uri4cOHAIA//vgDXbt2BfDku+uLK6ZEREREVDVJnoy2b98eQUFB+OKLL3D8+HF4eXkBAC5cuIDatWtLPDpSh9MnT2DqhAD0/bAzOrVuhj8PHSgzdnHoHHRq3QxbN20o0Rb312GMGtIfH7RvBa/322L6pLFiW1ZmJiaP+RR9P+wMj7Yt8JFXFywLm4/cnByVPqJ/34VhA/qia3tX9On2HhbOnYGszMxKu1ciTff1mlVwbe6o8urXu+RTL0EQMPazkXBt7ohDMX+UaN/56w74ftQbbVs3xwfvtcOXC+aqtF+8kILhQwahbevm8OraGeu//0Zt90RVk0ymvhc9n+SP6VetWoXPPvsM27Ztw5o1a1CrVi0AwO+//45u3bpJPDpSh0ePHqFho8bo3qsPZk4ZX2Zc7ME/8E9yEmpY1izRdjgmGovmz8aIz8ahpasbioqKcOXyRbFdS0uGdp06w3/0GJiamePm9TQsC5uP7OwszJoXBgBIPn0SC0I+R8CEKWjX4T3cvXsHS0LnYtH82Zi3aHml3zeRpqrfoCFWr/1OfK+jXfKfnk0/rkdZT0l//CESG3/4HuOCJqOZ8zt49OgRbt26Kbbn5OQgcNRwvOvmjmkzZuPSxQuYGzIDRkbG6PvRx5V+P0RUuSRPRuvWrYtdu3aVOL506VIJRkOvQ5t2HdCmXYfnxty9k44VX4Vi0YqvMXXCZyptjx8/xsrFCzF67ER49e4nHq9Xv4H430bGJvD+yFd8b21ji94f+eDnDd+Lx84mnYa1jS0+8h0EALCpVRs9+/4PP/3w3z+aRPTqdHR0UKOGZZntKefPYeMPkfjhp63o1qWjSlt2dhbWhC/H0hWr8a6bu3jcoVFj8b/37tmJwsJCzJo7D9Wq6aJBQwdcSDmPjRsimYxSubGAKR3JH9OfPHkSycnJ4vtff/0V3t7e+Pzzz1FQUCDhyEgqSqUS82dPg++gIbBv0LBE+8WUc7h7Jx0ymRb8B36EPt3ew+Sxo3Dl0sVSenvi3t07+PPgH3Bp6Soea/pOc9xJV+Do37EQBAEZ9+/h8IFouLV9fqJMRBWTdu0aunl0RO/uH2DGtMlQ3L4ltuU9eoQZ0yZjyuczS01Yj8UdgaBU4s6ddHzk7YXuH7yHqZMnQKG4LcYknU5Ei1auqFZNVzzm3rY9rl1NRXZ2lnpvjqoMLZlMbS96PsmT0U8//RQXLlwAAFy5cgW+vr6oXr06tm7diilTprzw/Pz8fGRnZ6u88vPz1T1sUqNN67+FtrY2+v1/xfJZt25eBwBErluNwf6fYuHScBgZG2P8qKHIzlL9h2fO9Mno2t4V/bq/j+oGhpg84795Zs7NW2LGF18i5PNJ6OLeAn26vQcDQ0NMCJ6uvpsj0jDNnN9ByBcLsHL1OkydPhu3bt7A8KGDkJubCwBYvGgh3mnugvc6dyn1/Js3bkCpFPD9N2sxcfI0fLl4GbKzMhHwqT8KC58ULO7fuwdzcwuV88wtLMQ2InqzSZ6MXrhwAS4uLgCArVu3omPHjti0aRMiIyOxffv2F54fGhoKExMTldfKJV+qedSkLinnzmL7zz9i2uz5kJXx26RSKQAABg0diU7vf4DGjk0xddY8QCbDoQP7VGIDJwRj3Y9bsOCrlbh14zrCl4aJbVevXMbKxQvhN3wU1m3YjEUrvobi9k0sDlVdGEFEL69d+47w6NoNDo0aw71deyxf9TX+/fdfRO/7HYcPxeBE/FFMnDKtzPOVghKPHxdicvB0uLdrD+d3XDB/4WJcT7uGE8ePv8Y7oapOpsYXPZ/kc0YFQYBSqQTwZGunHj16AADq1KmDe+X4jXbatGklNsd/kC95jk0vKenUSTx4kIGPe34gHisqKsLq5Yuw7ecN2Pzbflj8/6O8p+eI6urqwrZWbaQ/9egOACxq1IBFjRqwq1cfRiYmGDNiMPyGj4JFDUv8GLkOzZq3QP9PhgEAGjg0hp6+PsaMGIzho8eK1yGiymNkbAw7u3q4cT0Nly9dwI3r19G5vZtKzJSJ4+DSshXWfvuD+OjevsF/f9/NzM1hamoGheLJ436LGjWQkXFfpY+M+/fFNiJ6s0mejLq6umLevHnw8PDA4cOHsWbNGgBPNsO3srJ64flyuRxyuVzl2MPsQrWMldSva/eeaPVuG5Vjk8d+iq4f9sSHPb0BAI2bOEFXVxfXr6XiHZeWAIDHjwuhuH0TVta2ZfYt/P8vPcVzkfPz8qCtra0So6X15BcZQRAq5X6ISNXDh7m4cf06unv1godnN/Tu85FKu+9HvRE0aSo6dOoMAGju0gIAcO1qKqysrAEAWVmZyMx8ABubJ7uvvNPcBatXLsfjwkLoVKsGADh29Ajs6tnD2Njkdd0ave1YwpSM5MnosmXLMHDgQERFRWH69Olo2PDJgpVt27ahbdu2Eo+O1OHhw4e4eT1NfH/71k1cTDkPYxMTWFnbwMTUVCVeR0cH5hY1ULeePQDAwNAQvfp+jO/XrkZNK2tYWdvi5x+frJLv7PHkSxOO/h2LjPv30cSpGfSrV8fVK5ewZsViODdvARvbJ/+Ate3wHhbND0HUtp/xbpt2uH//LlYu/hKOTZ1L3U6KiCpu2eIwdOj0HmxsauHu3Tv4es1KaGlrwfNDL5iZm5e6aMnaxga1/n+fabt69ujUuQu++nIBps+aCwMDA4SvWIp69ezh2vpdAEC3D3tgXcRqzA2ZAb+hw3H50kX8tHEDgiZPfa33SkQvR/Jk9J133lFZTV9s0aJFJapWVDWknDuD8aOGie+L53F28+qNaSHzy9XH6HEToa2tjfmzpyE/Px+OTZ2xdPV3MPr/KoiuXA+7orYhfGkYCgoLUNPKGh3f88CAIf5iHx/29MbDh7nYseUnrF72FQyNjNDS9V18OiaorMsSUQWlpyswfeokZGVmwszMHM1btETkhp9hZm5e7j7mzFuIJYtCMT5wFLS0ZGjZqjVWrFknVkENjYywKuIbfLngC3zS/yOYmpph+Kejua0TVQi/DlQ6MqEKPo9U8DE9UZVloMtfUomqKiM96dZ8HLusvm3A3BpwusjzSF4ZLSoqwtKlS7FlyxakpaWV2Fs0IyNDopERERGRpuB2oNKRfNn5nDlzsGTJEvj4+CArKwtBQUHo27cvtLS0EBISIvXwiIiISANwayfpSP6YvkGDBlixYgW8vLxgZGSExMRE8djRo0exadOmCvfJx/REVRcf0xNVXVI+po+/or7H9K3r8zH980heGVUoFHB2dgYAGBoaIuv/v0GnR48e2L17t5RDIyIiIk3xhpZGFy5cCJlMhvHjx4vH8vLyEBAQAAsLCxgaGqJfv35IT09XOS8tLQ1eXl6oXr06atasicmTJ+Px48cqMYcOHULLli0hl8vRsGFDREZGlrh+eHg46tWrBz09Pbi5ueG4Gr5sQvJktHbt2rh9+8lG5Q0aNMD+/fsBAPHx8SX2DyUiIiLSFPHx8fj666/xzjvvqByfMGECdu7cia1bt+Lw4cO4desW+vbtK7YXFRXBy8sLBQUFOHLkCNavX4/IyEjMmjVLjElNTYWXlxc6d+6MxMREjB8/HsOHD8e+ff99k+HmzZsRFBSE2bNn4+TJk2jevDk8PT1x586dSr1PyR/TT506FcbGxvj888+xefNmDBo0CPXq1UNaWhomTJiAhQsXVrhPPqYnqrr4mJ6o6pLyMf2J1Gy19e1qb1zhc3JyctCyZUusXr0a8+bNg4uLC5YtW4asrCxYWlpi06ZN+OijJ18acf78eTg6OiIuLg5t2rTB77//jh49euDWrVviFwhFREQgODgYd+/eha6uLoKDg7F7926cOXNGvKavry8yMzOxd+9eAICbmxtat26NVatWAQCUSiXq1KmDMWPGYOrUytvHV/LK6MKFC/H5558DAHx8fBAbG4vRo0dj27ZtL5WIEhEREb1J8vPzkZ2drfLKz89/7jkBAQHw8vKCh4eHyvGEhAQUFhaqHG/SpAnq1q2LuLg4AEBcXBycnZ1VvsnS09MT2dnZOHv2rBjzbN+enp5iHwUFBUhISFCJ0dLSgoeHhxhTWSTf2ulZ7u7ucHd3l3oYREREpEHUubVTaGgo5syZo3Js9uzZZe4a9PPPP+PkyZOIj48v0aZQKKCrqwvTZ76t0MrKCgqFQox59ivVi9+/KCY7OxuPHj3CgwcPUFRUVGrM+fPnn3/DFSRJMvrbb7+VO7ZXr15qHAkRERGRek2bNg1BQarf7lfWupjr169j3LhxiI6Ohp6e3usYnuQkSUa9vb3LFSeTyVBUVKTewRAREZHGU+d+oHK5vNyLshMSEnDnzh20bNlSPFZUVITY2FisWrUK+/btQ0FBATIzM1Wqo+np6bC2tgYAWFtbl1j1Xrza/umYZ1fgp6enw9jYGPr6+tDW1oa2tnapMcV9VBZJ5owqlcpyvZiIEhER0Wvxhmzt1KVLFyQnJyMxMVF8ubq6YuDAgeJ/V6tWDQcOHBDPSUlJQVpamjjN0d3dHcnJySqr3qOjo2FsbAwnJycx5uk+imOK+9DV1UWrVq1UYpRKJQ4cOFDp0ynfuDmjRERERJrKyMgIzZo1UzlmYGAACwsL8bi/vz+CgoJgbm4OY2NjjBkzBu7u7mjTpg0AoGvXrnBycsInn3yCsLAwKBQKzJgxAwEBAWKFdtSoUVi1ahWmTJmCYcOGISYmBlu2bFHZ4z0oKAh+fn5wdXXFu+++i2XLliE3NxdDhw6t1HuWbDV9TEwMnJyckJ1dciuFrKwsNG3aFLGxsRKMjIiIiDSNTI3/q2xLly5Fjx490K9fP3Ts2BHW1tb45ZdfxHZtbW3s2rUL2tracHd3x6BBgzB48GDMnTtXjLG3t8fu3bsRHR2N5s2bY/Hixfjmm2/g6ekpxvj4+OCrr77CrFmz4OLigsTEROzdu7fEoqZXJdk+o7169ULnzp0xYcKEUttXrFiBgwcPYseOHRXum/uMElVd3GeUqOqScp/RU9f+VVvfLeyM1NZ3VSDZn/rp06fRrVu3Mtu7du2KhISE1zgiIiIi0lQymfpe9HySJaPp6emoVq1ame06Ojq4e/fuaxwREREREb1ukiWjtWrVUvkKqmclJSXBxsbmNY6IiIiINNUbspheI0mWjHbv3h0zZ85EXl5eibZHjx5h9uzZ6NGjhwQjIyIiIqLXRbIFTOnp6WjZsiW0tbURGBiIxo0bAwDOnz+P8PBwFBUV4eTJky+1YosLmIiqLi5gIqq6pFzAdPq6+hYwNa/DBUzPI1kyCgDXrl3D6NGjsW/fPhQPQyaTwdPTE+Hh4bC3t3+pfpmMElVdTEaJqi4pk9Gk6zlq6/udOoZq67sqkDQZLfbgwQNcunQJgiDAwcEBZmZmr9Qfk1GiqovJKFHVxWRUM70R38BkZmaG1q1bSz0MIiIi0lDcgkk60v0KQkREREQa742ojBIRERFJiYVR6bAySkRERESSYWWUiIiIiKVRybAySkRERESSYWWUiIiINJ6MpVHJsDJKRERERJJhZZSIiIg0HvcZlQ6TUSIiItJ4zEWlw8f0RERERCQZVkaJiIiIWBqVDCujRERERCQZVkaJiIhI43FrJ+mwMkpEREREkmFllIiIiDQet3aSDiujRERERCQZVkaJiIhI47EwKh0mo0RERETMRiXDx/REREREJBlWRomIiEjjcWsn6bAySkRERESSYWWUiIiINB63dpIOK6NEREREJBlWRomIiEjjsTAqHVZGiYiIiEgyrIwSERERsTQqGSajREREpPG4tZN0+JieiIiIiCTDyigRERFpPG7tJB1WRomIiIhIMqyMEhERkcZjYVQ6rIwSERERkWRYGSUiIiJiaVQyrIwSERERvSFCQ0PRunVrGBkZoWbNmvD29kZKSopKTF5eHgICAmBhYQFDQ0P069cP6enpKjFpaWnw8vJC9erVUbNmTUyePBmPHz9WiTl06BBatmwJuVyOhg0bIjIyssR4wsPDUa9ePejp6cHNzQ3Hjx+v9HtmMkpEREQaT6bG/1XE4cOHERAQgKNHjyI6OhqFhYXo2rUrcnNzxZgJEyZg586d2Lp1Kw4fPoxbt26hb9++YntRURG8vLxQUFCAI0eOYP369YiMjMSsWbPEmNTUVHh5eaFz585ITEzE+PHjMXz4cOzbt0+M2bx5M4KCgjB79mycPHkSzZs3h6enJ+7cufMKn3RJMkEQhErt8Q2gyC6UeghEpCYGutpSD4GI1MRIT7oaWVpGvtr6rmsuf+lz7969i5o1a+Lw4cPo2LEjsrKyYGlpiU2bNuGjjz4CAJw/fx6Ojo6Ii4tDmzZt8Pvvv6NHjx64desWrKysAAAREREIDg7G3bt3oauri+DgYOzevRtnzpwRr+Xr64vMzEzs3bsXAODm5obWrVtj1apVAAClUok6depgzJgxmDp16kvf07NYGSUiIiJSo/z8fGRnZ6u88vPLl/xmZWUBAMzNzQEACQkJKCwshIeHhxjTpEkT1K1bF3FxcQCAuLg4ODs7i4koAHh6eiI7Oxtnz54VY57uozimuI+CggIkJCSoxGhpacHDw0OMqSxMRomIiEjjydT4Cg0NhYmJicorNDT0hWNSKpUYP3482rVrh2bNmgEAFAoFdHV1YWpqqhJrZWUFhUIhxjydiBa3F7c9LyY7OxuPHj3CvXv3UFRUVGpMcR+VhavpiYiIiNRo2rRpCAoKUjkml7/40X1AQADOnDmDv/76S11DeyMwGSUiIiKNp86vA5XL5eVKPp8WGBiIXbt2ITY2FrVr1xaPW1tbo6CgAJmZmSrV0fT0dFhbW4sxz656L15t/3TMsyvw09PTYWxsDH19fWhra0NbW7vUmOI+Kgsf0xMRERG9IQRBQGBgIHbs2IGYmBjY29urtLdq1QrVqlXDgQMHxGMpKSlIS0uDu7s7AMDd3R3Jyckqq96jo6NhbGwMJycnMebpPopjivvQ1dVFq1atVGKUSiUOHDggxlQWVkaJiIiI3pBd7wMCArBp0yb8+uuvMDIyEudnmpiYQF9fHyYmJvD390dQUBDMzc1hbGyMMWPGwN3dHW3atAEAdO3aFU5OTvjkk08QFhYGhUKBGTNmICAgQKzQjho1CqtWrcKUKVMwbNgwxMTEYMuWLdi9e7c4lqCgIPj5+cHV1RXvvvsuli1bhtzcXAwdOrRS75lbOxHRW4VbOxFVXVJu7XTjQYHa+q5tplvuWFkZ8wW+//57DBkyBMCTTe8nTpyIn376Cfn5+fD09MTq1atVHp9fu3YNo0ePxqFDh2BgYAA/Pz8sXLgQOjr/1SEPHTqECRMm4J9//kHt2rUxc+ZM8RrFVq1ahUWLFkGhUMDFxQUrVqyAm5tb+W++PPfMZJSI3iZMRomqLimT0ZuZ6ktGa5mWPxnVRHxMT0RERBrvzXhIr5m4gImIiIiIJMPKKBEREWk8dW7tRM/HyigRERERSYaVUSIiItJ4Ms4alQwro0REREQkGVZGiYiIiFgYlQwro0REREQkGVZGiYiISOOxMCodJqNERESk8bi1k3T4mJ6IiIiIJMPKKBEREWk8bu0kHVZGiYiIiEgyrIwSERERsTAqGVZGiYiIiEgyrIwSERGRxmNhVDqsjBIRERGRZFgZJSIiIo3HfUalw2SUiIiINB63dpIOH9MTERERkWRYGSUiIiKNx8f00mFllIiIiIgkw2SUiIiIiCTDZJSIiIiIJMM5o0RERKTxOGdUOqyMEhEREZFkWBklIiIijcd9RqXDZJSIiIg0Hh/TS4eP6YmIiIhIMqyMEhERkcZjYVQ6rIwSERERkWRYGSUiIiJiaVQyrIwSERERkWRYGSUiIiKNx62dpMPKKBERERFJhpVRIiIi0njcZ1Q6rIwSERERkWRYGSUiIiKNx8KodJiMEhERETEblQwf0xMRERGRZFgZJSIiIo3HrZ2kw8ooEREREUmGlVEiIiLSeNzaSTqsjBIRERGRZGSCIAhSD4LoZeXn5yM0NBTTpk2DXC6XejhEVIn495tIMzAZpbdadnY2TExMkJWVBWNjY6mHQ0SViH+/iTQDH9MTERERkWSYjBIRERGRZJiMEhEREZFkmIzSW00ul2P27Nlc3EBUBfHvN5Fm4AImIiIiIpIMK6NEREREJBkmo0REREQkGSajRERERCQZJqP0xpDJZIiKipJ6GESkBvz7TURlYTJKr4VCocCYMWNQv359yOVy1KlTBz179sSBAwekHhoAQBAEzJo1CzY2NtDX14eHhwcuXrwo9bCI3gpv+t/vX375BV27doWFhQVkMhkSExOlHhIRPYXJKKnd1atX0apVK8TExGDRokVITk7G3r170blzZwQEBEg9PABAWFgYVqxYgYiICBw7dgwGBgbw9PREXl6e1EMjeqO9DX+/c3Nz0b59e3z55ZdSD4WISiMQqdmHH34o1KpVS8jJySnR9uDBA/G/AQg7duwQ30+ZMkVwcHAQ9PX1BXt7e2HGjBlCQUGB2J6YmCi89957gqGhoWBkZCS0bNlSiI+PFwRBEK5evSr06NFDMDU1FapXry44OTkJu3fvLnV8SqVSsLa2FhYtWiQey8zMFORyufDTTz+94t0TVW1v+t/vp6WmpgoAhFOnTr30/RJR5dOROBemKi4jIwN79+7F/PnzYWBgUKLd1NS0zHONjIwQGRkJW1tbJCcnY8SIETAyMsKUKVMAAAMHDkSLFi2wZs0aaGtrIzExEdWqVQMABAQEoKCgALGxsTAwMMA///wDQ0PDUq+TmpoKhUIBDw8P8ZiJiQnc3NwQFxcHX1/fV/gEiKqut+HvNxG9+ZiMklpdunQJgiCgSZMmFT53xowZ4n/Xq1cPkyZNws8//yz+Y5WWlobJkyeLfTs4OIjxaWlp6NevH5ydnQEA9evXL/M6CoUCAGBlZaVy3MrKSmwjopLehr/fRPTm45xRUivhFb7ga/PmzWjXrh2sra1haGiIGTNmIC0tTWwPCgrC8OHD4eHhgYULF+Ly5cti29ixYzFv3jy0a9cOs2fPRlJS0ivdBxGVxL/fRFQZmIySWjk4OEAmk+H8+fMVOi8uLg4DBw5E9+7dsWvXLpw6dQrTp09HQUGBGBMSEoKzZ8/Cy8sLMTExcHJywo4dOwAAw4cPx5UrV/DJJ58gOTkZrq6uWLlyZanXsra2BgCkp6erHE9PTxfbiKikt+HvNxG9BaSdskqaoFu3bhVe4PDVV18J9evXV4n19/cXTExMyryOr6+v0LNnz1Lbpk6dKjg7O5faVryA6auvvhKPZWVlcQETUTm86X+/n8YFTERvJlZGSe3Cw8NRVFSEd999F9u3b8fFixdx7tw5rFixAu7u7qWe4+DggLS0NPz888+4fPkyVqxYIVZFAODRo0cIDAzEoUOHcO3aNfz999+Ij4+Ho6MjAGD8+PHYt28fUlNTcfLkSRw8eFBse5ZMJsP48eMxb948/Pbbb0hOTsbgwYNha2sLb2/vSv88iKqSN/3vN/BkoVViYiL++ecfAEBKSgoSExM5J5zoTSF1Nkya4datW0JAQIBgZ2cn6OrqCrVq1RJ69eolHDx4UIzBM1u/TJ48WbCwsBAMDQ0FHx8fYenSpWLlJD8/X/D19RXq1Kkj6OrqCra2tkJgYKDw6NEjQRAEITAwUGjQoIEgl8sFS0tL4ZNPPhHu3btX5viUSqUwc+ZMwcrKSpDL5UKXLl2ElJQUdXwURFXOm/73+/vvvxcAlHjNnj1bDZ8GEVWUTBBeYQY6EREREdEr4GN6IiIiIpIMk1EiIiIikgyTUSIiIiKSDJNRIiIiIpIMk1EiIiIikgyTUSIiIiKSDJNRIiIiIpIMk1EiIiIikgyTUSKqNEOGDFH5CtX33nsP48ePf+3jOHToEGQyGTIzM9V2jWfv9WW8jnESEb3pmIwSVXFDhgyBTCaDTCaDrq4uGjZsiLlz5+Lx48dqv/Yvv/yCL774olyxrzsxq1evHpYtW/ZarkVERGXTkXoARKR+3bp1w/fff4/8/Hzs2bMHAQEBqFatGqZNm1YitqCgALq6upVyXXNz80rph4iIqi5WRok0gFwuh7W1Nezs7DB69Gh4eHjgt99+A/Df4+b58+fD1tYWjRs3BgBcv34dH3/8MUxNTWFubo7evXvj6tWrYp9FRUUICgqCqakpLCwsMGXKFAiCoHLdZx/T5+fnIzg4GHXq1IFcLkfDhg3x7bff4urVq+jcuTMAwMzMDDKZDEOGDAEAKJVKhIaGwt7eHvr6+mjevDm2bdumcp09e/agUaNG0NfXR+fOnVXG+TKKiorg7+8vXrNx48ZYvnx5qbFz5syBpaUljI2NMWrUKBQUFIht5Rk7EZGmY2WUSAPp6+vj/v374vsDBw7A2NgY0dHRAIDCwkJ4enrC3d0df/75J3R0dDBv3jx069YNSUlJ0NXVxeLFixEZGYnvvvsOjo6OWLx4MXbs2IH333+/zOsOHjwYcXFxWLFiBZo3b47U1FTcu3cPderUwfbt29GvXz+kpKTA2NgY+vr6AIDQ0FD8+OOPiIiIgIODA2JjYzFo0CBYWlqiU6dOuH79Ovr27YuAgACMHDkSJ06cwMSJE1/p81Eqlahduza2bt0KCwsLHDlyBCNHjoSNjQ0+/vhjlc9NT08Phw4dwtWrVzF06FBYWFhg/vz55Ro7EREBEIioSvPz8xN69+4tCIIgKJVKITo6WpDL5cKkSZPEdisrKyE/P188Z8OGDULjxo0FpVIpHsvPzxf09fWFffv2CYIgCDY2NkJYWJjYXlhYKNSuXVu8liAIQqdOnYRx48YJgiAIKSkpAgAhOjq61HEePHhQACA8ePBAPJaXlydUr15dOHLkiEqsv7+/0L9/f0EQBGHatGmCk5OTSntwcHCJvp5lZ2cnLF26tMz2ZwUEBAj9+vUT3/v5+Qnm5uZCbm6ueGzNmjWCoaGhUFRUVK6xl3bPRESahpVRIg2wa9cuGBoaorCwEEqlEgMGDEBISIjY7uzsrDJP9PTp07h06RKMjIxU+snLy8Ply5eRlZWF27dvw83NTWzT0dGBq6triUf1xRITE6GtrV2hiuClS5fw8OFDfPDBByrHCwoK0KJFCwDAuXPnVMYBAO7u7uW+RlnCw8Px3XffIS0tDY8ePUJBQQFcXFxUYpo3b47q1aurXDcnJwfXr19HTk7OC8dORER8TE+kETp37ow1a9ZAV1cXtra20NFR/atvYGCg8j4nJwetWrXCxo0bS/RlaWn5UmMofuxeETk5OQCA3bt3o1atWiptcrn8pcZRHj///DMmTZqExYsXw93dHUZGRli0aBGOHTtW7j6kGjsR0duGySiRBjAwMEDDhg3LHd+yZUts3rwZNWvWhLGxcakxNjY2OHbsGDp27AgAePz4MRISEtCyZctS452dnaFUKnH48GF4eHiUaC+uzBYVFYnHnJycIJfLkZaWVmZF1dHRUVyMVezo0aMvvsnn+Pvvv9G2bVt89tln4rHLly+XiDt9+jQePXokJtpHjx6FoaEh6tSpA3Nz8xeOnYiIuJqeiEoxcOBA1KhRA71798aff/6J1NRUHDp0CGPHjsWNGzcAAOPGjcPChQsRFRWF8+fP47PPPnvuHqH16tWDn58fhg0bhqioKLHPLVu2AADs7Owgk8mwa9cu3L17Fzk5OTAyMsKkSZMwYcIErF+/HpcvX8bJkyexcuVKrF+/HgAwatQoXLx4EZMnT0ZKSgo2bdqEyMjIct3nzZs3kZiYqPJ68OABHBwccOLECezbtw8XLlzAzJkzER8fX+L8goIC+Pv7459//sGePXswe/ZsBAYGQktLq1xjJyIicAETUVX39AKmirTfvn1bGDx4sFCjRg1BLpcL9evXF0aMGCFkZWUJgvBkwdK4ceMEY2NjwdTUVAgKChIGDx5c5gImQRCER48eCRMmTBBsbGwEXV1doWHDhsJ3330nts+dO1ewtrYWZDKZ4OfnJwjCk0VXy5YtExo3bixUq1ZNsLS0FDw9PYXDhw+L5+3cuVNo2LChIJfLhQ4dOgjfffdduRYwASjx2rBhg5CXlycMGTJEMDExEUxNTYXRo0cLU6dOFZo3b17ic5s1a5ZgYWEhGBoaCiNGjBDy8vLEmBeNnQuYiIgEQSYIZaw2ICIiIiJSMz6mJyIiIiLJMBklIiIiIskwGSUiIiIiyTAZJSIiIiLJMBklIiIiIskwGSUiIiIiyTAZJSIiIiLJMBklIiIiIskwGSUiIiIiyTAZJSIiIiLJMBklIiIiIsn8H4EZlv2XPtjAAAAAAElFTkSuQmCC\n"},"metadata":{}},{"name":"stdout","text":"\n=== (Validation Set) ===\nAccuracy: 0.9220\nAUC: 0.9233\n\n分类报告:\n              precision    recall  f1-score   support\n\n           0       0.92      1.00      0.96    179902\n           1       0.93      0.24      0.39     20098\n\n    accuracy                           0.92    200000\n   macro avg       0.92      0.62      0.67    200000\nweighted avg       0.92      0.92      0.90    200000\n\n","output_type":"stream"},{"output_type":"display_data","data":{"text/plain":"<Figure size 600x400 with 1 Axes>","image/png":"iVBORw0KGgoAAAANSUhEUgAAAjYAAAGJCAYAAACZwnkIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8pXeV/AAAACXBIWXMAAA9hAAAPYQGoP6dpAABeuUlEQVR4nO3deVxU1f8/8NeAMCACouyKCO64oKEi7igxKlaYlmvivoQr5UIZYpmaprmRZpboJ8ytJFPDEBcscENx11xAcwFXGEXZz+8Pf9yv1xlW0cHp9Xw85qFzzvue+753GObNvefeUQghBIiIiIj0gIGuEyAiIiIqLyxsiIiISG+wsCEiIiK9wcKGiIiI9AYLGyIiItIbLGyIiIhIb7CwISIiIr3BwoaIiIj0BgsbIiIi0hssbEhDeHg4FAoFkpOTpbbOnTujc+fOZRqvc+fOaNKkSfkkV4x9+/ZBoVBgy5Ytr2R9r8KHH36IN99886Wuo3bt2hgyZIj0vGA/7tu3r9hlX+RnozChoaFQKBTlOmZF1KZNG0ydOlXXabzWtP2+elmGDBmC2rVrS8+Tk5OhUCjw9ddfv/R1A/+d98WLYmFDFcbNmzcRGhqKxMREXadSYSQlJWH16tX45JNPAACLFi2CQqHA7t27C13m+++/h0KhwLZt215VmmXy+PFjhIaGlqh4epUePXqEmTNnokmTJjAzM0P16tXRvHlzTJw4ETdv3iz1eGfPnkVoaKjWD95p06YhLCwMKSkpRY4xZMgQKBSKYh/PFqcVQWlf44KCuuChVCphZ2eHzp07Y86cObhz545O8nqVKnJur4tKuk6AXg9//vnnS1/HzZs3MWvWLNSuXRvNmzd/6et7HSxZsgQuLi7w9vYGAPTr1w9TpkzB+vXr4ePjo3WZ9evXo3r16ujevXuZ19uxY0c8efIExsbGZR6jOI8fP8asWbMAQOOIz4wZMzB9+vSXtu7C5OTkoGPHjjh//jwCAgIwfvx4PHr0CGfOnMH69evRq1cvODo6lmrMs2fPYtasWejcubPsr30AeOedd2BhYYFvv/0Wn3/+eaFjjB49WvZ6JyUlISQkBKNGjUKHDh2k9jp16pQqt5etqNe4KBMmTECrVq2Ql5eHO3fuIC4uDjNnzsSiRYuwadMmdOnSRYr94IMP0K9fPyiVypee1/fff4/8/PwSx5dFRXxfvG5Y2FCJvMwPONIuJycHERERGDNmjNTm6OgIb29v/Prrr1ixYoXGL/MbN24gNjYWo0aNgpGRUZnXbWBgABMTkzIv/6IqVaqESpVe/a+nyMhIHD9+HBERERgwYICsLzMzE9nZ2eW6PgMDA/Tp0wfr1q3DrFmzCj3N4OXlBS8vL+n50aNHERISAi8vLwwaNOiF88jIyICZmdkLj1NeOnTogD59+sjaTpw4AV9fX/Tu3Rtnz56Fg4MDAMDQ0BCGhoYvNZ+C/fMi76nyoKv3xeuGp6KoRLTNo7h69SrefvttmJmZwdbWFpMnT8auXbsKnZtx9uxZeHt7o3LlyqhRowbmz58v9e3btw+tWrUCAAwdOlQ6FB0eHi7FhIWFwdXVFaampmjdujUOHDhQ6PyOvLw8fPLJJ7C3t4eZmRnefvtt/Pvvvxrb1KRJE5w8eRKdOnVC5cqVUbduXWl+zv79++Hp6QlTU1M0aNCgyNM/hcnPz8fixYvRuHFjmJiYwM7ODqNHj8aDBw+KXfavv/7C3bt3NY7MDBo0COnp6dixY4fGMhs2bEB+fj4GDhwIAPj666/Rtm1bVK9eHaampvDw8CjR/KPC5tisWrUKderUkb0Gz8vOzkZISAg8PDxgaWkJMzMzdOjQAXv37pVikpOTYWNjAwDSB7pCoUBoaCgA7XMJcnNz8cUXX6BOnTpQKpWoXbs2PvnkE2RlZcniateujZ49e+Kvv/5C69atYWJiAldXV6xbt67Y7b58+TIAoF27dhp9JiYmsLCwkLWdP38effr0QbVq1WBiYoKWLVvKTgGGh4fjvffeAwB4e3tL2/nsfn3zzTdx9erVFz4Fe/LkSQwZMgSurq4wMTGBvb09hg0bhnv37sniCvbt2bNnMWDAAFhZWaF9+/YAnv68hoaGwtHREZUrV4a3tzfOnj2rMQcLANLS0jBp0iQ4OTlBqVSibt26+Oqrr6QjGsW9xqXl7u6OxYsXIy0tDcuXL5fatc2xOXr0KFQqFaytrWFqagoXFxcMGzasRHkNGTIEVapUweXLl9GjRw+Ym5tL76fn59g865tvvoGzszNMTU3RqVMnnD59WtZf2O+qZ8esqO+L1w0LGyqTjIwMdOnSBbt378aECRPw6aefIi4uDtOmTdMa/+DBA3Tr1g3u7u5YuHAhGjZsiGnTpuGPP/4AADRq1Eg6FD9q1Cj873//w//+9z907NgRALBixQqMGzcONWvWxPz589GhQwf4+/vj+vXrWtf35ZdfYseOHZg2bRomTJiA6Oho+Pj44MmTJxp59ezZE56enpg/fz6USiX69euHjRs3ol+/fujRowfmzZuHjIwM9OnTBw8fPizVfho9ejSmTJmCdu3aYcmSJRg6dCgiIiKgUqmQk5NT5LJxcXFQKBRo0aKFrP3dd9+FiYkJ1q9fr7HM+vXr4ezsLH0wL1myBC1atMDnn3+OOXPmoFKlSnjvvfe0FkXF+eGHHzB69GjY29tj/vz5aNeundaCUa1WY/Xq1ejcuTO++uorhIaG4s6dO1CpVNKHt42NDVasWAEA6NWrl/R6v/vuu4Wuf8SIEQgJCcEbb7yBb775Bp06dcLcuXPRr18/jdhLly6hT58+ePPNN7Fw4UJYWVlhyJAhOHPmTJHb6OzsDABYt24dhBBFxp45cwZt2rTBuXPnMH36dCxcuBBmZmbw9/fH1q1bATw9pTdhwgQAwCeffCJtZ6NGjaRxPDw8AAB///13kesrTnR0NK5cuYKhQ4di2bJl6NevHzZs2IAePXpo3Zb33nsPjx8/xpw5czBy5EgAQHBwMGbNmoWWLVtiwYIFqFevHlQqFTIyMmTLPn78GJ06dcJPP/2EwYMHY+nSpWjXrh2Cg4MRFBQEoGyvcXH69OkDU1PTIk+N3759G76+vkhOTsb06dOxbNkyDBw4EAcPHixxXrm5uVCpVLC1tcXXX3+N3r17F5nXunXrsHTpUgQGBiI4OBinT59Gly5dkJqaWqrtq6jvi9eOIHrOmjVrBACRlJQktXXq1El06tRJer5w4UIBQERGRkptT548EQ0bNhQAxN69e2XLAhDr1q2T2rKysoS9vb3o3bu31HbkyBEBQKxZs0aWT1ZWlqhevbpo1aqVyMnJkdrDw8MFAFlee/fuFQBEjRo1hFqtlto3bdokAIglS5Zo5LV+/Xqp7fz58wKAMDAwEAcPHpTad+3apTW3ohw4cEAAEBEREbL2qKgore3PGzRokKhevbrWvvfee0+YmJiI9PR0jdyDg4OltsePH8uWy87OFk2aNBFdunSRtTs7O4uAgADpecF+LHgds7Ozha2trWjevLnIysqS4latWqXxGuTm5spihBDiwYMHws7OTgwbNkxqu3PnjgAgZs6cqbF9M2fOFM/+ekpMTBQAxIgRI2RxH3/8sQAg9uzZI9sWACI2NlZqu337tlAqleKjjz7SWNezHj9+LBo0aCAACGdnZzFkyBDxww8/iNTUVI3Yrl27iqZNm4rMzEypLT8/X7Rt21bUq1dPatu8ebPGe+J5xsbGYuzYsUXm9ixt75XnX2shhPj555819kXBvu3fv78sNiUlRVSqVEn4+/vL2kNDQwUA2c/HF198IczMzMQ///wji50+fbowNDQU165dE0IU/RprU/Bzt3nz5kJj3N3dhZWVlfT8+d9XW7duFQDEkSNHCh2jqLwCAgIEADF9+nStfc7OztLzpKQkAUCYmpqK69evS+2HDh0SAMTkyZOltud/hxY2ZkV8X7xueMSGyiQqKgo1atTA22+/LbWZmJhIf/k9r0qVKrK5AMbGxmjdujWuXLlS7LqOHj2Ke/fuYeTIkbLzywMHDoSVlZXWZQYPHgxzc3PpeZ8+feDg4ICdO3dq5PXsXzYNGjRA1apV0ahRI3h6ekrtBf8vSb4FNm/eDEtLS7z55pu4e/eu9PDw8ECVKlVkp2a0uXfvXqHbN2jQIGRmZuLXX3+V2gqO4BQcNgcAU1NT6f8PHjxAeno6OnTogGPHjpV4O4Cnr8Ht27cxZswY2XyrIUOGwNLSUhZraGgoxeTn5+P+/fvIzc1Fy5YtS73eAgWvW8HRgAIfffQRAGgcgXJzc5NNqrWxsUGDBg2Kff1MTU1x6NAhTJkyBcDT0xzDhw+Hg4MDxo8fLx3ev3//Pvbs2YP3338fDx8+lF7be/fuQaVS4eLFi7hx40aJt8/Kygp3794tcXxhuRfIzMzE3bt30aZNGwDQut+fnbsFADExMcjNzcWHH34oax8/frzGsps3b0aHDh2kvAsePj4+yMvLQ2xs7AttS1GqVKlS5JHTqlWrAgC2b99e7FHRoowdO7bEsf7+/qhRo4b0vHXr1vD09NT4fVPeXtX74nXDwobK5OrVq6hTp47G+d66detqja9Zs6ZGrJWVVYnmmly9elXr2JUqVSr0fHe9evVkzxUKBerWratxya22vCwtLeHk5KTRBqBE+Ra4ePEi0tPTYWtrCxsbG9nj0aNHuH37drFjiEJOh3Tv3h3VqlWTnY76+eef4e7ujsaNG0tt27dvR5s2bWBiYoJq1apJh7rT09NLvB3A/70Gz+9XIyMjuLq6asSvXbsWzZo1g4mJCapXrw4bGxvs2LGj1Ot9dv0GBgYaPwP29vaoWrWqlF+BWrVqaYxR0p83S0tLzJ8/H8nJyUhOTsYPP/yABg0aYPny5fjiiy8APD2kL4TAZ599pvHazpw5EwBK9PoWEEK88P1J7t+/j4kTJ8LOzg6mpqawsbGBi4sLAGjd7wV9BQp7n1WrVk2jwL548SKioqI0tr1gPlhptr20Hj16JPuj5XmdOnVC7969MWvWLFhbW+Odd97BmjVrNOacFKVSpUqoWbNmieOff18AQP369V/6vXVe5fvidcLp1fRKFHbVQmEf3K9KYXmVR775+fmwtbVFRESE1v6CSYKFqV69eqG/cIyMjPD+++/j+++/R2pqKq5du4aLFy/KJmQfOHAAb7/9Njp27Ihvv/0WDg4OMDIywpo1a7TOzykvP/30E4YMGQJ/f39MmTIFtra2MDQ0xNy5c6XJuWVV0g//8vp5c3Z2xrBhw9CrVy+4uroiIiICs2fPlibIfvzxx1CpVFqXLazI1yYtLQ3W1talyu1577//PuLi4jBlyhQ0b94cVapUQX5+Prp166b1EuVnj/CUVn5+Pt58881Cby5Yv379Mo9dlJycHPzzzz9F3vCz4AadBw8exO+//45du3Zh2LBhWLhwIQ4ePIgqVaoUux6lUgkDg/L9u1+hUGj9+cvLyyuXsUuiov4eLm8sbKhMnJ2dcfbsWY2/NC9dulTmMQt7cxZM6Lx06ZJ0Pxfg6QS/5ORkNGvWTGOZixcvyp4LIXDp0iWtsS9LnTp1sHv3brRr165MHyINGzZEREQE0tPTNU73AE9POa1cuRIbN25EUlISFAoF+vfvL/X/8ssvMDExwa5du2SXha9Zs6bUuRS8BhcvXpTdQyQnJwdJSUlwd3eX2rZs2QJXV1f8+uuvste04EhGgdIcoXB2dkZ+fj4uXrwom3ibmpqKtLQ0Kb+XxcrKCnXq1JGudCk4SmVkZFTo/YQKFLedN27cQHZ2tmy7SuvBgweIiYnBrFmzEBISIrU//z4oyrPvs2eP5ty7d0+jwK5Tpw4ePXr0wtteWlu2bMGTJ08KLSaf1aZNG7Rp0wZffvkl1q9fj4EDB2LDhg0YMWJEueelbT//888/siPKVlZWWk/5PH9U5XV6X1RUPBVFZaJSqXDjxg3Zpa2ZmZn4/vvvyzxmwX000tLSZO0tW7ZE9erV8f333yM3N1dqj4iIKPSIxrp162Tn4bds2YJbt2690E3rSuv9999HXl6edPriWbm5uRrb+TwvLy8IIZCQkKC1v127dqhduzZ++uknbNy4EZ06dZIdPjc0NIRCoZD9RZicnIzIyMhSb0vLli1hY2ODlStXyu7lEh4errEdBX8VPvtX4KFDhxAfHy+Lq1y5MgDN11ubHj16AAAWL14sa1+0aBEAwM/Pr0TbUZwTJ05onety9epVnD17Fg0aNAAA2NraonPnzvjuu+9w69Ytjfhn75Bb2M91gYLXt23btmXOW9s+BzT3V1G6du2KSpUqSVflFHj20uoC77//PuLj47Fr1y6NvrS0NOl9WprXuDgnTpzApEmTYGVlhcDAwELjHjx4oLEfCm74WXA6qjzzAp7e/+jZOVWHDx/GoUOHZL9v6tSpg/Pnz8t+Nk6cOKFxNVxFfF+8bnjEhspk9OjRWL58Ofr374+JEyfCwcEBERER0k3dyvIXUZ06dVC1alWsXLkS5ubmMDMzg6enJ1xcXBAaGorx48ejS5cueP/995GcnIzw8HCt83yAp/MC2rdvj6FDhyI1NRWLFy9G3bp1C53c/DJ06tQJo0ePxty5c5GYmAhfX18YGRnh4sWL2Lx5M5YsWaJxE7JntW/fHtWrV8fu3btlR0kKKBQKDBgwAHPmzAEAjTvX+vn5YdGiRejWrRsGDBiA27dvIywsDHXr1sXJkydLtS1GRkaYPXs2Ro8ejS5duqBv375ISkrCmjVrNObY9OzZE7/++it69eoFPz8/JCUlYeXKlXBzc8OjR4+kOFNTU7i5uWHjxo2oX78+qlWrhiZNmmg9zeDu7o6AgACsWrUKaWlp6NSpEw4fPoy1a9fC399fdiTvRURHR2PmzJl4++230aZNG1SpUgVXrlzBjz/+iKysLNk9WMLCwtC+fXs0bdoUI0eOhKurK1JTUxEfH4/r16/jxIkTAJ5+qBoaGuKrr75Ceno6lEolunTpAltbW2mdtWrV0risvzQsLCzQsWNHzJ8/Hzk5OahRowb+/PNPJCUllXgMOzs7TJw4EQsXLsTbb7+Nbt264cSJE/jjjz9gbW0te59NmTIF27ZtQ8+ePTFkyBB4eHggIyMDp06dwpYtW5CcnCzdQ6akr/GzDhw4gMzMTOTl5eHevXv4+++/sW3bNlhaWmLr1q2wt7cvdNm1a9fi22+/Ra9evVCnTh08fPgQ33//PSwsLKRCoKx5FaZu3bpo3749xo4di6ysLCxevBjVq1eXnaobNmwYFi1aBJVKheHDh+P27dtYuXIlGjduDLVaLcVVxPfFa0cXl2JRxVaSy72FEOLKlSvCz89PmJqaChsbG/HRRx+JX375RQCQXSrdqVMn0bhxY431PH+ZoxBC/Pbbb8LNzU1UqlRJ43LWpUuXCmdnZ6FUKkXr1q3F33//LTw8PES3bt2kmILLRX/++WcRHBwsbG1thampqfDz8xNXr16VrauwvJydnYWfn59GOwARGBiobZcVadWqVcLDw0OYmpoKc3Nz0bRpUzF16lRx8+bNYpedMGGCqFu3bqH9Z86cEQCEUqkUDx480Oj/4YcfRL169YRSqRQNGzYUa9as0bhkVIjiL/cu8O233woXFxehVCpFy5YtRWxsrMbPRn5+vpgzZ470WrVo0UJs375d6+sdFxcnPDw8hLGxsewSV2055uTkiFmzZgkXFxdhZGQknJycRHBwsOxy64Jt0fb6FXa57bOuXLkiQkJCRJs2bYStra2oVKmSsLGxEX5+frJLZwtcvnxZDB48WNjb2wsjIyNRo0YN0bNnT7FlyxZZ3Pfffy9cXV2FoaGhbL/m5eUJBwcHMWPGjCLzep62y72vX78uevXqJapWrSosLS3Fe++9J27evKlx6XDBvr1z547GuLm5ueKzzz4T9vb2wtTUVHTp0kWcO3dOVK9eXYwZM0YW+/DhQxEcHCzq1q0rjI2NhbW1tWjbtq34+uuvRXZ2thRX2GusTcHPXcHDyMhI2NjYiI4dO4ovv/xS3L59W2OZ539fHTt2TPTv31/UqlVLKJVKYWtrK3r27CmOHj0qW66wvAICAoSZmZnW/Aq73HvBggVi4cKFwsnJSSiVStGhQwdx4sQJjeV/+ukn4erqKoyNjUXz5s3Frl27Xov3xetGIYSezRoinVq8eDEmT56M69evyy5/fBny8/NhY2ODd99994VOgVVkV65cQcOGDfHHH3+ga9euuk6HyllkZCQGDBiAy5cvS18RUNGkpaXBysoKs2fPxqeffqrrdIiKxTk2VGbP38U3MzMT3333HerVq1fuRU1mZqbGefN169bh/v37pfoSu9eNq6srhg8fjnnz5uk6FXoJvvrqK4wbN67CFDXPv6eB/5u/oc/vM9IvPGJDZda9e3fUqlULzZs3R3p6On766SecOXNG6xcIvqh9+/Zh8uTJeO+991C9enUcO3YMP/zwAxo1aoSEhIRX+iWdBd84XJQqVaqU6LJSoookPDwc4eHh6NGjB6pUqYK//voLP//8M3x9fbVOFCaqiDh5mMpMpVJh9erViIiIQF5eHtzc3LBhwwb07du33NdVu3ZtODk5YenSpbh//z6qVauGwYMHY968ea/8m8f//fdfjZubPW/mzJll/rI/Il1p1qwZKlWqhPnz50OtVksTimfPnq3r1IhKjEdsiEopMzMTf/31V5Exrq6uWu/IS0RELxcLGyIiItIbnDxMREREeoNzbF6h/Px83Lx5E+bm5uV+S28iIiJ9JoTAw4cP4ejoWOR3ebGweYVu3ryp8a3RREREVHL//vtvkd++zsLmFTI3Nwfw9EWxsLDQcTZERESvD7VaDScnJ+mztDAsbF6hgtNPFhYWLGyIiIjKoLipHDqdPDx37ly0atUK5ubmsLW1hb+/Py5cuCCLyczMRGBgIKpXr44qVaqgd+/eSE1NlcVcu3YNfn5+qFy5MmxtbTFlyhTZt0ADT2/w9sYbb0CpVKJu3boIDw/XyCcsLAy1a9eGiYkJPD09cfjw4VLnQkRERLqj08Jm//79CAwMxMGDBxEdHY2cnBz4+voiIyNDipk8eTJ+//13bN68Gfv378fNmzfx7rvvSv15eXnw8/NDdnY24uLisHbtWoSHhyMkJESKSUpKgp+fH7y9vZGYmIhJkyZhxIgRsjtpbty4EUFBQZg5cyaOHTsGd3d3qFQq3L59u8S50H9LdnY26tati7i4OF2n8tKsXLkSb731lq7TICIqOV19+6Y2t2/fFgDE/v37hRBCpKWlCSMjI7F582Yp5ty5cwKAiI+PF0IIsXPnTmFgYCBSUlKkmBUrVggLCwuRlZUlhBBi6tSpGt/i3LdvX6FSqaTnrVu3ln1zc15ennB0dBRz584tcS7Py8zMFOnp6dLj33//FQBEenp6mfbP62L//v2iZ8+ewsHBQQAQW7du1YhJSUkRAQEBwsHBQZiamgqVSiX++ecfrePl5+eLbt26aYxV8K2+2h6pqalCCCEOHDgg2rZtK6pVqyZMTExEgwYNxKJFi2TjOzs7ax3jww8/LHI7lyxZInx8fGRtCQkJwsfHR1haWopq1aqJkSNHiocPH5YqZ23eeust6ZuD7e3txaBBg8SNGzek/oJv/X3+UblyZSnmzz//FPXq1RPm5uZi0KBB0vtDiKc/3/Xq1RPJycmy9WZlZQlHR0cRGxtb5L4gInrZ0tPTS/QZWqHuY5Oeng4AqFatGgAgISEBOTk58PHxkWIaNmyIWrVqIT4+HgAQHx+Ppk2bws7OTopRqVRQq9U4c+aMFPPsGAUxBWNkZ2cjISFBFmNgYAAfHx8ppiS5PG/u3LmwtLSUHv+VK6IyMjLg7u6OsLAwrf1CCPj7++PKlSv47bffcPz4cTg7O8PHx0d2tK7A4sWLtZ5T7du3L27duiV7qFQqdOrUCba2tgAAMzMzjBs3DrGxsTh37hxmzJiBGTNmYNWqVdI4R44ckY0RHR0NAHjvvfcK3UYhBJYvX47hw4dLbTdv3oSPjw/q1q2LQ4cOISoqCmfOnMGQIUNKlbM23t7e2LRpEy5cuIBffvkFly9fRp8+faT+jz/+WGNcNzc3aRvy8/MxYMAAjBkzBvHx8Th69KhsH0yfPh1jxoyBs7OzbL3GxsYYMGAAli5dWmhuREQVyisps0ogLy9P+Pn5iXbt2kltERERwtjYWCO2VatWYurUqUIIIUaOHCl8fX1l/RkZGQKA2LlzpxBCiHr16ok5c+bIYnbs2CEAiMePH4sbN24IACIuLk4WM2XKFNG6desS5/K8/+oRm2dByxGbCxcuCADi9OnTUlteXp6wsbER33//vSz2+PHjokaNGuLWrVuFHv0pcPv2bWFkZCTWrVtXZE69evUSgwYNKrR/4sSJok6dOiI/P7/QmCNHjggDAwOhVqultu+++07Y2tqKvLw8qe3kyZMCgLh48eIL5fy83377TSgUCpGdna21PzExUQCQjrSkpqYKAOLJkydCiKdHMQuOSP3999/Cw8ND5Obmah1r//79wtjYWDx+/LhUORIRlafX7ohNYGAgTp8+jQ0bNug6lXKjVCqlK6B4JdT/ycrKAgCYmJhIbQYGBlAqlbLvYHr8+DEGDBiAsLAw2NvbFzvuunXrULlyZdmRjOcdP34ccXFx6NSpk9b+7Oxs/PTTTxg2bFiRM+8PHDiA+vXryy47zMrKgrGxsezGUaampgBQ6HdLlSTn592/fx8RERFo27YtjIyMtMasXr0a9evXR4cOHQAANjY2cHBwwJ9//onHjx/jwIEDaNasGXJycjB27Fh89913MDQ01DpWy5YtkZubi0OHDpU4RyIiXakQhc24ceOwfft27N27V3bTHXt7e2RnZyMtLU0Wn5qaKn3Q2dvba1yZVPC8uBgLCwuYmprC2toahoaGWmOeHaO4XKhkCk7hBQcH48GDB8jOzsZXX32F69ev49atW1Lc5MmT0bZtW7zzzjslGveHH37AgAEDpGLiWTVr1oRSqUTLli0RGBiIESNGaB0jMjISaWlpstNH2ly9ehWOjo6yti5duiAlJQULFixAdnY2Hjx4gOnTpwOAbLtKmvPzpk2bBjMzM1SvXh3Xrl3Db7/9pjUuMzMTERERstNkCoUCmzZtwhdffIHGjRujRYsWGDZsGObNmwdvb2+YmJigXbt2aNCgAZYvXy4br3LlyrC0tMTVq1eLzZGISOde0REkrfLz80VgYKBwdHTUOnG0YMLuli1bpLbz589rnTz87MTL7777TlhYWIjMzEwhxNPD7k2aNJGN3b9/f43Jw+PGjZOe5+XliRo1amhMHi4ql+KU9DCaPkEhp4+OHj0q3N3dBQBhaGgoVCqV6N69u+jWrZsQ4umplrp168om3hY2lhBCxMXFCQDi6NGjWvuvXLkiTp48KVatWiWqVasm1q9frzXO19dX9OzZs9jtGjVqlOjRo4dGe0REhLCzsxOGhobC2NhYfPzxx8LOzk7Mmzev1Dk/786dO+LChQvizz//FO3atRM9evTQerps/fr1olKlSrIJ9dpcuHBB2sctWrQQ4eHhIjU1VdjY2IgTJ07IYh0dHcW3335bojyJiF6Gkn6G6rSwGTt2rLC0tBT79u0Tt27dkh7PnssfM2aMqFWrltizZ484evSo8PLyEl5eXlJ/bm6uaNKkifD19RWJiYkiKipK2NjYiODgYCnmypUronLlymLKlCni3LlzIiwsTBgaGoqoqCgpZsOGDUKpVIrw8HBx9uxZMWrUKFG1alXZh0NxuRSHhY2mtLQ0cfv2bSHE0+KyYN7HxIkThUKhEIaGhtIDgDAwMBCdOnXSGGfYsGGiefPmJcrpiy++EPXr19doT05OFgYGBiIyMrLYMT755JMiX/uUlBTx8OFD8ejRI2FgYCA2bdr0Qjk/r2C+1vPzwoQQokuXLsLf37/YMTp37ix+++036ecyIyNDCCFEnz59xNKlS2WxJiYmsisCiYhetdeisEEhl72uWbNGinny5In48MMPhZWVlahcubLo1auXuHXrlmyc5ORk0b17d2Fqaiqsra3FRx99JHJycmQxe/fuFc2bNxfGxsbC1dVVto4Cy5YtE7Vq1RLGxsaidevW4uDBg7L+kuRSFBY2hfvnn3+EgYGB2LVrlxBCiFu3bolTp07JHgDEkiVLxJUrV2TLPnz4UFSpUkUsW7asRDnNmjVLODs7a7TPnDlT2Nvba/zsaLN582ZhZWVV5ARjIYT44YcfROXKlcWDBw9eKOfnXb16VQAQe/fulbVfuXJFKBQK8fvvvxe5/OrVq8W7774rhBDi/v37AoBIS0sTQgjxzjvviG+++UaKvXTpkgAgLl26VKZciYjKw2tR2PzX/FcKm4cPH4rjx4+L48ePCwBi0aJF4vjx4+Lq1atSzKZNm8TevXvF5cuXRWRkpHB2dpY+aAtTWJG0evVqYWJiolE8CCHE8uXLxbZt28Q///wj/vnnH7F69Wphbm4uPv30U1lcXl6eqFWrlpg2bVqJtvHu3bvCyMhInDp1Sta+bNkykZCQIC5cuCCWL18uTE1NxZIlS0qV86FDh0SDBg3E9evXhRBCHDx4UCxbtkwcP35cJCcni5iYGNG2bVtRp04d6XRrgRkzZghHR8dCr3AS4ukVUrVr15bdB6dRo0YiNDRUxMXFiSpVqojDhw9LfWvWrBGurq4l2i9ERC9LST9DFUII8fJn8hDw9Au8LC0tkZ6e/lKukBr/475yH7Msrp9PROSCyRrtDduq4DP86WTaE7t/wfGojXisfgAzy+po0NYXrd76AIaVtF/lAwDLh3ujR+AXcH2jvax9y5xxsLC2h++oGRrLnIj5FWf2/Q713RQYGBrC0sYRbh390KTTW1A8c/XStdNHsO2bqRj45TpY2ZfsfkNRK2fBwsYRbXuPlNqiV89B8slDyMl6Ait7J7RQ9UXDtr4ayxaVc8H+G/zVz7Cwtsfd61dw4OfluPvvZeRmPUHlqtXh3KQ1WvYchCpWNtJyIj8fa6f2Q4O2vvB6V/vkaADY9d0XcKjbBM269pLaUq+cw+4f5+GxOg3uPu+i9dsBUt9vi6agZsMW8OgxoET75WVbNqyzrlMgIh0o6WcoC5tX6L9S2PxX3P33Mn5bNAUfzI2AsUnxVzW9ju7dSELk1x9h0JfroKxcRdfpAGBhQ/RfVdLP0ApxuTfR68jaqQ7a9h4F9V3tl3Lrg8fp9+EzfHqFKWqIiIpTSdcJEL3OGrXvpusUXionNw9dp0BEVCo8YkNERER6g4UNERER6Q0WNkRERKQ3WNgQERGR3mBhQ0RERHqDhQ0RERHpDRY2REREpDdY2BAREZHeYGFDREREeoOFDREREekNFjZERESkN1jYEBERkd5gYUNERER6g4UNERER6Q0WNkRERKQ3WNgQERGR3mBhQ0RERHqDhQ0RERHpDRY2REREpDdY2BAREZHeYGFDREREeoOFDREREekNnRY2sbGxeOutt+Do6AiFQoHIyEhZv0Kh0PpYsGCBFFO7dm2N/nnz5snGOXnyJDp06AATExM4OTlh/vz5Grls3rwZDRs2hImJCZo2bYqdO3fK+oUQCAkJgYODA0xNTeHj44OLFy+W384gIiKiF6bTwiYjIwPu7u4ICwvT2n/r1i3Z48cff4RCoUDv3r1lcZ9//rksbvz48VKfWq2Gr68vnJ2dkZCQgAULFiA0NBSrVq2SYuLi4tC/f38MHz4cx48fh7+/P/z9/XH69GkpZv78+Vi6dClWrlyJQ4cOwczMDCqVCpmZmeW8V4iIiKisKuly5d27d0f37t0L7be3t5c9/+233+Dt7Q1XV1dZu7m5uUZsgYiICGRnZ+PHH3+EsbExGjdujMTERCxatAijRo0CACxZsgTdunXDlClTAABffPEFoqOjsXz5cqxcuRJCCCxevBgzZszAO++8AwBYt24d7OzsEBkZiX79+pV5HxAREVH5eW3m2KSmpmLHjh0YPny4Rt+8efNQvXp1tGjRAgsWLEBubq7UFx8fj44dO8LY2FhqU6lUuHDhAh48eCDF+Pj4yMZUqVSIj48HACQlJSElJUUWY2lpCU9PTylGm6ysLKjVatmDiIiIXh6dHrEpjbVr18Lc3BzvvvuurH3ChAl44403UK1aNcTFxSE4OBi3bt3CokWLAAApKSlwcXGRLWNnZyf1WVlZISUlRWp7NiYlJUWKe3Y5bTHazJ07F7NmzSrD1hIREVFZvDaFzY8//oiBAwfCxMRE1h4UFCT9v1mzZjA2Nsbo0aMxd+5cKJXKV52mTHBwsCw/tVoNJycnHWZERESk316LU1EHDhzAhQsXMGLEiGJjPT09kZubi+TkZABP5+mkpqbKYgqeF8zLKSzm2f5nl9MWo41SqYSFhYXsQURERC/Pa1HY/PDDD/Dw8IC7u3uxsYmJiTAwMICtrS0AwMvLC7GxscjJyZFioqOj0aBBA1hZWUkxMTExsnGio6Ph5eUFAHBxcYG9vb0sRq1W49ChQ1IMERER6Z5OT0U9evQIly5dkp4nJSUhMTER1apVQ61atQA8LSA2b96MhQsXaiwfHx+PQ4cOwdvbG+bm5oiPj8fkyZMxaNAgqWgZMGAAZs2aheHDh2PatGk4ffo0lixZgm+++UYaZ+LEiejUqRMWLlwIPz8/bNiwAUePHpUuCVcoFJg0aRJmz56NevXqwcXFBZ999hkcHR3h7+//EvcQERERlYZOC5ujR4/C29tbel4wHyUgIADh4eEAgA0bNkAIgf79+2ssr1QqsWHDBoSGhiIrKwsuLi6YPHmybF6LpaUl/vzzTwQGBsLDwwPW1tYICQmRLvUGgLZt22L9+vWYMWMGPvnkE9SrVw+RkZFo0qSJFDN16lRkZGRg1KhRSEtLQ/v27REVFaUx54eIiIh0RyGEELpO4r9CrVbD0tIS6enpL2W+zfgf95X7mEQVzbJhnXWdAhHpQEk/Q1+LOTZEREREJcHChoiIiPQGCxsiIiLSGyxsiIiISG+wsCEiIiK9wcKGiIiI9AYLGyIiItIbLGyIiIhIb7CwISIiIr3BwoaIiIj0BgsbIiIi0hssbIiIiEhvsLAhIiIivcHChoiIiPQGCxsiIiLSGyxsiIiISG+wsCEiIiK9wcKGiIiI9AYLGyIiItIbLGyIiIhIb7CwISIiIr3BwoaIiIj0BgsbIiIi0hssbIiIiEhvsLAhIiIivcHChoiIiPQGCxsiIiLSGzotbGJjY/HWW2/B0dERCoUCkZGRsv4hQ4ZAoVDIHt26dZPF3L9/HwMHDoSFhQWqVq2K4cOH49GjR7KYkydPokOHDjAxMYGTkxPmz5+vkcvmzZvRsGFDmJiYoGnTpti5c6esXwiBkJAQODg4wNTUFD4+Prh48WL57AgiIiIqFzotbDIyMuDu7o6wsLBCY7p164Zbt25Jj59//lnWP3DgQJw5cwbR0dHYvn07YmNjMWrUKKlfrVbD19cXzs7OSEhIwIIFCxAaGopVq1ZJMXFxcejfvz+GDx+O48ePw9/fH/7+/jh9+rQUM3/+fCxduhQrV67EoUOHYGZmBpVKhczMzHLcI0RERPQiFEIIoeskAEChUGDr1q3w9/eX2oYMGYK0tDSNIzkFzp07Bzc3Nxw5cgQtW7YEAERFRaFHjx64fv06HB0dsWLFCnz66adISUmBsbExAGD69OmIjIzE+fPnAQB9+/ZFRkYGtm/fLo3dpk0bNG/eHCtXroQQAo6Ojvjoo4/w8ccfAwDS09NhZ2eH8PBw9OvXr0TbqFarYWlpifT0dFhYWJR2FxVr/I/7yn1Moopm2bDOuk6BiHSgpJ+hFX6Ozb59+2Bra4sGDRpg7NixuHfvntQXHx+PqlWrSkUNAPj4+MDAwACHDh2SYjp27CgVNQCgUqlw4cIFPHjwQIrx8fGRrVelUiE+Ph4AkJSUhJSUFFmMpaUlPD09pRhtsrKyoFarZQ8iIiJ6eSp0YdOtWzesW7cOMTEx+Oqrr7B//350794deXl5AICUlBTY2trKlqlUqRKqVauGlJQUKcbOzk4WU/C8uJhn+59dTluMNnPnzoWlpaX0cHJyKtX2ExERUelU0nUCRXn2FE/Tpk3RrFkz1KlTB/v27UPXrl11mFnJBAcHIygoSHquVqtZ3BAREb1EFfqIzfNcXV1hbW2NS5cuAQDs7e1x+/ZtWUxubi7u378Pe3t7KSY1NVUWU/C8uJhn+59dTluMNkqlEhYWFrIHERERvTyvVWFz/fp13Lt3Dw4ODgAALy8vpKWlISEhQYrZs2cP8vPz4enpKcXExsYiJydHiomOjkaDBg1gZWUlxcTExMjWFR0dDS8vLwCAi4sL7O3tZTFqtRqHDh2SYoiIiEj3dFrYPHr0CImJiUhMTATwdJJuYmIirl27hkePHmHKlCk4ePAgkpOTERMTg3feeQd169aFSqUCADRq1AjdunXDyJEjcfjwYfz9998YN24c+vXrB0dHRwDAgAEDYGxsjOHDh+PMmTPYuHEjlixZIjtFNHHiRERFRWHhwoU4f/48QkNDcfToUYwbNw7A0yu2Jk2ahNmzZ2Pbtm04deoUBg8eDEdHR9lVXERERKRbOp1jc/ToUXh7e0vPC4qNgIAArFixAidPnsTatWuRlpYGR0dH+Pr64osvvoBSqZSWiYiIwLhx49C1a1cYGBigd+/eWLp0qdRvaWmJP//8E4GBgfDw8IC1tTVCQkJk97pp27Yt1q9fjxkzZuCTTz5BvXr1EBkZiSZNmkgxU6dORUZGBkaNGoW0tDS0b98eUVFRMDExeZm7iIiIiEqhwtzH5r+A97EhenG8jw3Rf5Pe3MeGiIiIqKRY2BAREZHeYGFDREREeoOFDREREekNFjZERESkN1jYEBERkd5gYUNERER6g4UNERER6Q0WNkRERKQ3WNgQERGR3mBhQ0RERHqDhQ0RERHpDRY2REREpDdY2BAREZHeYGFDREREeoOFDREREekNFjZERESkN1jYEBERkd5gYUNERER6g4UNERER6Q0WNkRERKQ3WNgQERGR3mBhQ0RERHqDhQ0RERHpDRY2REREpDdY2BAREZHeYGFDREREekOnhU1sbCzeeustODo6QqFQIDIyUurLycnBtGnT0LRpU5iZmcHR0RGDBw/GzZs3ZWPUrl0bCoVC9pg3b54s5uTJk+jQoQNMTEzg5OSE+fPna+SyefNmNGzYECYmJmjatCl27twp6xdCICQkBA4ODjA1NYWPjw8uXrxYfjuDiIiIXphOC5uMjAy4u7sjLCxMo+/x48c4duwYPvvsMxw7dgy//vorLly4gLffflsj9vPPP8etW7ekx/jx46U+tVoNX19fODs7IyEhAQsWLEBoaChWrVolxcTFxaF///4YPnw4jh8/Dn9/f/j7++P06dNSzPz587F06VKsXLkShw4dgpmZGVQqFTIzM8t5rxAREVFZKYQQQtdJAIBCocDWrVvh7+9faMyRI0fQunVrXL16FbVq1QLw9IjNpEmTMGnSJK3LrFixAp9++ilSUlJgbGwMAJg+fToiIyNx/vx5AEDfvn2RkZGB7du3S8u1adMGzZs3x8qVKyGEgKOjIz766CN8/PHHAID09HTY2dkhPDwc/fr107rurKwsZGVlSc/VajWcnJyQnp4OCwuLEu+bkhr/475yH5Ooolk2rLOuUyAiHVCr1bC0tCz2M/S1mmOTnp4OhUKBqlWrytrnzZuH6tWro0WLFliwYAFyc3Olvvj4eHTs2FEqagBApVLhwoULePDggRTj4+MjG1OlUiE+Ph4AkJSUhJSUFFmMpaUlPD09pRht5s6dC0tLS+nh5ORU5m0nIiKi4r02hU1mZiamTZuG/v37yyq1CRMmYMOGDdi7dy9Gjx6NOXPmYOrUqVJ/SkoK7OzsZGMVPE9JSSky5tn+Z5fTFqNNcHAw0tPTpce///5b2s0mIiKiUqik6wRKIicnB++//z6EEFixYoWsLygoSPp/s2bNYGxsjNGjR2Pu3LlQKpWvOlUZpVKp8xyIiIj+Syr8EZuCoubq1auIjo4udm6Kp6cncnNzkZycDACwt7dHamqqLKbgub29fZExz/Y/u5y2GCIiItK9Cl3YFBQ1Fy9exO7du1G9evVil0lMTISBgQFsbW0BAF5eXoiNjUVOTo4UEx0djQYNGsDKykqKiYmJkY0THR0NLy8vAICLiwvs7e1lMWq1GocOHZJiiIiISPd0eirq0aNHuHTpkvQ8KSkJiYmJqFatGhwcHNCnTx8cO3YM27dvR15enjSfpVq1ajA2NkZ8fDwOHToEb29vmJubIz4+HpMnT8agQYOkomXAgAGYNWsWhg8fjmnTpuH06dNYsmQJvvnmG2m9EydORKdOnbBw4UL4+flhw4YNOHr0qHRJuEKhwKRJkzB79mzUq1cPLi4u+Oyzz+Do6FjkVVxERET0aum0sDl69Ci8vb2l5wXzZQICAhAaGopt27YBAJo3by5bbu/evejcuTOUSiU2bNiA0NBQZGVlwcXFBZMnT5bNu7G0tMSff/6JwMBAeHh4wNraGiEhIRg1apQU07ZtW6xfvx4zZszAJ598gnr16iEyMhJNmjSRYqZOnYqMjAyMGjUKaWlpaN++PaKiomBiYvIydg0RERGVQYW5j81/QUmvwS8r3seG/gt4Hxui/ya9vI8NERERUVFY2BAREZHeYGFDREREeoOFDREREekNFjZERESkN1jYEBERkd5gYUNERER6g4UNERER6Q0WNkRERKQ3ylTYuLq64t69exrtaWlpcHV1feGkiIiIiMqiTIVNcnIy8vLyNNqzsrJw48aNF06KiIiIqCxK9SWYBV9KCQC7du2CpaWl9DwvLw8xMTGoXbt2uSVHREREVBqlKmz8/f0BAAqFAgEBAbI+IyMj1K5dGwsXLiy35IiIiIhKo1SFTX5+PgDAxcUFR44cgbW19UtJioiIiKgsSlXYFEhKSirvPIiIiIheWJkKGwCIiYlBTEwMbt++LR3JKfDjjz++cGJEREREpVWmwmbWrFn4/PPP0bJlSzg4OEChUJR3XkRERESlVqbCZuXKlQgPD8cHH3xQ3vkQERERlVmZ7mOTnZ2Ntm3blncuRERERC+kTIXNiBEjsH79+vLOhYiIiOiFlOlUVGZmJlatWoXdu3ejWbNmMDIykvUvWrSoXJIjIiIiKo0yFTYnT55E8+bNAQCnT5+W9XEiMREREelKmQqbvXv3lnceRERERC+sTHNsiIiIiCqiMh2x8fb2LvKU0549e8qcEBEREVFZlamwKZhfUyAnJweJiYk4ffq0xpdjEhEREb0qZToV9c0338gey5cvx19//YVJkyZpXCFVlNjYWLz11ltwdHSEQqFAZGSkrF8IgZCQEDg4OMDU1BQ+Pj64ePGiLOb+/fsYOHAgLCwsULVqVQwfPhyPHj2SxZw8eRIdOnSAiYkJnJycMH/+fI1cNm/ejIYNG8LExARNmzbFzp07S50LERER6Va5zrEZNGhQqb4nKiMjA+7u7ggLC9PaP3/+fCxduhQrV67EoUOHYGZmBpVKhczMTClm4MCBOHPmDKKjo7F9+3bExsZi1KhRUr9arYavry+cnZ2RkJCABQsWIDQ0FKtWrZJi4uLi0L9/fwwfPhzHjx+Hv78//P39ZVd8lSQXIiIi0i2FEEKU12D/+9//MG3aNNy8ebP0iSgU2Lp1K/z9/QE8PULi6OiIjz76CB9//DEAID09HXZ2dggPD0e/fv1w7tw5uLm54ciRI2jZsiUAICoqCj169MD169fh6OiIFStW4NNPP0VKSgqMjY0BANOnT0dkZCTOnz8PAOjbty8yMjKwfft2KZ82bdqgefPmWLlyZYlyKQm1Wg1LS0ukp6fDwsKi1PuoOON/3FfuYxJVNMuGddZ1CkSkAyX9DC3TEZt3331X9ujVqxfatGmDoUOHYvTo0WVO+llJSUlISUmBj4+P1GZpaQlPT0/Ex8cDAOLj41G1alWpqAEAHx8fGBgY4NChQ1JMx44dpaIGAFQqFS5cuIAHDx5IMc+upyCmYD0lyUWbrKwsqNVq2YOIiIhenjJNHra0tJQ9NzAwQIMGDfD555/D19e3XBJLSUkBANjZ2cna7ezspL6UlBTY2trK+itVqoRq1arJYlxcXDTGKOizsrJCSkpKsespLhdt5s6di1mzZhW/sURERFQuylTYrFmzprzz0EvBwcEICgqSnqvVajg5OekwIyIiIv1WpsKmQEJCAs6dOwcAaNy4MVq0aFEuSQGAvb09ACA1NRUODg5Se2pqqnS5ub29PW7fvi1bLjc3F/fv35eWt7e3R2pqqiym4HlxMc/2F5eLNkqlEkqlskTbS0RERC+uTHNsbt++jS5duqBVq1aYMGECJkyYAA8PD3Tt2hV37twpl8RcXFxgb2+PmJgYqU2tVuPQoUPw8vICAHh5eSEtLQ0JCQlSzJ49e5Cfnw9PT08pJjY2Fjk5OVJMdHQ0GjRoACsrKynm2fUUxBSspyS5EBERke6VqbAZP348Hj58iDNnzuD+/fu4f/8+Tp8+DbVajQkTJpR4nEePHiExMRGJiYkAnk7STUxMxLVr16BQKDBp0iTMnj0b27Ztw6lTpzB48GA4OjpKV041atQI3bp1w8iRI3H48GH8/fffGDduHPr16wdHR0cAwIABA2BsbIzhw4fjzJkz2LhxI5YsWSI7RTRx4kRERUVh4cKFOH/+PEJDQ3H06FGMGzcOAEqUCxEREelemU5FRUVFYffu3WjUqJHU5ubmhrCwsFJNHj569Ci8vb2l5wXFRkBAAMLDwzF16lRkZGRg1KhRSEtLQ/v27REVFQUTExNpmYiICIwbNw5du3aFgYEBevfujaVLl0r9lpaW+PPPPxEYGAgPDw9YW1sjJCREdq+btm3bYv369ZgxYwY++eQT1KtXD5GRkWjSpIkUU5JciIiISLfKdB8bc3NzHDhwQGN+yfHjx9GpUyde1lwI3seG6MXxPjZE/00v9T42Xbp0wcSJE2U34rtx4wYmT56Mrl27lmVIIiIiohdWpsJm+fLlUKvVqF27NurUqYM6derAxcUFarUay5YtK+8ciYiIiEqkTHNsnJyccOzYMezevVv6WoJGjRpp3L2XiIiI6FUq1RGbPXv2wM3NDWq1GgqFAm+++SbGjx+P8ePHo1WrVmjcuDEOHDjwsnIlIiIiKlKpCpvFixdj5MiRWiftWFpaYvTo0Vi0aFG5JUdERERUGqUqbE6cOIFu3boV2u/r6yu7WR4RERHRq1SqwiY1NRVGRkaF9leqVKnc7jxMREREVFqlKmxq1KiB06dPF9p/8uRJ2XcpEREREb1KpSpsevTogc8++wyZmZkafU+ePMHMmTPRs2fPckuOiIiIqDRKdbn3jBkz8Ouvv6J+/foYN24cGjRoAAA4f/48wsLCkJeXh08//fSlJEpERERUnFIVNnZ2doiLi8PYsWMRHByMgm9jUCgUUKlUCAsLg52d3UtJlIiIiKg4pb5Bn7OzM3bu3IkHDx7g0qVLEEKgXr16sLKyehn5EREREZVYme48DABWVlZo1apVeeZCRERE9ELK9F1RRERERBURCxsiIiLSGyxsiIiISG+wsCEiIiK9wcKGiIiI9AYLGyIiItIbLGyIiIhIb7CwISIiIr3BwoaIiIj0BgsbIiIi0hssbIiIiEhvsLAhIiIivcHChoiIiPQGCxsiIiLSGxW+sKlduzYUCoXGIzAwEADQuXNnjb4xY8bIxrh27Rr8/PxQuXJl2NraYsqUKcjNzZXF7Nu3D2+88QaUSiXq1q2L8PBwjVzCwsJQu3ZtmJiYwNPTE4cPH35p201ERESlV+ELmyNHjuDWrVvSIzo6GgDw3nvvSTEjR46UxcyfP1/qy8vLg5+fH7KzsxEXF4e1a9ciPDwcISEhUkxSUhL8/Pzg7e2NxMRETJo0CSNGjMCuXbukmI0bNyIoKAgzZ87EsWPH4O7uDpVKhdu3b7+CvUBEREQloRBCCF0nURqTJk3C9u3bcfHiRSgUCnTu3BnNmzfH4sWLtcb/8ccf6NmzJ27evAk7OzsAwMqVKzFt2jTcuXMHxsbGmDZtGnbs2IHTp09Ly/Xr1w9paWmIiooCAHh6eqJVq1ZYvnw5ACA/Px9OTk4YP348pk+fXqLc1Wo1LC0tkZ6eDgsLixfYC9qN/3FfuY9JVNEsG9ZZ1ykQkQ6U9DO0wh+xeVZ2djZ++uknDBs2DAqFQmqPiIiAtbU1mjRpguDgYDx+/Fjqi4+PR9OmTaWiBgBUKhXUajXOnDkjxfj4+MjWpVKpEB8fL603ISFBFmNgYAAfHx8pRpusrCyo1WrZg4iIiF6eSrpOoDQiIyORlpaGIUOGSG0DBgyAs7MzHB0dcfLkSUybNg0XLlzAr7/+CgBISUmRFTUApOcpKSlFxqjVajx58gQPHjxAXl6e1pjz588Xmu/cuXMxa9asMm8vERERlc5rVdj88MMP6N69OxwdHaW2UaNGSf9v2rQpHBwc0LVrV1y+fBl16tTRRZqS4OBgBAUFSc/VajWcnJx0mBEREZF+e20Km6tXr2L37t3SkZjCeHp6AgAuXbqEOnXqwN7eXuPqpdTUVACAvb299G9B27MxFhYWMDU1haGhIQwNDbXGFIyhjVKphFKpLNkGEhER0Qt7bebYrFmzBra2tvDz8ysyLjExEQDg4OAAAPDy8sKpU6dkVy9FR0fDwsICbm5uUkxMTIxsnOjoaHh5eQEAjI2N4eHhIYvJz89HTEyMFENERES691oUNvn5+VizZg0CAgJQqdL/HWS6fPkyvvjiCyQkJCA5ORnbtm3D4MGD0bFjRzRr1gwA4OvrCzc3N3zwwQc4ceIEdu3ahRkzZiAwMFA6mjJmzBhcuXIFU6dOxfnz5/Htt99i06ZNmDx5srSuoKAgfP/991i7di3OnTuHsWPHIiMjA0OHDn21O4OIiIgK9Vqcitq9ezeuXbuGYcOGydqNjY2xe/duLF68GBkZGXByckLv3r0xY8YMKcbQ0BDbt2/H2LFj4eXlBTMzMwQEBODzzz+XYlxcXLBjxw5MnjwZS5YsQc2aNbF69WqoVCoppm/fvrhz5w5CQkKQkpKC5s2bIyoqSmNCMREREenOa3cfm9cZ72ND9OJ4Hxui/ya9vI8NERERUVFY2BAREZHeYGFDREREeoOFDREREekNFjZERESkN1jYEBERkd5gYUNERER6g4UNERER6Q0WNkRERKQ3WNgQERGR3mBhQ0RERHqDhQ0RERHpDRY2REREpDdY2BAREZHeYGFDREREeoOFDREREekNFjZERESkN1jYEBERkd5gYUNERER6g4UNERER6Q0WNkRERKQ3WNgQERGR3mBhQ0RERHqDhQ0RERHpDRY2REREpDdY2BAREZHeYGFDREREeqNCFzahoaFQKBSyR8OGDaX+zMxMBAYGonr16qhSpQp69+6N1NRU2RjXrl2Dn58fKleuDFtbW0yZMgW5ubmymH379uGNN96AUqlE3bp1ER4erpFLWFgYateuDRMTE3h6euLw4cMvZZuJiIio7Cp0YQMAjRs3xq1bt6THX3/9JfVNnjwZv//+OzZv3oz9+/fj5s2bePfdd6X+vLw8+Pn5ITs7G3FxcVi7di3Cw8MREhIixSQlJcHPzw/e3t5ITEzEpEmTMGLECOzatUuK2bhxI4KCgjBz5kwcO3YM7u7uUKlUuH379qvZCURERFQiCiGE0HUShQkNDUVkZCQSExM1+tLT02FjY4P169ejT58+AIDz58+jUaNGiI+PR5s2bfDHH3+gZ8+euHnzJuzs7AAAK1euxLRp03Dnzh0YGxtj2rRp2LFjB06fPi2N3a9fP6SlpSEqKgoA4OnpiVatWmH58uUAgPz8fDg5OWH8+PGYPn16oflnZWUhKytLeq5Wq+Hk5IT09HRYWFi88P553vgf95X7mEQVzbJhnXWdAhHpgFqthqWlZbGfoRX+iM3Fixfh6OgIV1dXDBw4ENeuXQMAJCQkICcnBz4+PlJsw4YNUatWLcTHxwMA4uPj0bRpU6moAQCVSgW1Wo0zZ85IMc+OURBTMEZ2djYSEhJkMQYGBvDx8ZFiCjN37lxYWlpKDycnpxfYE0RERFScCl3YeHp6Ijw8HFFRUVixYgWSkpLQoUMHPHz4ECkpKTA2NkbVqlVly9jZ2SElJQUAkJKSIitqCvoL+oqKUavVePLkCe7evYu8vDytMQVjFCY4OBjp6enS499//y31PiAiIqKSq6TrBIrSvXt36f/NmjWDp6cnnJ2dsWnTJpiamuows5JRKpVQKpW6ToOIiOg/o0IfsXle1apVUb9+fVy6dAn29vbIzs5GWlqaLCY1NRX29vYAAHt7e42rpAqeFxdjYWEBU1NTWFtbw9DQUGtMwRhERERUMbxWhc2jR49w+fJlODg4wMPDA0ZGRoiJiZH6L1y4gGvXrsHLywsA4OXlhVOnTsmuXoqOjoaFhQXc3NykmGfHKIgpGMPY2BgeHh6ymPz8fMTExEgxREREVDFU6MLm448/xv79+5GcnIy4uDj06tULhoaG6N+/PywtLTF8+HAEBQVh7969SEhIwNChQ+Hl5YU2bdoAAHx9feHm5oYPPvgAJ06cwK5duzBjxgwEBgZKp4jGjBmDK1euYOrUqTh//jy+/fZbbNq0CZMnT5byCAoKwvfff4+1a9fi3LlzGDt2LDIyMjB06FCd7BciIiLSrkLPsbl+/Tr69++Pe/fuwcbGBu3bt8fBgwdhY2MDAPjmm29gYGCA3r17IysrCyqVCt9++620vKGhIbZv346xY8fCy8sLZmZmCAgIwOeffy7FuLi4YMeOHZg8eTKWLFmCmjVrYvXq1VCpVFJM3759cefOHYSEhCAlJQXNmzdHVFSUxoRiIiIi0q0KfR8bfVPSa/DLivexof8C3seG6L9Jb+5jQ0RERFRSLGyIiIhIb7CwISIiIr3BwoaIiIj0BgsbIiIi0hssbIiIiEhvsLAhIiIivcHChoiIiPQGCxsiIiLSGyxsiIiISG+wsCEiIiK9wcKGiIiI9AYLGyIiItIbLGyIiIhIb7CwISIiIr3BwoaIiIj0BgsbIiIi0hssbIiIiEhvsLAhIiIivcHChoiIiPQGCxsiIiLSGyxsiIiISG+wsCEiIiK9wcKGiIiI9AYLGyIiItIbLGyIiIhIb1Towmbu3Llo1aoVzM3NYWtrC39/f1y4cEEW07lzZygUCtljzJgxsphr167Bz88PlStXhq2tLaZMmYLc3FxZzL59+/DGG29AqVSibt26CA8P18gnLCwMtWvXhomJCTw9PXH48OFy32YiIiIquwpd2Ozfvx+BgYE4ePAgoqOjkZOTA19fX2RkZMjiRo4ciVu3bkmP+fPnS315eXnw8/NDdnY24uLisHbtWoSHhyMkJESKSUpKgp+fH7y9vZGYmIhJkyZhxIgR2LVrlxSzceNGBAUFYebMmTh27Bjc3d2hUqlw+/btl78jiIiIqEQUQgih6yRK6s6dO7C1tcX+/fvRsWNHAE+P2DRv3hyLFy/Wuswff/yBnj174ubNm7CzswMArFy5EtOmTcOdO3dgbGyMadOmYceOHTh9+rS0XL9+/ZCWloaoqCgAgKenJ1q1aoXly5cDAPLz8+Hk5ITx48dj+vTpJcpfrVbD0tIS6enpsLCwKOtuKNT4H/eV+5hEFc2yYZ11nQIR6UBJP0Mr9BGb56WnpwMAqlWrJmuPiIiAtbU1mjRpguDgYDx+/Fjqi4+PR9OmTaWiBgBUKhXUajXOnDkjxfj4+MjGVKlUiI+PBwBkZ2cjISFBFmNgYAAfHx8pRpusrCyo1WrZg4iIiF6eSrpOoKTy8/MxadIktGvXDk2aNJHaBwwYAGdnZzg6OuLkyZOYNm0aLly4gF9//RUAkJKSIitqAEjPU1JSioxRq9V48uQJHjx4gLy8PK0x58+fLzTnuXPnYtasWWXfaCIiIiqV16awCQwMxOnTp/HXX3/J2keNGiX9v2nTpnBwcEDXrl1x+fJl1KlT51WnKRMcHIygoCDpuVqthpOTkw4zIiIi0m+vRWEzbtw4bN++HbGxsahZs2aRsZ6engCAS5cuoU6dOrC3t9e4eik1NRUAYG9vL/1b0PZsjIWFBUxNTWFoaAhDQ0OtMQVjaKNUKqFUKku2kURERPTCKvQcGyEExo0bh61bt2LPnj1wcXEpdpnExEQAgIODAwDAy8sLp06dkl29FB0dDQsLC7i5uUkxMTExsnGio6Ph5eUFADA2NoaHh4csJj8/HzExMVIMERER6V6FPmITGBiI9evX47fffoO5ubk0J8bS0hKmpqa4fPky1q9fjx49eqB69eo4efIkJk+ejI4dO6JZs2YAAF9fX7i5ueGDDz7A/PnzkZKSghkzZiAwMFA6mjJmzBgsX74cU6dOxbBhw7Bnzx5s2rQJO3bskHIJCgpCQEAAWrZsidatW2Px4sXIyMjA0KFDX/2OISIiIq0qdGGzYsUKAE8v6X7WmjVrMGTIEBgbG2P37t1SkeHk5ITevXtjxowZUqyhoSG2b9+OsWPHwsvLC2ZmZggICMDnn38uxbi4uGDHjh2YPHkylixZgpo1a2L16tVQqVRSTN++fXHnzh2EhIQgJSUFzZs3R1RUlMaEYiIiItKd1+o+Nq873seG6MXxPjZE/016eR8bIiIioqKwsCEiIiK9wcKGiIiI9AYLGyIiItIbLGyIiIhIb7CwISIiIr3BwoaIiIj0BgsbIiIi0hssbIiIiEhvsLAhIiIivcHChoiIiPQGCxsiIiLSGyxsiIiISG+wsCEiIr3w2WefYdSoUbpOo8zatGmDX375RddpvPZY2BARUbFCQ0OhUChkj4YNG0r99+/fx/jx49GgQQOYmpqiVq1amDBhAtLT0zXGCg8PR7NmzWBiYgJbW1sEBgZKfRcuXIC3tzfs7OxgYmICV1dXzJgxAzk5OUXml5KSgiVLluDTTz+V2ubOnYtWrVrB3Nwctra28Pf3x4ULF4rd1rS0NAQGBsLBwQFKpRL169fHzp07pf4VK1agWbNmsLCwgIWFBby8vPDHH3/IxggKCkK1atXg5OSEiIgIWd/mzZvx1ltvaax3xowZmD59OvLz84vNkQpXSdcJEBHR66Fx48bYvXu39LxSpf/7CLl58yZu3ryJr7/+Gm5ubrh69SrGjBmDmzdvYsuWLVLcokWLsHDhQixYsACenp7IyMhAcnKy1G9kZITBgwfjjTfeQNWqVXHixAmMHDkS+fn5mDNnTqG5rV69Gm3btoWzs7PUtn//fgQGBqJVq1bIzc3FJ598Al9fX5w9exZmZmZax8nOzsabb74JW1tbbNmyBTVq1MDVq1dRtWpVKaZmzZqYN28e6tWrByEE1q5di3feeQfHjx9H48aN8fvvv2P9+vX4888/cfHiRQwbNgwqlQrW1tZIT0/Hp59+KtuPBbp3744RI0bgjz/+gJ+fX5GvBRVOIYQQuk7iv0KtVsPS0hLp6emwsLAo9/HH/7iv3MckqmiWDeus6xT+k0JDQxEZGYnExMQSL7N582YMGjQIGRkZqFSpEh48eIAaNWrg999/R9euXUs8TlBQEI4cOYIDBw4UGtOkSROMHTtWdvTneXfu3IGtrS3279+Pjh07ao1ZuXIlFixYgPPnz8PIyKjEOVarVg0LFizA8OHDMX/+fBw7dgwbNmwAANjZ2WH79u1o1aoVRo8ejYYNG2Ly5Mlaxxk2bBhycnLwv//9r8Tr/q8o6WcoT0UREVGJXLx4EY6OjnB1dcXAgQNx7dq1IuMLPoAKjuxER0cjPz8fN27cQKNGjVCzZk28//77+Pfffwsd49KlS4iKikKnTp0Kjbl//z7Onj2Lli1bFpsP8LQIKcy2bdvg5eWFwMBA2NnZoUmTJpgzZw7y8vK0xufl5WHDhg3IyMiAl5cXAMDd3R1Hjx7FgwcPkJCQgCdPnqBu3br466+/cOzYMUyYMKHQ9bdu3brIAo6Kx8KGiIiK5enpifDwcERFRWHFihVISkpChw4d8PDhQ63xd+/exRdffCGbzHvlyhXplNLixYuxZcsW3L9/H2+++Says7Nly7dt2xYmJiaoV68eOnTogM8//7zQ3K5duwYhBBwdHQuNyc/Px6RJk9CuXTs0adKk0LgrV65gy5YtyMvLw86dO/HZZ59h4cKFmD17tizu1KlTqFKlCpRKJcaMGYOtW7fCzc0NAKBSqTBo0CC0atUKQ4YMwdq1a2FmZoaxY8di5cqVWLFiBRo0aIB27drhzJkzsnEdHR3x77//cp7NC+AcGyIiKlb37t2l/zdr1gyenp5wdnbGpk2bMHz4cFmsWq2Gn58f3NzcEBoaKrXn5+cjJycHS5cuha+vLwDg559/hr29Pfbu3QuVSiXFbty4EQ8fPsSJEycwZcoUfP3115g6darW3J48eQIAMDExKTT/wMBAnD59Gn/99VeR25mfnw9bW1usWrUKhoaG8PDwwI0bN7BgwQLMnDlTimvQoAESExORnp6OLVu2ICAgAPv375eKm9DQUNm2z5o1Cz4+PjAyMsLs2bNx6tQpbN++HYMHD0ZCQoIUZ2pqivz8fGRlZcHU1LTIXEk7FjZERFRqVatWRf369XHp0iVZ+8OHD9GtWzeYm5tj69atsnkqDg4OACB9+AOAjY0NrK2tNU5rOTk5SbF5eXkYNWoUPvroIxgaGmrkYm1tDQB48OABbGxsNPrHjRuH7du3IzY2FjVr1ixyuxwcHGBkZCRbT6NGjZCSkoLs7GwYGxsDAIyNjVG3bl0AgIeHB44cOYIlS5bgu+++0xjz/Pnz+Omnn3D8+HH8+OOP6NixI2xsbPD+++9j2LBhePjwIczNzQE8Pa1mZmbGouYF8FQUERGV2qNHj3D58mWpWAGeHqnx9fWFsbExtm3bpnEEpV27dgAgu+T6/v37uHv3ruxqpucVHOkp7PRMnTp1YGFhgbNnz8rahRAYN24ctm7dij179sDFxaXY7WrXrh0uXbokW9c///wDBwcHqagpLMesrCyNdiEERo8ejUWLFqFKlSrIy8uTLl0v+PfZ+TunT59GixYtis2TCsfChoiIivXxxx9j//79SE5ORlxcHHr16gVDQ0P0798fwP8VNRkZGfjhhx+gVquRkpKClJQU6YO7fv36eOeddzBx4kTExcXh9OnTCAgIQMOGDeHt7Q0AiIiIwKZNm3Du3DlcuXIFmzZtQnBwMPr27VvoVUoGBgbw8fHROM0UGBiIn376CevXr4e5ubmUT8GpKwAYPHgwgoODpedjx47F/fv3MXHiRPzzzz/YsWMH5syZI7vaKjg4GLGxsUhOTsapU6cQHByMffv2YeDAgRq5rV69GjY2NtJ9a9q1a4c9e/bg4MGD+Oabb+Dm5ia7lPzAgQPSaToqG17u/Qrxcm+iF/c6X+7d5cO5uk6hzM5E/4y0m8nIyXwMY1MzWDo4w7W1L0wtqwMAHty4gsRtq7Uu22bgFJhaWAEAcrMzcenvHbhz5QygUKCqowvqte8JkypVAQCpl07i2vFYPEm/CwhAaV4V9vWbo2azdjCsVPjl1/euXsCF/Vvh9cFUKBRP/2bfu+ITrbENvXvDoaEHAOD4b9/DxNwKjbr0kfrTU67h0t878OjeLRibWcChYUs4t+gIhcHTcc/v/QUPblxGVsZDVDI2QZXq9qjVoiOqOdWTrSf78UMk/LoCb/QaA6XZ//3OTzoag+sn42BsWgWNuvSBhd3T025Zj9IRH/E12gz8GCZVLAvd1opuz7fBxQeVQUk/Q1nYvEIsbIheHAsb0kYIgYRfV8CpWTvY1XPXdTplcjk+CjlZT9Cwcy9dp/JCdF3Y8FQUERG99hQKBRp08od4jS+TNjI1g2trH12n8drjVVFERKQXzK0dYW5d+L1sKrpazTvoOgW9wCM2pRQWFobatWvDxMQEnp6eOHz4sK5TIiIiov+PhU0pbNy4EUFBQZg5cyaOHTsGd3d3qFQq3L59W9epEREREVjYlMqiRYswcuRIDB06FG5ubli5ciUqV66MH3/8UdepERERETjHpsSys7ORkJAgu99Bwb0T4uPjtS6TlZUlu2FTwRewqdXql5Pjk4yXMi5RRfKy3j+vQm52pq5TIHrpXtZ7tGDc4i7mZmFTQnfv3kVeXh7s7Oxk7XZ2djh//rzWZebOnYtZs2ZptBfcKpyISm/VOF1nQERFsfyh8C8sLQ8PHz6EpWXh9/lhYfMSBQcHIygoSHqen5+P+/fvo3r16lAoFDrMjMqDWq2Gk5MT/v3335dyXyIiejF8j+oXIQQePnxY5Le4AyxsSsza2hqGhoZITU2VtaempsLe3l7rMkqlEkqlUtb27K2zST9YWFjwlyZRBcb3qP4o6khNAU4eLiFjY2N4eHggJiZGasvPz0dMTAy8vLx0mBkREREV4BGbUggKCkJAQABatmyJ1q1bY/HixcjIyMDQoUN1nRoRERGBhU2p9O3bF3fu3EFISAhSUlLQvHlzREVFaUwopv8GpVKJmTNnapxuJKKKge/R/yZ+CSYRERHpDc6xISIiIr3BwoaIiIj0BgsbIiIi0hssbIiIiEhvsLAhKqOwsDDUrl0bJiYm8PT0xOHDh3WdEhEBiI2NxVtvvQVHR0coFApERkbqOiV6hVjYEJXBxo0bERQUhJkzZ+LYsWNwd3eHSqXC7du3dZ0a0X9eRkYG3N3dERYWputUSAd4uTdRGXh6eqJVq1ZYvnw5gKd3oXZycsL48eMxffp0HWdHRAUUCgW2bt0Kf39/XadCrwiP2BCVUnZ2NhISEuDj4yO1GRgYwMfHB/Hx8TrMjIiIWNgQldLdu3eRl5enccdpOzs7pKSk6CgrIiICWNgQERGRHmFhQ1RK1tbWMDQ0RGpqqqw9NTUV9vb2OsqKiIgAFjZEpWZsbAwPDw/ExMRIbfn5+YiJiYGXl5cOMyMiIn67N1EZBAUFISAgAC1btkTr1q2xePFiZGRkYOjQobpOjeg/79GjR7h06ZL0PCkpCYmJiahWrRpq1aqlw8zoVeDl3kRltHz5cixYsAApKSlo3rw5li5dCk9PT12nRfSft2/fPnh7e2u0BwQEIDw8/NUnRK8UCxsiIiLSG5xjQ0RERHqDhQ0RERHpDRY2REREpDdY2BAREZHeYGFDREREeoOFDREREekNFjZERESkN1jYEBERkd5gYUNE/ykKhQKRkZG6ToOIXhIWNkSkV1JSUjB+/Hi4urpCqVTCyckJb731luxLS4lIf/FLMIlIbyQnJ6Ndu3aoWrUqFixYgKZNmyInJwe7du1CYGAgzp8/r+sUiegl4xEbItIbH374IRQKBQ4fPozevXujfv36aNy4MYKCgnDw4EGty0ybNg3169dH5cqV4erqis8++ww5OTlS/4kTJ+Dt7Q1zc3NYWFjAw8MDR48eBQBcvXoVb731FqysrGBmZobGjRtj586dr2RbiUg7HrEhIr1w//59REVF4csvv4SZmZlGf9WqVbUuZ25ujvDwcDg6OuLUqVMYOXIkzM3NMXXqVADAwIED0aJFC6xYsQKGhoZITEyEkZERACAwMBDZ2dmIjY2FmZkZzp49iypVqry0bSSi4rGwISK9cOnSJQgh0LBhw1ItN2PGDOn/tWvXxscff4wNGzZIhc21a9cwZcoUadx69epJ8deuXUPv3r3RtGlTAICrq+uLbgYRvSCeiiIivSCEKNNyGzduRLt27WBvb48qVapgxowZuHbtmtQfFBSEESNGwMfHB/PmzcPly5elvgkTJmD27Nlo164dZs6ciZMnT77wdhDRi2FhQ0R6oV69elAoFKWaIBwfH4+BAweiR48e2L59O44fP45PP/0U2dnZUkxoaCjOnDkDPz8/7NmzB25ubti6dSsAYMSIEbhy5Qo++OADnDp1Ci1btsSyZcvKfduIqOQUoqx/5hARVTDdu3fHqVOncOHCBY15NmlpaahatSoUCgW2bt0Kf39/LFy4EN9++63sKMyIESOwZcsWpKWlaV1H//79kZGRgW3btmn0BQcHY8eOHTxyQ6RDPGJDRHojLCwMeXl5aN26NX755RdcvHgR586dw9KlS+Hl5aURX69ePVy7dg0bNmzA5cuXsXTpUuloDAA8efIE48aNw759+3D16lX8/fffOHLkCBo1agQAmDRpEnbt2oWkpCQcO3YMe/fulfqISDc4eZiI9IarqyuOHTuGL7/8Eh999BFu3boFGxsbeHh4YMWKFRrxb7/9NiZPnoxx48YhKysLfn5++OyzzxAaGgoAMDQ0xL179zB48GCkpqbC2toa7777LmbNmgUAyMvLQ2BgIK5fvw4LCwt069YN33zzzavcZCJ6Dk9FERERkd7gqSgiIiLSGyxsiIiISG+wsCEiIiK9wcKGiIiI9AYLGyIiItIbLGyIiIhIb7CwISIiIr3BwoaIiIj0BgsbIiIi0hssbIiIiEhvsLAhIiIivfH/AAjfGn7WQ5GnAAAAAElFTkSuQmCC\n"},"metadata":{}},{"output_type":"display_data","data":{"text/plain":"<Figure size 640x480 with 0 Axes>"},"metadata":{}},{"output_type":"display_data","data":{"text/plain":"<Figure size 800x600 with 2 Axes>","image/png":"iVBORw0KGgoAAAANSUhEUgAAAqMAAAIjCAYAAAA3LxKwAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8pXeV/AAAACXBIWXMAAA9hAAAPYQGoP6dpAABx3ElEQVR4nO3deVxN+f8H8Ndtu6VVtLhUQpZoZJsk61cjE0aYGdGMkHXKliWGso5mGOtYGrPxNTI0RsYy0QiNEVKSfY3GcsOkmkqLOr8//DpfdyqKrpPu6/l9nMdj7vm8z+d8zlVfb+/zOZ8jEwRBABERERGRBLSkHgARERERaS4mo0REREQkGSajRERERCQZJqNEREREJBkmo0REREQkGSajRERERCQZJqNEREREJBkmo0REREQkGSajRERERCQZJqNE5bh69Sp69eoFU1NTyGQyREZGVmn/N2/ehEwmw8aNG6u03zdZ9+7d0b179yrt86+//oK+vj7+/PPPKu33WRs3boRMJsPNmzfFfRW9lsOHD0Mmk+Hw4cNVOiaZTIZ58+ZVaZ/VTVRUFIyMjPDgwQOph0JEr4DJKFVr169fx9ixY9GoUSPo6+vDxMQEbm5uWLVqFR4/fqzWc/v6+uLs2bP47LPPsHnzZrRv316t53udhg8fDplMBhMTkzK/x6tXr0Imk0Emk+HLL7+sdP93797FvHnzkJSUVAWjfTULFiyAi4sL3NzcUFhYiLp166Jz587lxguCABsbG7Rt2/Y1jvLl7Nu3r1omnEePHsW7776L+vXrQ19fH7a2tujXrx/Cw8Nfqr9169aV+Y+23r17o0mTJggNDX3FERORlHSkHgBRefbu3YsPPvgAcrkcw4YNQ6tWrVBQUICjR49i+vTpOH/+PDZs2KCWcz9+/BhxcXGYPXs2AgIC1HIOOzs7PH78GLq6umrp/0V0dHSQm5uL3bt348MPP1Rp27JlC/T19ZGXl/dSfd+9exfz589Hw4YN4ezsXOHjDhw48FLnK8+DBw+wadMmbNq0CQCgq6uLDz74AF9//TVu3boFOzu7UsfExsbi9u3bmDJlyiudu6qvpSz79u3D2rVry0xIHz9+DB2d1/9/8RERERg8eDCcnZ0xadIk1K5dGykpKYiNjcU333yDoUOHVrrPdevWoW7duhg+fHiptrFjx2LatGmYP38+jI2Nq+AKiOh1YzJK1VJKSgq8vb1hZ2eHmJgY1KtXT2zz9/fHtWvXsHfvXrWdv+S2n5mZmdrOIZPJoK+vr7b+X0Qul8PNzQ1bt24tlYyGh4ejT58+2LFjx2sZS25uLmrVqgU9Pb0q7ffHH3+Ejo4O+vXrJ+7z8fFBWFgYtm7dipkzZ5Y6Jjw8HFpaWvD29n6lc1f1tVSWVD9b8+bNg6OjI44fP17qO7h//36Vn2/QoEGYMGECIiIiMHLkyCrvn4jUj7fpqVpasmQJsrOz8d1336kkoiWaNGmCSZMmiZ+fPHmChQsXonHjxpDL5WjYsCE+/fRT5OfnqxzXsGFD9O3bF0ePHsXbb78NfX19NGrUCP/973/FmHnz5okVs+nTp0Mmk6Fhw4YAnt7eLvnvZ82bNw8ymUxlX3R0NDp37gwzMzMYGRmhWbNm+PTTT8X28uaMxsTEoEuXLjA0NISZmRn69++Pixcvlnm+a9euYfjw4TAzM4OpqSlGjBiB3Nzc8r/Yfxk6dCh+++03ZGRkiPvi4+Nx9erVMitY6enpmDZtGpycnGBkZAQTExO8++67OHPmjBhz+PBhdOjQAQAwYsQI8XZ/yXV2794drVq1QkJCArp27YpatWqJ38u/51n6+vpCX1+/1PV7eHigdu3auHv37nOvLzIyEi4uLjAyMhL3ubm5oWHDhmXeMi4sLMTPP/+MHj16QKFQIDk5GcOHDxeniVhbW2PkyJH4+++/n3vesq4FAG7fvg0vLy8YGhrC0tISU6ZMKfUzCgB//PEHPvjgA9ja2kIul8PGxgZTpkxRmVIxfPhwrF27FgDE7/jZn8Gy5oyePn0a7777LkxMTGBkZISePXvi+PHjKjEl81///PNPBAYGwsLCAoaGhhgwYECF5mZev34dHTp0KDMZt7S0VPlcXFyMlStXomXLltDX14eVlRXGjh2LR48eiTENGzbE+fPnceTIEfEan/1eLS0t8dZbb2HXrl0vHBsRVU+sjFK1tHv3bjRq1AidOnWqUPyoUaOwadMmvP/++5g6dSpOnDiB0NBQXLx4ETt37lSJvXbtGt5//334+fnB19cX33//PYYPH4527dqhZcuWGDhwIMzMzDBlyhQMGTIEnp6eKslMRZw/fx59+/bFW2+9hQULFkAul+PatWsvfIjm999/x7vvvotGjRph3rx5ePz4Mb766iu4ubkhMTGxVCL84Ycfwt7eHqGhoUhMTMS3334LS0tLfPHFFxUa58CBAzFu3Dj88ssvYlUpPDwczZs3L3PO5I0bNxAZGYkPPvgA9vb2SEtLw9dff41u3brhwoULUCgUaNGiBRYsWICQkBCMGTMGXbp0AQCVP8u///4b7777Lry9vfHRRx/BysqqzPGtWrUKMTEx8PX1RVxcHLS1tfH111/jwIED2Lx5MxQKRbnXVlhYiPj4eIwfP15lv0wmw9ChQ7F48WKcP38eLVu2FNuioqKQnp4OHx8fAE//QXHjxg2MGDEC1tbW4tSQ8+fP4/jx46X+AfI8jx8/Rs+ePZGamoqJEydCoVBg8+bNiImJKRUbERGB3NxcjB8/HnXq1MHJkyfx1Vdf4fbt24iIiADw9Pb03bt3ER0djc2bN7/w/OfPn0eXLl1gYmKCGTNmQFdXF19//TW6d++OI0eOwMXFRSV+woQJqF27NubOnYubN29i5cqVCAgIwLZt2557Hjs7Oxw8eBC3b99GgwYNnhs7duxYbNy4ESNGjMDEiRORkpKCNWvW4PTp0/jzzz+hq6uLlStXYsKECTAyMsLs2bMBoNTPS7t27ar8AUMieo0EomomMzNTACD079+/QvFJSUkCAGHUqFEq+6dNmyYAEGJiYsR9dnZ2AgAhNjZW3Hf//n1BLpcLU6dOFfelpKQIAISlS5eq9Onr6yvY2dmVGsPcuXOFZ3+dVqxYIQAQHjx4UO64S87xww8/iPucnZ0FS0tL4e+//xb3nTlzRtDS0hKGDRtW6nwjR45U6XPAgAFCnTp1yj3ns9dhaGgoCIIgvP/++0LPnj0FQRCEoqIiwdraWpg/f36Z30FeXp5QVFRU6jrkcrmwYMECcV98fHypayvRrVs3AYAQFhZWZlu3bt1U9u3fv18AICxatEi4ceOGYGRkJHh5eb3wGq9duyYAEL766qtSbefPnxcACLNmzVLZ7+3tLejr6wuZmZmCIAhCbm5uqWO3bt1a6mfohx9+EAAIKSkp5V7LypUrBQDC9u3bxX05OTlCkyZNBADCoUOHxP1lnTc0NFSQyWTCrVu3xH3+/v5Cef83DkCYO3eu+NnLy0vQ09MTrl+/Lu67e/euYGxsLHTt2rXUtbi7uwvFxcXi/ilTpgja2tpCRkZGmecr8d133wkABD09PaFHjx5CcHCw8Mcff5T6ufnjjz8EAMKWLVtU9kdFRZXa37Jly1I/F89avHixAEBIS0t77tiIqHribXqqdrKysgCgwg8j7Nu3DwAQGBiosn/q1KkAUGpuqaOjo1itAwALCws0a9YMN27ceOkx/1vJXNNdu3ahuLi4Qsfcu3cPSUlJGD58OMzNzcX9b731Ft555x3xOp81btw4lc9dunTB33//LX6HFTF06FAcPnwYSqUSMTExUCqV5T5kIpfLoaX19P82ioqK8Pfff4tTEBITEyt8TrlcjhEjRlQotlevXhg7diwWLFiAgQMHQl9fH19//fULjyu5lV67du1SbY6OjmjTpg1++ukncV9OTg5+/fVX9O3bFyYmJgAAAwMDsT0vLw8PHz5Ex44dAaBS1ws8/TmtV68e3n//fXFfrVq1MGbMmFKxz543JycHDx8+RKdOnSAIAk6fPl2p8wJP/6wOHDgALy8vNGrUSNxfr149DB06FEePHi31MzNmzBiVym+XLl1QVFSEW7duPfdcI0eORFRUFLp3746jR49i4cKF6NKlCxwcHHDs2DExLiIiAqampnjnnXfw8OFDcWvXrh2MjIxw6NChCl9fyZ/xw4cPK3wMEVUfTEap2ilJBP75558Kxd+6dQtaWlpo0qSJyn5ra2uYmZmV+svT1ta2VB+1a9dWmaf2qgYPHgw3NzeMGjUKVlZW8Pb2xvbt25+bmJaMs1mzZqXaWrRogYcPHyInJ0dl/7+vpeQv5cpci6enJ4yNjbFt2zZs2bIFHTp0KPVdliguLsaKFSvg4OAAuVyOunXrwsLCAsnJycjMzKzwOevXr1+pB3y+/PJLmJubIykpCatXry419/B5BEEoc7+Pjw9SUlLEBCkyMhK5ubniLXrg6RzZSZMmwcrKCgYGBrCwsIC9vT0AVOp6gad/vk2aNCl1a7+sP+/U1FTxHyVGRkawsLBAt27dXuq8wNMH8nJzc8v92SouLsZff/2lsv9VfrY8PDywf/9+ZGRkIDY2Fv7+/rh16xb69u0rPsR09epVZGZmwtLSEhYWFipbdnZ2pR52Kvkzrsy0CSKqPjhnlKodExMTKBQKnDt3rlLHVfQvIm1t7TL3l5e0VOQcRUVFKp8NDAwQGxuLQ4cOYe/evYiKisK2bdvwn//8BwcOHCh3DJX1KtdSQi6XY+DAgdi0aRNu3Ljx3HUrFy9ejODgYIwcORILFy6Eubk5tLS0MHny5ApXgAHVyl9FnD59WkxOzp49iyFDhrzwmDp16gAoP3kaMmQIZsyYgfDwcHTq1Anh4eGoXbs2PD09xZgPP/wQx44dw/Tp0+Hs7AwjIyMUFxejd+/elbreyigqKsI777yD9PR0BAUFoXnz5jA0NMSdO3cwfPhwtZ3336riZ6tWrVro0qULunTpgrp162L+/Pn47bff4Ovri+LiYlhaWmLLli1lHmthYVHh85T8GdetW7fCxxBR9cFklKqlvn37YsOGDYiLi4Orq+tzY+3s7FBcXIyrV6+iRYsW4v60tDRkZGSUuZbky6pdu7bKk+clyrp1qaWlhZ49e6Jnz55Yvnw5Fi9ejNmzZ+PQoUNwd3cv8zoA4PLly6XaLl26hLp168LQ0PDVL6IMQ4cOxffff//CJY1KnjT/7rvvVPZnZGSoJAJVWaHKycnBiBEj4OjoiE6dOmHJkiUYMGCA+MR+eWxtbWFgYICUlJQy2xUKBXr06IGIiAgEBwcjOjoaw4cPFyu2jx49wsGDBzF//nyEhISIx129evWlrsPOzg7nzp2DIAgq38+//7zPnj2LK1euYNOmTRg2bJi4Pzo6ulSfFf2eLSwsUKtWrXJ/trS0tGBjY1PRS3kpJS+NuHfvHgCgcePG+P333+Hm5vbCf5y86DpTUlLEKj0RvXl4m56qpRkzZsDQ0BCjRo1CWlpaqfbr169j1apVACBWslauXKkSs3z5cgBAnz59qmxcjRs3RmZmJpKTk8V99+7dK/XEfnp6eqljSxZ/L2spH+Dp/D1nZ2ds2rRJJeE9d+4cDhw4oFKxq2o9evTAwoULsWbNGlhbW5cbp62tXaoyFhERgTt37qjsK0may0rcKysoKAipqanYtGkTli9fjoYNG8LX17fc77GErq4u2rdvj1OnTpUb4+Pjg/v372Ps2LEoLCxUuUVfUhn89/X+++esojw9PXH37l38/PPP4r7c3NxSL24o67yCIIg/78+q6Pesra2NXr16YdeuXSqvLE1LS0N4eDg6d+4sTo95VQcPHixzf8mc55KpAh9++CGKioqwcOHCUrFPnjxRuSZDQ8PnXmNCQsIL/9FKRNUXK6NULTVu3Bjh4eEYPHgwWrRoofIGpmPHjiEiIkJ8G0vr1q3h6+uLDRs2ICMjA926dcPJkyexadMmeHl5oUePHlU2Lm9vbwQFBWHAgAGYOHEicnNzsX79ejRt2lTlgZYFCxYgNjYWffr0gZ2dHe7fv49169ahQYMGz30V5dKlS/Huu+/C1dUVfn5+4tJOpqaman3to5aWFubMmfPCuL59+2LBggUYMWIEOnXqhLNnz2LLli0qD8UAT//8zMzMEBYWBmNjYxgaGsLFxUWcb1lRMTExWLduHebOnSsuNfXDDz+ge/fuCA4OxpIlS557fP/+/TF79mxkZWWVmWwNGjQIn3zyCXbt2gUbGxt07dpVbDMxMUHXrl2xZMkSFBYWon79+jhw4EC5ldYXGT16NNasWYNhw4YhISEB9erVw+bNm1GrVi2VuObNm6Nx48aYNm0a7ty5AxMTE+zYsaPM6Qbt2rUDAEycOBEeHh7Q1tYut7K9aNEice3bTz75BDo6Ovj666+Rn5//wu+xMvr37w97e3v069cPjRs3Rk5ODn7//Xfs3r0bHTp0EF9A0K1bN4wdOxahoaFISkpCr169oKuri6tXryIiIgKrVq0SH/Zq164d1q9fj0WLFqFJkyawtLTEf/7zHwBPF9JPTk6Gv79/lV0DEb1mEj3FT1QhV65cEUaPHi00bNhQ0NPTE4yNjQU3Nzfhq6++EvLy8sS4wsJCYf78+YK9vb2gq6sr2NjYCLNmzVKJEYSnSzv16dOn1Hn+vQxPeUs7CYIgHDhwQGjVqpWgp6cnNGvWTPjxxx9LLe108OBBoX///oJCoRD09PQEhUIhDBkyRLhy5Uqpc/x7+aPff/9dcHNzEwwMDAQTExOhX79+woULF1RiSs7376WjylpiqCzPLu1UnvKWdpo6dapQr149wcDAQHBzcxPi4uLKXJJp165dgqOjo6Cjo6Nynd26dRNatmxZ5jmf7ScrK0uws7MT2rZtKxQWFqrETZkyRdDS0hLi4uKeew1paWmCjo6OsHnz5nJjPvjgAwGAMGPGjFJtt2/fFgYMGCCYmZkJpqamwgcffCDcvXu31LJJFVnaSRAE4datW8J7770n1KpVS6hbt64wadIkcSmjZ5d2unDhguDu7i4YGRkJdevWFUaPHi2cOXOm1M/LkydPhAkTJggWFhaCTCZT+Rn89xgFQRASExMFDw8PwcjISKhVq5bQo0cP4dixYyoxJdcSHx+vsv/QoUOlxlmWrVu3Ct7e3kLjxo0FAwMDQV9fX3B0dBRmz54tZGVllYrfsGGD0K5dO8HAwEAwNjYWnJychBkzZgh3794VY5RKpdCnTx/B2NhYAKDyva5fv16oVatWmX0T0ZtBJgiVmI1ORPSG8fPzw5UrV/DHH39IPRRSgzZt2qB79+5YsWKF1EMhopfEZJSIarTU1FQ0bdoUBw8ehJubm9TDoSoUFRWF999/Hzdu3KjUcl9EVL0wGSUiIiIiyfBpeiIiIiKSDJNRIiIiIpIMk1EiIiIikgyTUSIiIiKSDJNRIiIiIpJMjXwDk0GbAKmHQERq8ih+jdRDICI10ZcwK1Fn7vD4NP9/63lYGSUiIiIiydTIyigRERFRpchYn5MKk1EiIiIimUzqEWgs/jOAiIiIiCTDyigRERERb9NLht88EREREUmGlVEiIiIizhmVDCujRERERCQZVkaJiIiIOGdUMvzmiYiIiEgyrIwSERERcc6oZJiMEhEREfE2vWT4zRMRERGRZFgZJSIiIuJtesmwMkpEREREkmFllIiIiIhzRiXDb56IiIiIJMPKKBERERHnjEqGlVEiIiIikgwro0REREScMyoZJqNEREREvE0vGf4zgIiIiIgkw8ooEREREW/TS4bfPBERERFJhpVRIiIiIlZGJcNvnoiIiIgkw8ooERERkRafppcKK6NEREREJBlWRomIiIg4Z1QyTEaJiIiIuOi9ZPjPACIiIiKSDCujRERERLxNLxl+80REREQkGVZGiYiIiDhnVDKsjBIRERGRZFgZJSIiIuKcUcnwmyciIiIiybAySkRERMQ5o5JhMkpERETE2/SS4TdPRERERJJhZZSIiIiIt+klw8ooEREREUmGlVEiIiIizhmVDL95IiIiIpIMK6NEREREnDMqGVZGiYiIiKqR2NhY9OvXDwqFAjKZDJGRkaViLl68iPfeew+mpqYwNDREhw4dkJqaKrbn5eXB398fderUgZGREQYNGoS0tDSVPlJTU9GnTx/UqlULlpaWmD59Op48eaISc/jwYbRt2xZyuRxNmjTBxo0bS41l7dq1aNiwIfT19eHi4oKTJ09W6nqZjBIRERHJtNS3VVJOTg5at26NtWvXltl+/fp1dO7cGc2bN8fhw4eRnJyM4OBg6OvrizFTpkzB7t27ERERgSNHjuDu3bsYOHCg2F5UVIQ+ffqgoKAAx44dw6ZNm7Bx40aEhISIMSkpKejTpw969OiBpKQkTJ48GaNGjcL+/fvFmG3btiEwMBBz585FYmIiWrduDQ8PD9y/f7/C1ysTBEGozBf0JjBoEyD1EIhITR7Fr5F6CESkJvoSTh406LdObX0/3v3JSx8rk8mwc+dOeHl5ifu8vb2hq6uLzZs3l3lMZmYmLCwsEB4ejvfffx8AcOnSJbRo0QJxcXHo2LEjfvvtN/Tt2xd3796FlZUVACAsLAxBQUF48OAB9PT0EBQUhL179+LcuXMq587IyEBUVBQAwMXFBR06dMCaNU//v7m4uBg2NjaYMGECZs6cWaFrZGWUiIiISI3y8/ORlZWlsuXn579UX8XFxdi7dy+aNm0KDw8PWFpawsXFReVWfkJCAgoLC+Hu7i7ua968OWxtbREXFwcAiIuLg5OTk5iIAoCHhweysrJw/vx5MebZPkpiSvooKChAQkKCSoyWlhbc3d3FmIpgMkpEREQkk6ltCw0NhampqcoWGhr6UsO8f/8+srOz8fnnn6N37944cOAABgwYgIEDB+LIkSMAAKVSCT09PZiZmakca2VlBaVSKcY8m4iWtJe0PS8mKysLjx8/xsOHD1FUVFRmTEkfFcGn6YmIiIjUaNasWQgMDFTZJ5fLX6qv4uJiAED//v0xZcoUAICzszOOHTuGsLAwdOvW7dUGKwEmo0RERERqXPReLpe/dPL5b3Xr1oWOjg4cHR1V9rdo0QJHjx4FAFhbW6OgoAAZGRkq1dG0tDRYW1uLMf9+6r3kaftnY/79BH5aWhpMTExgYGAAbW1taGtrlxlT0kdF8DY9ERER0RtCT08PHTp0wOXLl1X2X7lyBXZ2dgCAdu3aQVdXFwcPHhTbL1++jNTUVLi6ugIAXF1dcfbsWZWn3qOjo2FiYiImuq6urip9lMSU9KGnp4d27dqpxBQXF+PgwYNiTEWwMkpERERUjRa9z87OxrVr18TPKSkpSEpKgrm5OWxtbTF9+nQMHjwYXbt2RY8ePRAVFYXdu3fj8OHDAABTU1P4+fkhMDAQ5ubmMDExwYQJE+Dq6oqOHTsCAHr16gVHR0d8/PHHWLJkCZRKJebMmQN/f3+xijtu3DisWbMGM2bMwMiRIxETE4Pt27dj79694tgCAwPh6+uL9u3b4+2338bKlSuRk5ODESNGVPh6mYwSERERVSOnTp1Cjx49xM8l8019fX2xceNGDBgwAGFhYQgNDcXEiRPRrFkz7NixA507dxaPWbFiBbS0tDBo0CDk5+fDw8MD69b9b/kqbW1t7NmzB+PHj4erqysMDQ3h6+uLBQsWiDH29vbYu3cvpkyZglWrVqFBgwb49ttv4eHhIcYMHjwYDx48QEhICJRKJZydnREVFVXqoabn4TqjRPRG4TqjRDWXpOuMDvhWbX0/3jlKbX3XBKyMEhEREVWj2/Sahg8wEREREZFkWBklIiIijSdjZVQyrIwSERERkWRYGSUiIiKNx8qodFgZJSIiIiLJsDJKRERExMKoZFgZJSIiIiLJsDJKREREGo9zRqXDZJSIiIg0HpNR6fA2PRERERFJhpVRIiIi0nisjEqHlVEiIiIikgwro0RERKTxWBmVDiujRERERCQZVkaJiIiIWBiVDCujRERERCQZVkaJiIhI43HOqHRYGSUiIiIiybAySkRERBqPlVHpMBklIiIijcdkVDq8TU9EREREkmFllIiIiDQeK6PSYWWUiIiIiCTDyigRERERC6OSYWWUiIiIiCTDyigRERFpPM4ZlQ4ro0REREQkGVZGiYiISOOxMiodJqNERESk8ZiMSoe36YmIiIhIMqyMEhEREbEwKhlWRomIiIhIMqyMEhERkcbjnFHpsDJKRERERJJhZZSIiIg0Hiuj0pE0GS0oKEBkZCTi4uKgVCoBANbW1ujUqRP69+8PPT09KYdHRERERGom2W36a9euoUWLFvD19cXp06dRXFyM4uJinD59GsOGDUPLli1x7do1qYZHREREGkQmk6lto+eTrDI6fvx4ODk54fTp0zAxMVFpy8rKwrBhw+Dv74/9+/dLNEIiIiLSFEwapSNZMvrnn3/i5MmTpRJRADAxMcHChQvh4uIiwciIiIiI6HWR7Da9mZkZbt68WW77zZs3YWZm9trGQ0RERBpMpsaNnkuyZHTUqFEYNmwYVqxYgeTkZKSlpSEtLQ3JyclYsWIFhg8fjjFjxkg1PCIiIiJJxMbGol+/flAoFJDJZIiMjCw3dty4cZDJZFi5cqXK/vT0dPj4+MDExARmZmbw8/NDdna2SkxycjK6dOkCfX192NjYYMmSJaX6j4iIQPPmzaGvrw8nJyfs27dPpV0QBISEhKBevXowMDCAu7s7rl69WqnrlSwZXbBgAYKCgrB06VI4OztDoVBAoVDA2dkZS5cuRVBQEObNmyfV8IiIiEiDVKcHmHJyctC6dWusXbv2uXE7d+7E8ePHoVAoSrX5+Pjg/PnziI6Oxp49exAbG6tS5MvKykKvXr1gZ2eHhIQELF26FPPmzcOGDRvEmGPHjmHIkCHw8/PD6dOn4eXlBS8vL5w7d06MWbJkCVavXo2wsDCcOHEChoaG8PDwQF5eXoWvVyYIglDhaDVJSUlRWdrJ3t7+lfozaBNQFcMiomroUfwaqYdARGqiL+GCk/XH71Rb33fWD3jpY2UyGXbu3AkvLy/VPu/cgYuLC/bv348+ffpg8uTJmDx5MgDg4sWLcHR0RHx8PNq3bw8AiIqKgqenJ27fvg2FQoH169dj9uzZUCqV4lKaM2fORGRkJC5dugQAGDx4MHJycrBnzx7xvB07doSzszPCwsIgCAIUCgWmTp2KadOmAQAyMzNhZWWFjRs3wtvbu0LXWC3ewGRvbw9XV1e4urq+ciJKREREVFnqrIzm5+cjKytLZcvPz3/psRYXF+Pjjz/G9OnT0bJly1LtcXFxMDMzExNRAHB3d4eWlhZOnDghxnTt2lVlTXcPDw9cvnwZjx49EmPc3d1V+vbw8EBcXByA/xUTn40xNTWFi4uLGFMR1SIZJSIiIqqpQkNDYWpqqrKFhoa+dH9ffPEFdHR0MHHixDLblUolLC0tVfbp6OjA3NxcvBOtVCphZWWlElPy+UUxz7Y/e1xZMRXB14ESERGRxlPnOqOzZs1CYGCgyj65XP5SfSUkJGDVqlVITEysMWujsjJKREREpMalneRyOUxMTFS2l01G//jjD9y/fx+2trbQ0dGBjo4Obt26halTp6Jhw4YAnj5/c//+fZXjnjx5gvT0dFhbW4sxaWlpKjEln18U82z7s8eVFVMRTEaJiIiI3hAff/wxkpOTkZSUJG4KhQLTp08X31rp6uqKjIwMJCQkiMfFxMSguLhYfKGQq6srYmNjUVhYKMZER0ejWbNmqF27thhz8OBBlfNHR0fD1dUVwNNnfqytrVVisrKycOLECTGmIiS/TR8VFQUjIyN07twZALB27Vp88803cHR0xNq1a8UvhIiIiEhdqtMt7+zsbFy7dk38nJKSgqSkJJibm8PW1hZ16tRRidfV1YW1tTWaNWsGAGjRogV69+6N0aNHIywsDIWFhQgICIC3t7e4DNTQoUMxf/58+Pn5ISgoCOfOncOqVauwYsUKsd9JkyahW7duWLZsGfr06YOffvoJp06dEpd/kslkmDx5MhYtWgQHBwfY29sjODgYCoWi1NP/zyN5ZXT69OnIysoCAJw9exZTp06Fp6cnUlJSSs2vICIiIqrpTp06hTZt2qBNmzYAgMDAQLRp0wYhISEV7mPLli1o3rw5evbsCU9PT3Tu3FllDVFTU1McOHAAKSkpaNeuHaZOnYqQkBCVtUg7deqE8PBwbNiwAa1bt8bPP/+MyMhItGrVSoyZMWMGJkyYgDFjxqBDhw7Izs5GVFQU9PX1KzxWydcZNTIywrlz59CwYUPMmzcP586dw88//4zExER4enpW6mmsElxnlKjm4jqjRDWXlOuM2k3crba+b63up7a+awLJK6N6enrIzc0FAPz+++/o1asXAMDc3FysmBIRERFRzST5nNHOnTsjMDAQbm5uOHnyJLZt2wYAuHLlCho0aCDx6OhVubVtjCnD3NHW0Rb1LEzx4ZQN2H04WWx/fLrsKtenK3ZixX+fToh2bt4AiyZ5oV1LWxQVCYg8mISgZTuQ87jguf0Mm/kDIvY/nbzd/z+tMfqDLnirWX3IdXVw8YYSi8L24fe4i2L8tJG94PWf1mja0AqP8wtx4swNzF61C1dv3S/VNxG9nO0/hWP7tq24e+cOAKBxEweMHf8JOnfpBgB4+OABli9bguPHjiEnNwcNG9pj9JhxcO/lIfaRmZGBzxcvxJHDh6ClpYWe7/RC0MzZqGVoKMk1Uc1QneaMahrJK6Nr1qyBjo4Ofv75Z6xfvx7169cHAPz222/o3bu3xKOjV2VoIMfZK3cwOXRbme0N3WepbGPm/oji4mLsPJgEAKhnYYq9YRNw/a8H6Prxl+jvvxaOja3xzYKPS/U1OmSzSl+/HjojtnVu2wQxxy9hQMB6dPJZgiPxV7Bj1Vi0bva/f/B0adsEYdti0W3Yl+g7fg10dLSxZ30AaunrlToXEb0cSytrTJoyDVsjfkH49h1426UjJgX449q1qwCA2Z8G4WZKClatWY8dO3ejp/s7mD51Mi5evCD2MStoGq5fu4awb3/A6rVhSDx1CgvmVXwuHRFVL5JXRm1tbVXeeVri2ae56M114M8LOPDnhXLb0/7+R+Vzv+5OOBJ/FTfv/A0AeLdLKxQ+KcLk0O0omd484bNtOBXxKRrZ1MWNvx6Kx2b+87hUfyWmf7lD5fPcNbvRt/tb8OzWCmcu3wYA9A9YpxIzZu6P+Cvmc7RxtMGfidcreMVE9Dzde/xH5fOESVOw/aetSD6ThCZNHHDm9GnMDpkLp7feAgCMGfcJfvzvJlw8fx4tWjjixvXr+PPoHwjf9jNatnICAMz8dA78x49B4PQZsLS0KnVOoopgZVQ6kldGExMTcfbsWfHzrl274OXlhU8//RQFBQXPOZJqGktzY/Tu3AqbIv/3Plu5ng4KC4vw7HN2j/Of/lx0cm6scvzKWR/ir5jP8cfmaRjWv+NzzyWTyWBcS45HmbnlxpgYPX0S8HkxRPTyioqK8Nu+vXj8OBetWz99arh1mzbYH/UbMjMyUFxcjN/27UV+QT7ad3gbAHDmzGkYm5iIiSgAuLh2gpaWFs4mJ5d5HqIKUeOi9/R8kiejY8eOxZUrVwAAN27cgLe3N2rVqoWIiAjMmDHjhcfn5+cjKytLZROKi9Q9bFKDj/q54J/cPETGJIn7Dp+8DKs6JpgyrCd0dbRhZmyARRP7AwCsLUzFuPnr9uCjGd+j7/g1iDyYhFWzBuOTId3KPdeUYT1hWEuOHQcSy2yXyWRYOu19HDt9HReu36uaCyQiAMDVK5fRsX0bdGjjhM8WzMWK1WvRuEkTAMDSZSvxpPAJurq5oEMbJyyaH4IVq9bA1s4OAPD3w4cwNzdX6U9HRwcmpqb4++GD134tRPTqJE9Gr1y5AmdnZwBAREQEunbtivDwcGzcuBE7dux4/sEAQkNDYWpqqrI9SUt44XFU/Qzr3xHbfjuF/IIn4r6LN5QYHbIZEz/uifS45bj5+2LcvPM3lA+zIBQXi3GffxOFuDM3cObybSzb+DuWb/odU4a5l3mewb3b49Ox7+KjoO/x4FF2mTErZ32Ilk3qYdjMH6r2IokIDRvaY/uOSPy4dTs+GDwEwZ8G4fr/L/C99qtV+OefLGz4biPCt+3Ax74jMGPqZFy9clniUVNNJ5PJ1LbR80k+Z1QQBBT/f1Lx+++/o2/fvgAAGxsbPHz48HmHAgBmzZpVanF8yy5BVT9QUiu3No3RzN4aH5eR/G2LOoVtUadgaW6MnMf5EARg4kf/Qcrtv8vtL/7sTXw65l3o6eqgoPB/ye0HHu2wLmQofGZ8h0Mnyv7LbUXQB/Ds0grufitx537GK18bEanS1dMTK52OLVvh/Lmz2PLjfzFi5Cj8FP4jduzagyZNHAAAzZo3R2LCKfy0dQuC5y5Anbp1kZ6ertLfkydPkJWZiTp1LV77tRDRq5M8GW3fvj0WLVoEd3d3HDlyBOvXrwfw9NVXVlYvnogul8shl8tV9sm0tNUyVlIfXy9XJFxIxdkrd8qNuZ/+9OGkYf07Iq+gEAePXyo39q1mDZCemaOSiH7Yux3C5vpg2KwfEHX0fJnHrQj6AO/9pzV6jV6FW3fLT3aJqOoUFxejsKAAeXmPAQBaMtWbdlpa2hCKn84bb926Df7JysKF8+fg2PLpW2BOnjiO4uJi8aEnopfBCqZ0JE9GV65cCR8fH0RGRmL27Nlo8v/zhn7++Wd06tRJ4tHRqzI00ENjm/9VKxrWr4O3mtbHo6xc/KV8BAAwNtTHwHfaYObynWX2MW5wVxw/cwPZuQXo2bE5Fk/2QvBXu5CZ/fQvLs+urWBZxxgnk28ir6AQPTs2xwy/Xlj5/+uUAk9vzX+z4GNMW/oz4s/ehFUdYwDA4/xCZGXnAXh6a37wu+3xwZQNyM7JE2Mys/OQl19Y9V8OkQZatWIZOnfpCut69ZCbk4N9e/fgVPxJrN/wHRraN4KtrR0Wzg9B4LQgmJmZISbmdxyP+xNfrfsaANCocWO4de6C+XODMSdkPp48KUToZwvR+90+fJKe6A0l+etAy5OXlwdtbW3o6upW+li+DrT66NLOAQe+nVRq/+Zfj2PM3B8BACMHumHptEGw7/WpmBg+69uFH6N351YwqqWHyzfTsPK/B7F1b7zY/k6nFlgw4T00trGATCbD9b8e4JuIP/D9L8fEp/D3fzMJXds7PHcc5S3APzpkM37cfaLyF09qwdeBvtnmBn+Kk8eP48GD+zAyNkbTps0wwm80XDu5AQBu3bqJVcuX4fTpBOTm5sLWxhbDRoxEv/e8xD4yMzIQ+tlCHDkcIy56P3PWHC56XwNI+TrQJtN+U1vf1758V2191wTVNhl9FUxGiWouJqNENReTUc0k+W36oqIirFixAtu3b0dqamqptUX/PVGdiIiIqKpxzqh0JF/aaf78+Vi+fDkGDx6MzMxMBAYGYuDAgdDS0sK8efOkHh4RERFpAJlMfRs9n+TJ6JYtW/DNN99g6tSp0NHRwZAhQ/Dtt98iJCQEx48fl3p4RERERKRGkiejSqUSTk5PX+tmZGSEzMxMAEDfvn2xd+9eKYdGREREGoKL3ktH8mS0QYMGuHfv6esWGzdujAMHDgAA4uPjS60fSkREREQ1i+TJ6IABA3Dw4NP1ICdMmIDg4GA4ODhg2LBhGDlypMSjIyIiIk3AOaPSkfxp+s8//1z878GDB8PW1hZxcXFwcHBAv379JBwZEREREamb5Mnov7m6usLV1VXqYRAREZEG0dJiCVMqkiSjv/76a4Vj33vvPTWOhIiIiIikJEky6uXlVaE4mUyGoqIi9Q6GiIiINB7ndkpHkmS0uLhYitMSERERlYlLMElH8qfpiYiIiEhzSZaMxsTEwNHREVlZWaXaMjMz0bJlS8TGxkowMiIiItI0XNpJOpIloytXrsTo0aNhYmJSqs3U1BRjx47FihUrJBgZEREREb0ukiWjZ86cQe/evctt79WrFxISEl7jiIiIiEhT8XWg0pEsGU1LS4Ourm657To6Onjw4MFrHBERERERvW6SJaP169fHuXPnym1PTk5GvXr1XuOIiIiISFOxMiodyZJRT09PBAcHIy8vr1Tb48ePMXfuXPTt21eCkRERERHR6yLZ60DnzJmDX375BU2bNkVAQACaNWsGALh06RLWrl2LoqIizJ49W6rhERERkQZhAVM6kiWjVlZWOHbsGMaPH49Zs2ZBEAQAT8vkHh4eWLt2LaysrKQaHhEREWkQ3k6XjmTJKADY2dlh3759ePToEa5duwZBEODg4IDatWtLOSwiIiIiek0kTUZL1K5dGx06dJB6GERERKShWBiVDl8HSkRERESSqRaVUSIiIiIpcc6odFgZJSIiIiLJsDJKREREGo+FUemwMkpEREREkmFllIiIiDQe54xKh5VRIiIiomokNjYW/fr1g0KhgEwmQ2RkpNhWWFiIoKAgODk5wdDQEAqFAsOGDcPdu3dV+khPT4ePjw9MTExgZmYGPz8/ZGdnq8QkJyejS5cu0NfXh42NDZYsWVJqLBEREWjevDn09fXh5OSEffv2qbQLgoCQkBDUq1cPBgYGcHd3x9WrVyt1vUxGiYiISOPJZOrbKisnJwetW7fG2rVrS7Xl5uYiMTERwcHBSExMxC+//ILLly/jvffeU4nz8fHB+fPnER0djT179iA2NhZjxowR27OystCrVy/Y2dkhISEBS5cuxbx587BhwwYx5tixYxgyZAj8/Pxw+vRpeHl5wcvLC+fOnRNjlixZgtWrVyMsLAwnTpyAoaEhPDw8kJeXV+HrlQkl7+GsQQzaBEg9BCJSk0fxa6QeAhGpib6EkwddQo+ore8Ts7q99LEymQw7d+6El5dXuTHx8fF4++23cevWLdja2uLixYtwdHREfHw82rdvDwCIioqCp6cnbt++DYVCgfXr12P27NlQKpXQ09MDAMycORORkZG4dOkSAGDw4MHIycnBnj17xHN17NgRzs7OCAsLgyAIUCgUmDp1KqZNmwYAyMzMhJWVFTZu3Ahvb+8KXSMro0RERERqlJ+fj6ysLJUtPz+/yvrPzMyETCaDmZkZACAuLg5mZmZiIgoA7u7u0NLSwokTJ8SYrl27iokoAHh4eODy5ct49OiRGOPu7q5yLg8PD8TFxQEAUlJSoFQqVWJMTU3h4uIixlQEk1EiIiLSeOq8TR8aGgpTU1OVLTQ0tErGnZeXh6CgIAwZMgQmJiYAAKVSCUtLS5U4HR0dmJubQ6lUijFWVlYqMSWfXxTzbPuzx5UVUxF8mp6IiIhIjWbNmoXAwECVfXK5/JX7LSwsxIcffghBELB+/fpX7k8qTEaJiIhI46lzaSe5XF4lyeezShLRW7duISYmRqyKAoC1tTXu37+vEv/kyROkp6fD2tpajElLS1OJKfn8ophn20v21atXTyXG2dm5wtfC2/REREREb5CSRPTq1av4/fffUadOHZV2V1dXZGRkICEhQdwXExOD4uJiuLi4iDGxsbEoLCwUY6Kjo9GsWTPUrl1bjDl48KBK39HR0XB1dQUA2Nvbw9raWiUmKysLJ06cEGMqgskoERERabzqtLRTdnY2kpKSkJSUBODpg0JJSUlITU1FYWEh3n//fZw6dQpbtmxBUVERlEollEolCgoKAAAtWrRA7969MXr0aJw8eRJ//vknAgIC4O3tDYVCAQAYOnQo9PT04Ofnh/Pnz2Pbtm1YtWqVynSCSZMmISoqCsuWLcOlS5cwb948nDp1CgEBAf//nckwefJkLFq0CL/++ivOnj2LYcOGQaFQPPfp/1LfPZd2IqI3CZd2Iqq5pFzaqdOSWLX1fWxG10rFHz58GD169Ci139fXF/PmzYO9vX2Zxx06dAjdu3cH8HTR+4CAAOzevRtaWloYNGgQVq9eDSMjIzE+OTkZ/v7+iI+PR926dTFhwgQEBQWp9BkREYE5c+bg5s2bcHBwwJIlS+Dp6Sm2C4KAuXPnYsOGDcjIyEDnzp2xbt06NG3atMLXy2SUiN4oTEaJai4pk1G3pX+ore8/p3dRW981AR9gIiIiIo3HV9NLh3NGiYiIiEgyrIwSERGRxlPn0k70fKyMEhEREZFkWBklIiIijcfKqHRYGSUiIiIiybAySkRERBqPhVHpsDJKRERERJJhZZSIiIg0HueMSofJKBEREWk85qLS4W16IiIiIpIMK6NERESk8XibXjqsjBIRERGRZFgZJSIiIo3Hwqh0WBklIiIiIsmwMkpEREQaT4ulUcmwMkpEREREkmFllIiIiDQeC6PSYTJKREREGo9LO0mHt+mJiIiISDKsjBIREZHG02JhVDKsjBIRERGRZFgZJSIiIo3HOaPSYWWUiIiIiCTDyigRERFpPBZGpcPKKBERERFJhpVRIiIi0ngysDQqFSajREREpPG4tJN0eJueiIiIiCTDyigRERFpPC7tJB1WRomIiIhIMqyMEhERkcZjYVQ6rIwSERERkWRYGSUiIiKNp8XSqGRYGSUiIiIiybAySkRERBqPhVHpMBklIiIijcelnaRToWQ0OTm5wh2+9dZbLz0YIiIiItIsFUpGnZ2dIZPJIAhCme0lbTKZDEVFRVU6QCIiIiJ1Y2FUOhVKRlNSUtQ9DiIiIiLSQBVKRu3s7NQ9DiIiIiLJcGkn6bzU0k6bN2+Gm5sbFAoFbt26BQBYuXIldu3aVaWDIyIiItI0sbGx6NevHxQKBWQyGSIjI1XaBUFASEgI6tWrBwMDA7i7u+Pq1asqMenp6fDx8YGJiQnMzMzg5+eH7OxslZjk5GR06dIF+vr6sLGxwZIlS0qNJSIiAs2bN4e+vj6cnJywb9++So/lRSqdjK5fvx6BgYHw9PRERkaGOEfUzMwMK1eurGx3RERERJKTqXGrrJycHLRu3Rpr164ts33JkiVYvXo1wsLCcOLECRgaGsLDwwN5eXlijI+PD86fP4/o6Gjs2bMHsbGxGDNmjNielZWFXr16wc7ODgkJCVi6dCnmzZuHDRs2iDHHjh3DkCFD4Ofnh9OnT8PLywteXl44d+5cpcbyIjKhvKeSyuHo6IjFixfDy8sLxsbGOHPmDBo1aoRz586he/fuePjwYWW6UwuDNgFSD4GI1ORR/Bqph0BEaqIv4YKT3ptOq63vn3zbvPSxMpkMO3fuhJeXF4CnlUiFQoGpU6di2rRpAIDMzExYWVlh48aN8Pb2xsWLF+Ho6Ij4+Hi0b98eABAVFQVPT0/cvn0bCoUC69evx+zZs6FUKqGnpwcAmDlzJiIjI3Hp0iUAwODBg5GTk4M9e/aI4+nYsSOcnZ0RFhZWobFURKUroykpKWjTpvSXKpfLkZOTU9nuiIiIiCQnk8nUtuXn5yMrK0tly8/Pf6lxpqSkQKlUwt3dXdxnamoKFxcXxMXFAQDi4uJgZmYmJqIA4O7uDi0tLZw4cUKM6dq1q5iIAoCHhwcuX76MR48eiTHPnqckpuQ8FRlLRVQ6GbW3t0dSUlKp/VFRUWjRokVluyMiIiKSnJZMfVtoaChMTU1VttDQ0Jcap1KpBABYWVmp7LeyshLblEolLC0tVdp1dHRgbm6uElNWH8+eo7yYZ9tfNJaKqHRBPDAwEP7+/sjLy4MgCDh58iS2bt2K0NBQfPvtt5XtjoiIiKhGmzVrFgIDA1X2yeVyiUZT/VQ6GR01ahQMDAwwZ84c5ObmYujQoVAoFFi1alWF5wYQERERVSfqfB2oXC6vsuTT2toaAJCWloZ69eqJ+9PS0uDs7CzG3L9/X+W4J0+eID09XTze2toaaWlpKjEln18U82z7i8ZSES+1tJOPjw+uXr2K7OxsKJVK3L59G35+fi/TFRERERFVkL29PaytrXHw4EFxX1ZWFk6cOAFXV1cAgKurKzIyMpCQkCDGxMTEoLi4GC4uLmJMbGwsCgsLxZjo6Gg0a9YMtWvXFmOePU9JTMl5KjKWinjp59bu37+Py5cvA3j6rwkLC4uX7YqIiIhIUtVpzfvs7Gxcu3ZN/JySkoKkpCSYm5vD1tYWkydPxqJFi+Dg4AB7e3sEBwdDoVCIT9y3aNECvXv3xujRoxEWFobCwkIEBATA29sbCoUCADB06FDMnz8ffn5+CAoKwrlz57Bq1SqsWLFCPO+kSZPQrVs3LFu2DH369MFPP/2EU6dOics/yWSyF46lIiqdjP7zzz/45JNPsHXrVhQXFwMAtLW1MXjwYKxduxampqaV7ZKIiIiI/t+pU6fQo0cP8XPJfFNfX19s3LgRM2bMQE5ODsaMGYOMjAx07twZUVFR0NfXF4/ZsmULAgIC0LNnT2hpaWHQoEFYvXq12G5qaooDBw7A398f7dq1Q926dRESEqKyFmmnTp0QHh6OOXPm4NNPP4WDgwMiIyPRqlUrMaYiY3mRSq8zOnjwYJw+fRpfffWVWIKNi4vDpEmT4OzsjJ9++qky3akF1xklqrm4zihRzSXlOqPDwpPV1vd/h76ltr5rgkr/se/Zswf79+9H586dxX0eHh745ptv0Lt37yodHBERERHVbJVORuvUqVPmrXhTU1NxwisRERHRm0SrGs0Z1TSVfpp+zpw5CAwMVFnMVKlUYvr06QgODq7SwRERERG9Dup8AxM9X4Uqo23atFH5Mq9evQpbW1vY2toCAFJTUyGXy/HgwQOMHTtWPSMlIiIiohqnQsloZR7PJyIiInrTsH4pnQolo3PnzlX3OIiIiIhIA0m4iAIRERFR9aDFuZ2SqXQyWlRUhBUrVmD79u1ITU1FQUGBSnt6enqVDY6IiIiIarZKP00/f/58LF++HIMHD0ZmZiYCAwMxcOBAaGlpYd68eWoYIhEREZF6yWTq2+j5Kp2MbtmyBd988w2mTp0KHR0dDBkyBN9++y1CQkJw/PhxdYyRiIiIiGqoSiejSqUSTk5OAAAjIyNkZmYCAPr27Yu9e/dW7eiIiIiIXgOuMyqdSiejDRo0wL179wAAjRs3xoEDBwAA8fHxkMvlVTs6IiIiIqrRKp2MDhgwAAcPHgQATJgwAcHBwXBwcMCwYcMwcuTIKh8gERERkbpxzqh0Kv00/eeffy7+9+DBg2FnZ4djx47BwcEB/fr1q9LBEREREb0OXNpJOpWujP5bx44dERgYCBcXFyxevLgqxkREREREGuKVk9ES9+7dQ3BwcFV1R0RERPTa8Da9dKosGSUiIiIiqiy+DpSIiIg0Hpdgkg4ro0REREQkmQpXRgMDA5/b/uDBg1ceTFVJObxC6iEQkZoUFhVLPQQiUhN9HelqZKzOSafCyejp06dfGNO1a9dXGgwRERERaZYKJ6OHDh1S5ziIiIiIJMM5o9LhA0xERESk8bSYi0qGUySIiIiISDKsjBIREZHGY2VUOqyMEhEREZFkWBklIiIijccHmKTzUpXRP/74Ax999BFcXV1x584dAMDmzZtx9OjRKh0cEREREdVslU5Gd+zYAQ8PDxgYGOD06dPIz88HAGRmZmLx4sVVPkAiIiIiddOSqW+j56t0Mrpo0SKEhYXhm2++ga6urrjfzc0NiYmJVTo4IiIiIqrZKj1n9PLly2W+acnU1BQZGRlVMSYiIiKi14pTRqVT6cqotbU1rl27Vmr/0aNH0ahRoyoZFBEREdHrpCWTqW2j56t0Mjp69GhMmjQJJ06cgEwmw927d7FlyxZMmzYN48ePV8cYiYiIiKiGqvRt+pkzZ6K4uBg9e/ZEbm4uunbtCrlcjmnTpmHChAnqGCMRERGRWnHhdenIBEEQXubAgoICXLt2DdnZ2XB0dISRkVFVj+2lKTMLpR4CEamJob621EMgIjUxlkuXEn6674ra+l7s2VRtfdcEL73ovZ6eHhwdHatyLERERESS4NRO6VQ6Ge3Ro8dz31IQExPzSgMiIiIiIs1R6WTU2dlZ5XNhYSGSkpJw7tw5+Pr6VtW4iIiIiF4bPvUunUonoytWrChz/7x585Cdnf3KAyIiIiIizVFlM4U/+ugjfP/991XVHREREdFrI5Opb6Pnq7JkNC4uDvr6+lXVHREREdFrU13eTV9UVITg4GDY29vDwMAAjRs3xsKFC/Hs4keCICAkJAT16tWDgYEB3N3dcfXqVZV+0tPT4ePjAxMTE5iZmcHPz6/UHezk5GR06dIF+vr6sLGxwZIlS0qNJyIiAs2bN4e+vj6cnJywb9++yl1QBVT6Nv3AgQNVPguCgHv37uHUqVMIDg6usoERERERaZovvvgC69evx6ZNm9CyZUucOnUKI0aMgKmpKSZOnAgAWLJkCVavXo1NmzbB3t4ewcHB8PDwwIULF8TCoI+PD+7du4fo6GgUFhZixIgRGDNmDMLDwwEAWVlZ6NWrF9zd3REWFoazZ89i5MiRMDMzw5gxYwAAx44dw5AhQxAaGoq+ffsiPDwcXl5eSExMRKtWrarsmiu9zuiIESNUPmtpacHCwgL/+c9/0KtXryob2KvgOqNENRfXGSWquaRcZ3RBdOlXnVeVkHeaVDi2b9++sLKywnfffSfuGzRoEAwMDPDjjz9CEAQoFApMnToV06ZNAwBkZmbCysoKGzduhLe3Ny5evAhHR0fEx8ejffv2AICoqCh4enri9u3bUCgUWL9+PWbPng2lUgk9PT0AT19sFBkZiUuXLgEABg8ejJycHOzZs0ccS8eOHeHs7IywsLBX/l5KVKoyWlRUhBEjRsDJyQm1a9euskEQERER1VT5+fnIz89X2SeXyyGXy0vFdurUCRs2bMCVK1fQtGlTnDlzBkePHsXy5csBACkpKVAqlXB3dxePMTU1hYuLC+Li4uDt7Y24uDiYmZmJiSgAuLu7Q0tLCydOnMCAAQMQFxeHrl27iokoAHh4eOCLL77Ao0ePULt2bcTFxSEwMFBlfB4eHoiMjKyKr0VUqX+CaGtro1evXsjIyKjSQRARERFJSZ0PMIWGhsLU1FRlCw0NLXMcM2fOhLe3N5o3bw5dXV20adMGkydPho+PDwBAqVQCAKysrFSOs7KyEtuUSiUsLS1V2nV0dGBubq4SU1Yfz56jvJiS9qpS6TmjrVq1wo0bN2Bvb1+lAyEiIiKqiWbNmlWqwlhWVRQAtm/fji1btiA8PBwtW7ZEUlISJk+eDIVCUWPXc690Mrpo0SJMmzYNCxcuRLt27WBoaKjSbmJiUmWDIyIiInodKvvUe2WUd0u+LNOnTxerowDg5OSEW7duITQ0FL6+vrC2tgYApKWloV69euJxaWlp4ouJrK2tcf/+fZV+nzx5gvT0dPF4a2trpKWlqcSUfH5RTEl7VanwbfoFCxYgJycHnp6eOHPmDN577z00aNAAtWvXRu3atWFmZsZ5pERERESvIDc3F1paqumZtrY2iouLAQD29vawtrbGwYMHxfasrCycOHECrq6uAABXV1dkZGQgISFBjImJiUFxcTFcXFzEmNjYWBQW/u+h7+joaDRr1kzM51xdXVXOUxJTcp6qUuHK6Pz58zFu3DgcOnSoSgdAREREJDUZqsfq9P369cNnn30GW1tbtGzZEqdPn8by5csxcuRIAIBMJsPkyZOxaNEiODg4iEs7KRQKeHl5AQBatGiB3r17Y/To0QgLC0NhYSECAgLg7e0NhUIBABg6dCjmz58PPz8/BAUF4dy5c1i1apXKmzYnTZqEbt26YdmyZejTpw9++uknnDp1Chs2bKjSa67w0k5aWlplToitjri0E1HNxaWdiGouKZd2+jzmutr6nvmfxhWO/eeffxAcHIydO3fi/v37UCgUGDJkCEJCQsQn3wVBwNy5c7FhwwZkZGSgc+fOWLduHZo2bSr2k56ejoCAAOzevRtaWloYNGgQVq9eDSMjIzEmOTkZ/v7+iI+PR926dTFhwgQEBQWpjCciIgJz5szBzZs34eDggCVLlsDT0/MVvxFVlUpG09LSYGFhUaUDUAcmo0Q1F5NRopqLyahmqtQDTE2bNoXsBS9ZTU9Pf6UBEREREb1u6nyAiZ6vUsno/PnzYWpqqq6xEBEREZGGqVQy6u3t/UbMGSUiIiKqjBfd+SX1qfDkDP4hEREREVFVq3BltILPORERERG9cThnVDoVTkZLFlslIiIiIqoqlX4dKBEREVFNw9mI0mEySkRERBpPi9moZKRbXZaIiIiINB4ro0RERKTx+ACTdFgZJSIiIiLJsDJKREREGo9TRqXDyigRERERSYaVUSIiItJ4WmBpVCqsjBIRERGRZFgZJSIiIo3HOaPSYTJKREREGo9LO0mHt+mJiIiISDKsjBIREZHG4+tApcPKKBERERFJhpVRIiIi0ngsjEqHlVEiIiIikgwro0RERKTxOGdUOqyMEhEREZFkWBklIiIijcfCqHSYjBIREZHG461i6fC7JyIiIiLJsDJKREREGk/G+/SSYWWUiIiIiCTDyigRERFpPNZFpcPKKBERERFJhpVRIiIi0nhc9F46rIwSERERkWRYGSUiIiKNx7qodJiMEhERkcbjXXrp8DY9EREREUmGlVEiIiLSeFz0XjqsjBIRERGRZFgZJSIiIo3H6px0+N0TERERkWRYGSUiIiKNxzmj0mFllIiIiKgauXPnDj766CPUqVMHBgYGcHJywqlTp8R2QRAQEhKCevXqwcDAAO7u7rh69apKH+np6fDx8YGJiQnMzMzg5+eH7OxslZjk5GR06dIF+vr6sLGxwZIlS0qNJSIiAs2bN4e+vj6cnJywb9++Kr9eJqNERESk8WRq3Crj0aNHcHNzg66uLn777TdcuHABy5YtQ+3atcWYJUuWYPXq1QgLC8OJEydgaGgIDw8P5OXliTE+Pj44f/48oqOjsWfPHsTGxmLMmDFie1ZWFnr16gU7OzskJCRg6dKlmDdvHjZs2CDGHDt2DEOGDIGfnx9Onz4NLy8veHl54dy5c5W8queTCYIgVGmP1YAys1DqIRCRmhjqa0s9BCJSE2O5dDWyiKS7auv7A2dFhWNnzpyJP//8E3/88UeZ7YIgQKFQYOrUqZg2bRoAIDMzE1ZWVti4cSO8vb1x8eJFODo6Ij4+Hu3btwcAREVFwdPTE7dv34ZCocD69esxe/ZsKJVK6OnpieeOjIzEpUuXAACDBw9GTk4O9uzZI56/Y8eOcHZ2RlhY2Et9F2VhZZSIiIg0nkwmU9uWn5+PrKwslS0/P7/Mcfz6669o3749PvjgA1haWqJNmzb45ptvxPaUlBQolUq4u7uL+0xNTeHi4oK4uDgAQFxcHMzMzMREFADc3d2hpaWFEydOiDFdu3YVE1EA8PDwwOXLl/Ho0SMx5tnzlMSUnKeqMBklIiIijaelxi00NBSmpqYqW2hoaJnjuHHjBtavXw8HBwfs378f48ePx8SJE7Fp0yYAgFKpBABYWVmpHGdlZSW2KZVKWFpaqrTr6OjA3NxcJaasPp49R3kxJe1VhU/TExEREanRrFmzEBgYqLJPLpeXGVtcXIz27dtj8eLFAIA2bdrg3LlzCAsLg6+vr9rHKgVWRomIiEjjqfM2vVwuh4mJicpWXjJar149ODo6quxr0aIFUlNTAQDW1tYAgLS0NJWYtLQ0sc3a2hr3799XaX/y5AnS09NVYsrq49lzlBdT0l5VmIwSERERVRNubm64fPmyyr4rV67Azs4OAGBvbw9ra2scPHhQbM/KysKJEyfg6uoKAHB1dUVGRgYSEhLEmJiYGBQXF8PFxUWMiY2NRWHh/x76jo6ORrNmzcQn911dXVXOUxJTcp6qwmSUiIiINF51WdppypQpOH78OBYvXoxr164hPDwcGzZsgL+//9NxymSYPHkyFi1ahF9//RVnz57FsGHDoFAo4OXlBeBpJbV3794YPXo0Tp48iT///BMBAQHw9vaGQvH0yf6hQ4dCT08Pfn5+OH/+PLZt24ZVq1apTCeYNGkSoqKisGzZMly6dAnz5s3DqVOnEBAQUMmrej4u7UREbxQu7URUc0m5tFNkctU+lPMsr7cqd1t7z549mDVrFq5evQp7e3sEBgZi9OjRYrsgCJg7dy42bNiAjIwMdO7cGevWrUPTpk3FmPT0dAQEBGD37t3Q0tLCoEGDsHr1ahgZGYkxycnJ8Pf3R3x8POrWrYsJEyYgKChIZSwRERGYM2cObt68CQcHByxZsgSenp4v+U2UjckoEb1RmIwS1VxSJqO7zqovGe3vVLVzLGsa3qYnIiIiIslwaSciIiLSeFqVnt1JVYXJKBEREWk8GXNRyfA2PRERERFJhpVRIiIi0ngy3qaXDCujRERERCQZVkaJiIhI43HOqHRYGSUiIiIiybAySkRERBqPSztJp9pWRtPS0rBgwQKph0FEREREalRtk1GlUon58+dLPQwiIiLSADKZ+jZ6Pslu0ycnJz+3/fLly69pJERERKTpmDRKR7Jk1NnZGTKZDIIglGor2S/jTwYRERFRjSZZMmpubo4lS5agZ8+eZbafP38e/fr1e82jIiIiIk3ERe+lI1ky2q5dO9y9exd2dnZltmdkZJRZNSUiIiKimkOyZHTcuHHIyckpt93W1hY//PDDaxwRERERaSotFkYlIxNqYPlRmVko9RCISE0M9bWlHgIRqYmxXLpFfg5eeqi2vns2r6u2vmsCLnpPREREGo9zRqVTbdcZJSIiIqKaj5VRIiIi0nhcTVI6TEaJiIhI4/E2vXR4m56IiIiIJCN5MhoVFYWjR4+Kn9euXQtnZ2cMHToUjx49knBkREREpCm0ZOrb6PkkT0anT5+OrKwsAMDZs2cxdepUeHp6IiUlBYGBgRKPjoiIiIjUSfI5oykpKXB0dAQA7NixA3379sXixYuRmJgIT09PiUdHREREmoBzRqUjeWVUT08Pubm5AIDff/8dvXr1AvD03fUlFVMiIiIiqpkkr4x27twZgYGBcHNzw8mTJ7Ft2zYAwJUrV9CgQQOJR0fqcCbxFLb++AOuXLqAvx8+wKIlq9Cle0+xPXT+bETt3aVyzNsd3bB09dcAgHt37+C/34Uh8dRJpKc/RN26Fnjn3b74eMRY6OrqAgDy8/Ox/PMFuHzpAlJv3oCrWzd89uVqlT5PJ5zE5PEjS43vl32HUacu35ZBVNU2fvcN1qxajiE+H2Nq0KcAgIcPH2DV8qU4GReHnJwc2DVsiJGjx6HnO73E427dTMGq5V/iTFIinhQWoknTZhjvPxHt33YBAGRkPELwzBm4evUyMjMyYG5eB117/Af+E6fAyMhIkmulNw+XdpKO5MnomjVr8Mknn+Dnn3/G+vXrUb9+fQDAb7/9ht69e0s8OlKHx3mP0cShGTz7DUBw0OQyY9527YyZwYvEz3p6uuJ/p95KQbEgYNqsENS3sUXK9WtYungu8h4/xieTpgMAiouLoCeXY9BgH8TGRD93PD9G7EEtw//9hVXb3PwVro6IynL+3Fn8ErENDk2bqeyfO3sm/vnnHyxbvRZmtWsjat8ezJo+Bf/dGoHmLZ5O4ZoyYTxsbO0Q9u1GyOVybP3xv5gcMB6R+/ajbl0LaGlpoVuP/2D8hEmoXbs2/kpNxReLFyIrMxOfffGlFJdLRJUgeTJqa2uLPXv2lNq/YsUKCUZDr0PHTl3QsVOX58bo6eqVW510ce0MF9fO4mdFfRuk3krBrh3bxWTUwKAWps4MAQCcO3Ma2f/8U+65zMzNYWxsUtnLIKIKys3NQfCs6Zg9bwG+2xCm0paclISZc0LQyuktAMCoMeOxdfMmXLpwHs1bOCLj0SOk3rqF4HmLxEQ2YPJURGzbiuvXrqJuXQuYmJji/cFDxD7rKerjg8FDsHnj96/vIumNx8KodCSfM5qYmIizZ8+Kn3ft2gUvLy98+umnKCgokHBkJKWkxHj09+iKj97vi2WfL0BmRsZz43Oys2Fi8nIJ5aiP3seAd7sjMGAUzp5JfKk+iKh8X3y2EG5dusGlY6dSbW85OyN6/2/IzMxAcXEx9v+2F/n5BWjX4W0AgKmZGewa2mPv7l14nJuLJ0+e4JeIbTA3r4MWji3LPN+D+/cRczAabdt3UOt1Uc2iJZOpbaPnk7wyOnbsWMycORNOTk64ceMGvL29MWDAAERERCA3NxcrV6587vH5+fnIz8//1z4tyOVyNY6a1OltVzd07eEOa0V93L39F75ZvwozJo/Duu+2QFtbu1T87b9S8cv2cIyfNK1S56lT1wJTZ4agWYuWKCwswJ5dOzBp3EiE/RCOps0dq+pyiDTa/t/24tLFC/jv1ogy2z9fugKzZgSiZxdXaOvoQF9fH1+u/Ao2tnYAAJlMhnUbvse0yQHo6toeWlpaqG1ujtXrN8DExFSlr09nTMWRwzHIz8tDl249MGfeQrVfHxG9Oskro1euXIGzszMAICIiAl27dkV4eDg2btyIHTt2vPD40NBQmJqaqmxfLf9CzaMmderZyxNuXXugcZOm6NK9Jz5fvhaXLpxDUkJ8qdgH99MwY9JYdO/ZC/283q/UeWzt7PHewA/RrEVLtHqrDWYGL0Krt5yxfet/q+pSiDSaUnkPy74IxaLPl5ZbIFi/djX+yfoH6zZ8j81bI+Dz8XDMnD4F165cAQAIgoAvFi9EbXNzfLPxR2zasg3de/RE4IRP8PDBfZW+AmfMxJZtO7Bs1VrcuZ2KFUs/V/s1Us0hU+NGzyd5ZVQQBBQXFwN4urRT3759AQA2NjZ4+PDhC4+fNWtWqcXxH+VJnmNTFVLUt4GpWW3cuZ2Kdm93FPc/fHAfk8ePREsnZ0z7dF6VnKtFy1ZITjpdJX0RabpLF84jPf1vfDR4kLivqKgIpxNOYftP4djx6z5s37oF2375FY2bOAAAmjZrjqTEU9i+LRyfBs9D/InjOBp7GDFHT4hPxs90bIkTx49hz6+7MNxvtNh33boWqFvXAg3tG8HU1BSjhn+EUWPHo66F5eu9cCKqFMmT0fbt22PRokVwd3fHkSNHsH79egBPF8O3srJ64fFyubzUv7hzhUK1jJWkcT9NiazMDNSpayHue3A/DZPHj0TTFo6YGbIIWlpV8w+Qq1cucVknoirSwcUVP+1QXaZtQchs2Nnbw3fEKOQ9zgOAUr+/WtraEP6/SJGXVxKjWl+SybTEQkZZStoKCvj3AVUQS5iSkTwZXblyJXx8fBAZGYnZs2ejSZMmAICff/4ZnTqVnuxOb77c3FzcuZ0qfr539w6uXrkEExNTGJuYYtO369C1xzswr1MXd2//hbA1y1G/gS06dHQD8DQRnTR+BKytFfhk4jRkPHok9vVsInnzxnUUPilEVlYmcnNzcPXKJQCAQ9PmAICIrZtRT1EfDRs1QUFBPvbu2oHTp07iy9UbXsfXQFTjGRoaoolDU5V9+gYGMDM1QxOHpnhSWAgbW1ssXjAXk6bOgJmZGQ7HHMSJuGNYseZpYeKt1s4wNjHB3NmzMHrcJ5DL5Yjc8TPu3rmDzl27AQCO/nEE6X//DceWrVCrliFuXL+KVcu/ROs2baH4/+UCiaj6kgmCIEg9iLLk5eVBW1tbXMS8MpSZ/JdwdVbeYvO9+/RHYFAwZk+fiKtXLiH7nyzUtbBEe5dO8BsbAPM6TxPN3/ZE4vMFc8rs+8jJc+J/D+7fC8p7d8uNCf/v99gTGYEHD+5DX66PRg5N4es3Hm3bv10Vl0lqYqhf+iE2enOMGTkMzZo1Fxe9T711E1+tXI4zpxORm5sLG1tbfOQ7An369RePuXD+HNZ9tRIXz5/DkydP0KhxE4wa+wncunQFAJw6eQLrvlqJGzeuo7CgAFbW1ujR8x0MHzkaxi+5ygZJw1gu3TS7E9cz1da3S2PTFwdpsGqbjL4KJqNENReTUaKai8moZpL8Nn1RURFWrFiB7du3IzU1tdTaounp6RKNjIiIiDQFlwOVjuSPnc+fPx/Lly/H4MGDkZmZicDAQAwcOBBaWlqYN2+e1MMjIiIiDcClnaQj+W36xo0bY/Xq1ejTpw+MjY2RlJQk7jt+/DjCw8Mr3Sdv0xPVXLxNT1RzSXmbPv6G+m7Td2jE2/TPI3llVKlUwsnJCQBgZGSEzMynPwx9+/bF3r17pRwaERERaQqWRiUjeTLaoEED3Lt3D8DTKumBAwcAAPHx8XylJxEREWm0zz//HDKZDJMnTxb35eXlwd/fH3Xq1IGRkREGDRqEtLQ0leNSU1PRp08f1KpVC5aWlpg+fTqePHmiEnP48GG0bdsWcrkcTZo0wcaNG0udf+3atWjYsCH09fXh4uKCkydPVvk1Sp6MDhgwAAcPHgQATJgwAcHBwXBwcMCwYcMwcmTp5X+IiIiIqppMjf97WfHx8fj666/x1ltvqeyfMmUKdu/ejYiICBw5cgR3797FwIEDxfaioiL06dMHBQUFOHbsGDZt2oSNGzciJCREjElJSUGfPn3Qo0cPJCUlYfLkyRg1ahT2798vxmzbtg2BgYGYO3cuEhMT0bp1a3h4eOD+fdVX8b4qyeeM/ltcXBzi4uLg4OCAfv36vVQfnDNKVHNxzihRzSXlnNFTKVlq67u9feXXu83Ozkbbtm2xbt06LFq0CM7Ozli5ciUyMzNhYWGB8PBwvP/++wCAS5cuoUWLFoiLi0PHjh3x22+/oW/fvrh79674NsuwsDAEBQXhwYMH0NPTQ1BQEPbu3Ytz5/63Pre3tzcyMjIQFRUFAHBxcUGHDh2wZs0aAE/fbGZjY4MJEyZg5syZr/q1iCSvjP6bq6srAgMDXzoRJSIiIqosmUx9W35+PrKyslS2/Pz8547H398fffr0gbu7u8r+hIQEFBYWquxv3rw5bG1tERcXB+BpYc/JyUnlteoeHh7IysrC+fPnxZh/9+3h4SH2UVBQgISEBJUYLS0tuLu7izFVRZJ1Rn/99dcKx7733ntqHAkRERGReoWGhmL+/Pkq++bOnVvuEpY//fQTEhMTER8fX6pNqVRCT08PZmZmKvutrKygVCrFmGcT0ZL2krbnxWRlZeHx48d49OgRioqKyoy5dOnS8y+4kiRJRr28vCoUJ5PJUFRUpN7BEBERkcZT50Pvs2bNQmBgoMq+8h7S/uuvvzBp0iRER0dDX19fjaOqPiRJRouLi6U4LREREVHZ1JiNyuXyCq8QlJCQgPv376Nt27bivqKiIsTGxmLNmjXYv38/CgoKkJGRoVIdTUtLg7W1NQDA2tq61FPvJU/bPxvz7yfw09LSYGJiAgMDA2hra0NbW7vMmJI+qkq1mzNKREREpKl69uyJs2fPIikpSdzat28PHx8f8b91dXXFlYgA4PLly0hNTYWrqyuAp8/fnD17VuWp9+joaJiYmMDR0VGMebaPkpiSPvT09NCuXTuVmOLiYhw8eFCMqSqSJaMxMTFwdHREVlbpp9cyMzPRsmVLxMbGSjAyIiIi0jTVZWknY2NjtGrVSmUzNDREnTp10KpVK5iamsLPzw+BgYE4dOgQEhISMGLECLi6uqJjx44AgF69esHR0REff/wxzpw5g/3792POnDnw9/cXK7Tjxo3DjRs3MGPGDFy6dAnr1q3D9u3bMWXKFHEsgYGB+Oabb7Bp0yZcvHgR48ePR05ODkaMGFF1Xzwkuk0PACtXrsTo0aNhYlJ6uQNTU1OMHTsWK1asQNeuXSUYHREREVH1tGLFCmhpaWHQoEHIz8+Hh4cH1q1bJ7Zra2tjz549GD9+PFxdXWFoaAhfX18sWLBAjLG3t8fevXsxZcoUrFq1Cg0aNMC3334LDw8PMWbw4MF48OABQkJCoFQq4ezsjKioqFIPNb0qydYZtbOzQ1RUFFq0aFFm+6VLl9CrVy+kpqZWum+uM0pUc3GdUaKaS8p1RpNS/1Fb3862xmrruyaQ7E89LS0Nurq65bbr6OjgwYMHr3FERERERPS6SZaM1q9fX2XV/39LTk5GvXr1XuOIiIiISFPJ1LjR80mWjHp6eiI4OBh5eXml2h4/foy5c+eib9++EoyMiIiIiF4XyeaMpqWloW3bttDW1kZAQACaNWsG4Olc0bVr16KoqAiJiYkvNUmWc0aJai7OGSWquaScM3rmL/XNGW1twzmjzyNZMgoAt27dwvjx47F//36UDEMmk8HDwwNr166Fvb39S/XLZJSo5mIySlRzSZmMJv+Vrba+37IxUlvfNYGkyWiJR48e4dq1axAEAQ4ODqhdu/Yr9cdklKjmYjJKVHMxGdVMkq0z+qzatWujQ4cOUg+DiIiINJSMTxpJhq8DJSIiIiLJVIvKKBEREZGUWBiVDiujRERERCQZVkaJiIiIWBqVDCujRERERCQZVkaJiIhI48lYGpUMK6NEREREJBlWRomIiEjjcZ1R6TAZJSIiIo3HXFQ6vE1PRERERJJhZZSIiIiIpVHJsDJKRERERJJhZZSIiIg0Hpd2kg4ro0REREQkGVZGiYiISONxaSfpsDJKRERERJJhZZSIiIg0Hguj0mEySkRERMRsVDK8TU9EREREkmFllIiIiDQel3aSDiujRERERCQZVkaJiIhI43FpJ+mwMkpEREREkmFllIiIiDQeC6PSYWWUiIiIiCTDyigRERERS6OSYTJKREREGo9LO0mHt+mJiIiISDKsjBIREZHG49JO0mFllIiIiIgkw8ooERERaTwWRqXDyigRERERSYaVUSIiIiKWRiXDyigRERFRNREaGooOHTrA2NgYlpaW8PLywuXLl1Vi8vLy4O/vjzp16sDIyAiDBg1CWlqaSkxqair69OmDWrVqwdLSEtOnT8eTJ09UYg4fPoy2bdtCLpejSZMm2LhxY6nxrF27Fg0bNoS+vj5cXFxw8uTJKr9mJqNERESk8WRq/F9lHDlyBP7+/jh+/Diio6NRWFiIXr16IScnR4yZMmUKdu/ejYiICBw5cgR3797FwIEDxfaioiL06dMHBQUFOHbsGDZt2oSNGzciJCREjElJSUGfPn3Qo0cPJCUlYfLkyRg1ahT2798vxmzbtg2BgYGYO3cuEhMT0bp1a3h4eOD+/fuv8E2XJhMEQajSHqsBZWah1EMgIjUx1NeWeghEpCbGculqZKnp+Wrr29Zc/tLHPnjwAJaWljhy5Ai6du2KzMxMWFhYIDw8HO+//z4A4NKlS2jRogXi4uLQsWNH/Pbbb+jbty/u3r0LKysrAEBYWBiCgoLw4MED6OnpISgoCHv37sW5c+fEc3l7eyMjIwNRUVEAABcXF3To0AFr1qwBABQXF8PGxgYTJkzAzJkzX/qa/o2VUSIiIiI1ys/PR1ZWlsqWn1+x5DczMxMAYG5uDgBISEhAYWEh3N3dxZjmzZvD1tYWcXFxAIC4uDg4OTmJiSgAeHh4ICsrC+fPnxdjnu2jJKakj4KCAiQkJKjEaGlpwd3dXYypKkxGiYiISOPJ1LiFhobC1NRUZQsNDX3hmIqLizF58mS4ubmhVatWAAClUgk9PT2YmZmpxFpZWUGpVIoxzyaiJe0lbc+LycrKwuPHj/Hw4UMUFRWVGVPSR1Xh0/REREREajRr1iwEBgaq7JPLX3zr3t/fH+fOncPRo0fVNbRqgckoERERaTx1vg5ULpdXKPl8VkBAAPbs2YPY2Fg0aNBA3G9tbY2CggJkZGSoVEfT0tJgbW0txvz7qfeSp+2fjfn3E/hpaWkwMTGBgYEBtLW1oa2tXWZMSR9VhbfpiYiIiKoJQRAQEBCAnTt3IiYmBvb29irt7dq1g66uLg4ePCjuu3z5MlJTU+Hq6goAcHV1xdmzZ1Weeo+OjoaJiQkcHR3FmGf7KIkp6UNPTw/t2rVTiSkuLsbBgwfFmKrCyigRERFRNVn13t/fH+Hh4di1axeMjY3F+ZmmpqYwMDCAqakp/Pz8EBgYCHNzc5iYmGDChAlwdXVFx44dAQC9evWCo6MjPv74YyxZsgRKpRJz5syBv7+/WKEdN24c1qxZgxkzZmDkyJGIiYnB9u3bsXfvXnEsgYGB8PX1Rfv27fH2229j5cqVyMnJwYgRI6r0mrm0ExG9Ubi0E1HNJeXSTrcfFait7wa19SocKytnvsAPP/yA4cOHA3i66P3UqVOxdetW5Ofnw8PDA+vWrVO5fX7r1i2MHz8ehw8fhqGhIXx9ffH5559DR+d/dcjDhw9jypQpuHDhAho0aIDg4GDxHCXWrFmDpUuXQqlUwtnZGatXr4aLi0vFL74i18xklIjeJExGiWouKZPROxnqS0brm1U8GdVEvE1PREREGq963KTXTHyAiYiIiIgkw8ooERERaTx1Lu1Ez8fKKBERERFJhpVRIiIi0ngyzhqVDCujRERERCQZVkaJiIiIWBiVDCujRERERCQZVkaJiIhI47EwKh0mo0RERKTxuLSTdHibnoiIiIgkw8ooERERaTwu7SQdVkaJiIiISDKsjBIRERGxMCoZVkaJiIiISDKsjBIREZHGY2FUOqyMEhEREZFkWBklIiIijcd1RqXDZJSIiIg0Hpd2kg5v0xMRERGRZFgZJSIiIo3H2/TSYWWUiIiIiCTDZJSIiIiIJMNklIiIiIgkwzmjREREpPE4Z1Q6rIwSERERkWRYGSUiIiKNx3VGpcNklIiIiDQeb9NLh7fpiYiIiEgyrIwSERGRxmNhVDqsjBIRERGRZFgZJSIiImJpVDKsjBIRERGRZFgZJSIiIo3HpZ2kw8ooEREREUmGlVEiIiLSeFxnVDqsjBIRERGRZFgZJSIiIo3Hwqh0mIwSERERMRuVDG/TExEREZFkWBklIiIijcelnaTDyigRERERSYaVUSIiItJ4XNpJOqyMEhEREZFkZIIgCFIPguhl5efnIzQ0FLNmzYJcLpd6OERUhfj7TaQZmIzSGy0rKwumpqbIzMyEiYmJ1MMhoirE328izcDb9EREREQkGSajRERERCQZJqNEREREJBkmo/RGk8vlmDt3Lh9uIKqB+PtNpBn4ABMRERERSYaVUSIiIiKSDJNRIiIiIpIMk1EiIiIikgyTUao2ZDIZIiMjpR4GEakBf7+JqDxMRum1UCqVmDBhAho1agS5XA4bGxv069cPBw8elHpoAABBEBASEoJ69erBwMAA7u7uuHr1qtTDInojVPff719++QW9evVCnTp1IJPJkJSUJPWQiOgZTEZJ7W7evIl27dohJiYGS5cuxdmzZxEVFYUePXrA399f6uEBAJYsWYLVq1cjLCwMJ06cgKGhITw8PJCXlyf10IiqtTfh9zsnJwedO3fGF198IfVQiKgsApGavfvuu0L9+vWF7OzsUm2PHj0S/xuAsHPnTvHzjBkzBAcHB8HAwECwt7cX5syZIxQUFIjtSUlJQvfu3QUjIyPB2NhYaNu2rRAfHy8IgiDcvHlT6Nu3r2BmZibUqlVLcHR0FPbu3Vvm+IqLiwVra2th6dKl4r6MjAxBLpcLW7dufcWrJ6rZqvvv97NSUlIEAMLp06df+nqJqOrpSJwLUw2Xnp6OqKgofPbZZzA0NCzVbmZmVu6xxsbG2LhxIxQKBc6ePYvRo0fD2NgYM2bMAAD4+PigTZs2WL9+PbS1tZGUlARdXV0AgL+/PwoKChAbGwtDQ0NcuHABRkZGZZ4nJSUFSqUS7u7u4j5TU1O4uLggLi4O3t7er/ANENVcb8LvNxFVf0xGSa2uXbsGQRDQvHnzSh87Z84c8b8bNmyIadOm4aeffhL/skpNTcX06dPFvh0cHMT41NRUDBo0CE5OTgCARo0alXsepVIJALCyslLZb2VlJbYRUWlvwu83EVV/nDNKaiW8wgu+tm3bBjc3N1hbW8PIyAhz5sxBamqq2B4YGIhRo0bB3d0dn3/+Oa5fvy62TZw4EYsWLYKbmxvmzp2L5OTkV7oOIiqNv99EVBWYjJJaOTg4QCaT4dKlS5U6Li4uDj4+PvD09MSePXtw+vRpzJ49GwUFBWLMvHnzcP78efTp0wcxMTFwdHTEzp07AQCjRo3CjRs38PHHH+Ps2bNo3749vvrqqzLPZW1tDQBIS0tT2Z+Wlia2EVFpb8LvNxG9AaSdskqaoHfv3pV+wOHLL78UGjVqpBLr5+cnmJqalnseb29voV+/fmW2zZw5U3ByciqzreQBpi+//FLcl5mZyQeYiCqguv9+P4sPMBFVT6yMktqtXbsWRUVFePvtt7Fjxw5cvXoVFy9exOrVq+Hq6lrmMQ4ODkhNTcVPP/2E69evY/Xq1WJVBAAeP36MgIAAHD58GLdu3cKff/6J+Ph4tGjRAgAwefJk7N+/HykpKUhMTMShQ4fEtn+TyWSYPHkyFi1ahF9//RVnz57FsGHDoFAo4OXlVeXfB1FNUt1/v4GnD1olJSXhwoULAIDLly8jKSmJc8KJqgups2HSDHfv3hX8/f0FOzs7QU9PT6hfv77w3nvvCYcOHRJj8K+lX6ZPny7UqVNHMDIyEgYPHiysWLFCrJzk5+cL3t7ego2NjaCnpycoFAohICBAePz4sSAIghAQECA0btxYkMvlgoWFhfDxxx8LDx8+LHd8xcXFQnBwsGBlZSXI5XKhZ8+ewuXLl9XxVRDVONX99/uHH34QAJTa5s6dq4Zvg4gqSyYIrzADnYiIiIjoFfA2PRERERFJhskoEREREUmGySgRERERSYbJKBERERFJhskoEREREUmGySgRERERSYbJKBERERFJhskoEREREUmGySgRVZnhw4ervEK1e/fumDx58msfx+HDhyGTyZCRkaG2c/z7Wl/G6xgnEVF1x2SUqIYbPnw4ZDIZZDIZ9PT00KRJEyxYsABPnjxR+7l/+eUXLFy4sEKxrzsxa9iwIVauXPlazkVEROXTkXoARKR+vXv3xg8//ID8/Hzs27cP/v7+0NXVxaxZs0rFFhQUQE9Pr0rOa25uXiX9EBFRzcXKKJEGkMvlsLa2hp2dHcaPHw93d3f8+uuvAP53u/mzzz6DQqFAs2bNAAB//fUXPvzwQ5iZmcHc3Bz9+/fHzZs3xT6LiooQGBgIMzMz1KlTBzNmzIAgCCrn/fdt+vz8fAQFBcHGxgZyuRxNmjTBd999h5s3b6JHjx4AgNq1a0Mmk2H48OEAgOLiYoSGhsLe3h4GBgZo3bo1fv75Z5Xz7Nu3D02bNoWBgQF69OihMs6XUVRUBD8/P/GczZo1w6pVq8qMnT9/PiwsLGBiYoJx48ahoKBAbKvI2ImINB0ro0QayMDAAH///bf4+eDBgzAxMUF0dDQAoLCwEB4eHnB1dcUff/wBHR0dLFq0CL1790ZycjL09PSwbNkybNy4Ed9//z1atGiBZcuWYefOnfjPf/5T7nmHDRuGuLg4rF69Gq1bt0ZKSgoePnwIGxsb7NixA4MGDcLly5dhYmICAwMDAEBoaCh+/PFHhIWFwcHBAbGxsfjoo49gYWGBbt264a+//sLAgQPh7++PMWPG4NSpU5g6deorfT/FxcVo0KABIiIiUKdOHRw7dgxjxoxBvXr18OGHH6p8b/r6+jh8+DBu3ryJESNGoE6dOvjss88qNHYiIgIgEFGN5uvrK/Tv318QBEEoLi4WoqOjBblcLkybNk1st7KyEvLz88VjNm/eLDRr1kwoLi4W9+Xn5wsGBgbC/v37BUEQhHr16glLliwR2wsLC4UGDRqI5xIEQejWrZswadIkQRAE4fLlywIAITo6usxxHjp0SAAgPHr0SNyXl5cn1KpVSzh27JhKrJ+fnzBkyBBBEARh1qxZgqOjo0p7UFBQqb7+zc7OTlixYkW57f/m7+8vDBo0SPzs6+srmJubCzk5OeK+9evXC0ZGRkJRUVGFxl7WNRMRaRpWRok0wJ49e2BkZITCwkIUFxdj6NChmDdvntju5OSkMk/0zJkzuHbtGoyNjVX6ycvLw/Xr15GZmYl79+7BxcVFbNPR0UH79u1L3aovkZSUBG1t7UpVBK9du4bc3Fy88847KvsLCgrQpk0bAMDFixdVxgEArq6uFT5HedauXYvvv/8eqampePz4MQoKCuDs7KwS07p1a9SqVUvlvNnZ2fjrr7+QnZ39wrETERFv0xNphB49emD9+vXQ09ODQqGAjo7qr76hoaHK5+zsbLRr1w5btmwp1ZeFhcVLjaHktntlZGdnAwD27t2L+vXrq7TJ5fKXGkdF/PTTT5g2bRqWLVsGV1dXGBsbY+nSpThx4kSF+5Bq7EREbxomo0QawNDQEE2aNKlwfNu2bbFt2zZYWlrCxMSkzJh69erhxIkT6Nq1KwDgyZMnSEhIQNu2bcuMd3JyQnFxMY4cOQJ3d/dS7SWV2aKiInGfo6Mj5HI5UlNTy62otmjRQnwYq8Tx48dffJHP8eeff6JTp0745JNPxH3Xr18vFXfmzBk8fvxYTLSPHz8OIyMj2NjYwNzc/IVjJyIiPk1PRGXw8fFB3bp10b9/f/zxxx9ISUnB4cOHMXHiRNy+fRsAMGnSJHz++eeIjIzEpUuX8Mknnzx3jdCGDRvC19cXI0eORGRkpNjn9u3bAQB2dnaQyWTYs2cPHjx4gOzsbBgbG2PatGmYMmUKNm3ahOvXryMxMRFfffUVNm3aBAAYN24crl69iunTp+Py5csIDw/Hxo0bK3Sdd+7cQVJSksr26NEjODg44NSpU9i/fz+uXLmC4OBgxMfHlzq+oKAAfn5+uHDhAvbt24e5c+ciICAAWlpaFRo7ERGBDzAR1XTPPsBUmfZ79+4Jw4YNE+rWrSvI5XKhUaNGwujRo4XMzExBEJ4+sDRp0iTBxMREMDMzEwIDA4Vhw4aV+wCTIAjC48ePhSlTpgj16tUT9PT0hCZNmgjff/+92L5gwQLB2tpakMlkgq+vryAITx+6WrlypdCsWTNBV1dXsLCwEDw8PIQjR46Ix+3evVto0qSJIJfLhS5dugjff/99hR5gAlBq27x5s5CXlycMHz5cMDU1FczMzITx48cLM2fOFFq3bl3qewsJCRHq1KkjGBkZCaNHjxby8vLEmBeNnQ8wEREJgkwQynnagIiIiIhIzXibnoiIiIgkw2SUiIiIiCTDZJSIiIiIJMNklIiIiIgkw2SUiIiIiCTDZJSIiIiIJMNklIiIiIgkw2SUiIiIiCTDZJSIiIiIJMNklIiIiIgkw2SUiIiIiCTzf2lr+CTjx1g5AAAAAElFTkSuQmCC\n"},"metadata":{}}],"execution_count":18},{"cell_type":"markdown","source":"# 用集成模型测试test","metadata":{}},{"cell_type":"code","source":"preds_test_cum = np.zeros(preds_test.shape[0])\nfor i in range(len(features)):\n    preds_test_cum += preds_test[:,i]","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T14:31:30.273912Z","iopub.execute_input":"2025-09-19T14:31:30.274532Z","iopub.status.idle":"2025-09-19T14:31:30.423090Z","shell.execute_reply.started":"2025-09-19T14:31:30.274508Z","shell.execute_reply":"2025-09-19T14:31:30.422321Z"}},"outputs":[],"execution_count":19},{"cell_type":"markdown","source":"# 非集成训练，没有预处理的情况下在public上0.89764","metadata":{}},{"cell_type":"code","source":"def train_lightgbm_cv(\n    X, y, n_splits=5, X_test=None,\n    lgb_params=None, early_stopping_rounds=50, verbose=100, random_state=42, use_gpu=False\n):\n    \"\"\"\n    X: pd.DataFrame\n    y: pd.Series or np.array (二分类0/1)\n    X_test: 可选 pd.DataFrame，用于生成平均的测试集预测\n    lgb_params: 可选，覆盖默认参数的dict\n    use_gpu: 布尔值，是否使用 GPU 进行训练\n    返回:\n      models: 每折模型列表\n      oof_pred: OOF概率预测 (与y同长度)\n      fold_aucs: 每折AUC\n      test_pred: 若提供X_test，则返回对test的平均概率预测；否则为None\n    \"\"\"\n    # 保证 y 是一维\n    if isinstance(y, (pd.Series, pd.DataFrame)):\n        y_array = np.asarray(y).ravel()\n    else:\n        y_array = np.array(y).ravel()\n\n    # 默认参数（与你原始代码风格一致）\n    default_params = dict(\n        objective='binary',  # 二分类\n        metric='auc',  # AUC评估\n        num_iterations=9260, #7857\n        learning_rate=0.021486041710411602,  #0.01439208177569179\n        max_depth=-1,# 4\n        num_leaves=7, #3\n        min_child_samples=15,#14\n        min_child_weight=2.8347439982643077, #0.0010600546651740215\n        subsample=0.739143973150885,  #0.7287198574060416\n        colsample_bytree=0.9118372262395729,  #0.7760038290531817 \n        reg_alpha=2.103349141991908e-05,  #0.46861625691160347\n        reg_lambda=0.02735993658949373,  # 4.618569160105155\n        min_split_gain=0.12617128133514482, #0.5318699257329744\n        max_bin=197, #114\n        bagging_freq=2, #1\n        max_delta_step=6, #0\n        feature_fraction=0.9775505543864151 , #0.8931702719994763\n        scale_pos_weight=0.6178343363291716*9,  #  1.0123091516481864\n        random_state=random_state,\n        n_jobs=-1,\n        early_stopping_rounds=early_stopping_rounds,\n        verbose=-1\n    )\n\n    # 根据是否使用 GPU，修改默认参数\n    if use_gpu:\n        default_params.update({\n            'device': 'gpu',  # 启用 GPU\n            'gpu_platform_id': 0,  # 使用第一个 GPU 平台\n            'gpu_device_id': 0  # 使用第一个 GPU 设备\n        })\n    else:\n        # 若使用 CPU，则不需要额外的 GPU 设置\n        default_params.update({\n            'device': 'cpu',  # 强制使用 CPU\n        })\n\n    if lgb_params is not None:\n        default_params.update(lgb_params)\n\n    skf = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=random_state)\n\n    oof_pred = np.zeros(len(X), dtype=float)\n    test_pred = np.zeros(len(X_test), dtype=float) if X_test is not None else None\n    models = []\n\n    for fold, (train_idx, val_idx) in enumerate(skf.split(X, y_array), 1):\n        print(f\"\\n===== Fold {fold}/{n_splits} =====\")\n\n        # 按行号切片（关键修复点）\n        X_train, X_val = X.iloc[train_idx], X.iloc[val_idx]\n        y_train, y_val = y_array[train_idx], y_array[val_idx]\n\n        # 每折计算类别权重（可缓解不平衡）\n        params_fold = default_params.copy()\n\n        # 使用 LightGBM 模型\n        model = lgb.LGBMClassifier(**params_fold)\n\n        model.fit(\n            X_train, y_train,\n            eval_set=[(X_val, y_val)]\n        )\n\n        # ====== 评估：调用封装的评估函数评估训练集 ======\n        y_pred = model.predict(X_train)\n        y_pred_proba = model.predict_proba(X_train)[:, 1]\n        \n        metrics = evaluate_binary_classifier(\n            y_true=y_train,\n            y_pred=y_pred,\n            y_proba=y_pred_proba,\n            model=model,\n            model_name=\"lightgbm\",\n            label_names=('Class 0','Class 1'),\n            title_suffix=f'(Train Set)_{fold}',\n            save_results=True\n        )\n\n        #  ====== 评估：调用封装的评估函数评估验证集 ======\n        y_pred = model.predict(X_val)\n        y_pred_proba = model.predict_proba(X_val)[:, 1]\n            \n        metrics = evaluate_binary_classifier(\n            y_true=y_val,\n            y_pred=y_pred,\n            y_proba=y_pred_proba,\n            model=model,\n            model_name=\"lightgbm\",\n            label_names=('Class 0','Class 1'),\n            title_suffix=f'(Validation Set)_{fold}',\n            save_results=True\n        )\n        models.append(model)\n    \n    return models","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"lightgbm_models = train_lightgbm_cv(X_train, target_train,use_gpu=True)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"markdown","source":"# 保存树模型","metadata":{}},{"cell_type":"code","source":"def save_lgb_tree_pdfs(lgb_models, tree_index=0, out_prefix=\"lgb_tree\", rankdir=\"LR\"):\n    \"\"\"\n    lgb_models: 训练好的 LGBMClassifier/LGBMRegressor 列表\n    tree_index: 想导出的树索引（从 0 开始）\n    out_prefix: 输出文件前缀\n    rankdir: Graphviz 方向，'LR' 左→右，'TB' 上→下\n    \"\"\"\n    os.makedirs(\"lgb_trees_pdf\", exist_ok=True)\n\n    for i, model in enumerate(lgb_models, start=1):\n        # 兼容不同版本：booster_（新）/_Booster（旧）\n        booster = getattr(model, \"booster_\", None) or getattr(model, \"_Booster\", None)\n        if booster is None:\n            raise RuntimeError(\"模型还未 fit，或未找到 booster_/_Booster。\")\n\n        # 生成 graphviz.Digraph\n        dot = create_tree_digraph(\n            booster,\n            tree_index=tree_index,\n            show_info=(\"split_gain\", \"internal_value\", \"internal_count\", \"leaf_count\")\n        )\n        # 设置从左到右\n        dot.graph_attr.update(rankdir=rankdir)\n\n        # 渲染为 PDF\n        out_path = os.path.join(\"lgb_trees_pdf\", f\"{out_prefix}_{i}_{tree_index}\")\n        dot.render(out_path, format=\"pdf\", cleanup=True)\n        print(f\"Saved: {out_path}.pdf\")\nsave_lgb_tree_pdfs(lightgbm_models, tree_index=0, out_prefix=\"lgb_tree\", rankdir=\"LR\")","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"markdown","source":"# 特征重要性","metadata":{}},{"cell_type":"code","source":"# ====== 特征重要性（如需可关闭/挪到外部）======\ni=0\nfor model in lightgbm_models:\n    i+=1\n    plot_feature_importance(model, top_n=20, save_path=f\"/kaggle/working/lightgbm_feature_importance_{i}.png\")","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"markdown","source":"# 非集成简单平均预测test","metadata":{}},{"cell_type":"code","source":"test_pred = np.zeros(len(X_test))\nfor model in lightgbm_models:\n    test_pred += model.predict_proba(X_test)[:, 1]","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"markdown","source":"# 输出预测结果","metadata":{}},{"cell_type":"code","source":"sub = pd.read_csv('/kaggle/input/santander-customer-transaction-prediction/sample_submission.csv')\nsub['target'] = 0.0\n# 对于 real_samples_indexes 中的索引，填充预测值\nsub.loc[real_samples_indexes, 'target'] = preds_test_cum\nsub.to_csv('/kaggle/working/submission.csv',index=False)\nsub.head()","metadata":{"trusted":true,"execution":{"iopub.status.busy":"2025-09-19T14:31:37.057531Z","iopub.execute_input":"2025-09-19T14:31:37.057806Z","iopub.status.idle":"2025-09-19T14:31:37.519594Z","shell.execute_reply.started":"2025-09-19T14:31:37.057789Z","shell.execute_reply":"2025-09-19T14:31:37.518888Z"}},"outputs":[{"execution_count":20,"output_type":"execute_result","data":{"text/plain":"  ID_code     target\n0  test_0   0.000000\n1  test_1   0.000000\n2  test_2   0.000000\n3  test_3  58.086971\n4  test_4   0.000000","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>ID_code</th>\n      <th>target</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>test_0</td>\n      <td>0.000000</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>test_1</td>\n      <td>0.000000</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>test_2</td>\n      <td>0.000000</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>test_3</td>\n      <td>58.086971</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>test_4</td>\n      <td>0.000000</td>\n    </tr>\n  </tbody>\n</table>\n</div>"},"metadata":{}}],"execution_count":20},{"cell_type":"markdown","source":"# 划分训练和验证便于后续调参","metadata":{}},{"cell_type":"markdown","source":"调参分为2步，第一步是快速调参，将会在train集中抽取15%的数据用于调参，在val上进行验证，第二步是全量验证，将会运用调好的参数在所有数据上进行运行训练。","metadata":{}},{"cell_type":"code","source":"#划分训练集和验证集\nx_train, x_val, y_train, y_val = train_test_split(train_x, train_y, test_size=0.2, random_state=42)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"markdown","source":"# optuna搜索调优","metadata":{}},{"cell_type":"code","source":"# --- 小工具：根据是否用GPU返回合适的参数（避免使用 gpu_hist） ---\ndef _device_params(use_gpu: bool):\n    if use_gpu:\n        return {\"device\": \"gpu\"}  # 使用 GPU版本\n    else:\n        return {}  # CPU版本\n\ndef stratified_subsample(X, y, frac=0.1, random_state=42):\n    \"\"\"分层抽样：用于快速搜参（修正为只抽一次，避免不一致）\"\"\"\n    X_sub, _, y_sub, _ = train_test_split(\n        X, y, test_size=1 - frac, stratify=y, random_state=random_state\n    )\n    return X_sub, y_sub\n\ndef objective(trial, X_train, y_train, X_val, y_val, frac=0.15, use_gpu=False):\n    \"\"\"Optuna优化目标函数\"\"\"\n    # 子样本（搜参更快）\n    X_sub, y_sub = stratified_subsample(X_train, y_train, frac=frac, random_state=42)\n\n    # 定义超参数空间\n    param = {\n    \"num_iterations\": trial.suggest_int('num_iterations', 1000, 10000),\n    \"learning_rate\": trial.suggest_float('learning_rate', 0.01, 0.1),\n    \"num_leaves\": trial.suggest_int('num_leaves', 3,10),  # 关键：叶子数\n    \"min_child_samples\": trial.suggest_int('min_child_samples', 1, 30),  # 每个叶子最小样本数\n    \"min_child_weight\": trial.suggest_float('min_child_weight', 1e-3, 10.0, log=True),  # LGBM版本的 min_data_in_hessian\n    \"subsample\": trial.suggest_float('subsample', 0.7, 1.0),\n    \"colsample_bytree\": trial.suggest_float('colsample_bytree', 0.7, 1.0),\n    \"reg_alpha\": trial.suggest_float('reg_alpha', 1e-5, 1.0, log=True),\n    \"reg_lambda\": trial.suggest_float('reg_lambda', 1e-5, 5.0, log=True),\n    \"min_split_gain\": trial.suggest_float('min_split_gain', 0.0, 1.0),  # 分裂最小增益\n    \"max_bin\": trial.suggest_int('max_bin', 63, 255),  # 分箱数，精度 vs 速度\n    \"bagging_freq\": trial.suggest_int('bagging_freq', 1, 10),  # bagging频率\n    \"feature_fraction\": trial.suggest_float('feature_fraction', 0.7, 1.0),  # 等价于 colsample_bytree\n    \"scale_pos_weight\": trial.suggest_float('scale_pos_weight', 0.5, 2.0),\n    \"max_delta_step\": trial.suggest_int('max_delta_step', 0, 10),\n}\n\n\n    # 类别不平衡（基于子样本）\n    pos = np.sum(y_sub == 1)\n    neg = np.sum(y_sub == 0)\n    spw = (neg / pos) if pos > 0 else 1.0\n    param['scale_pos_weight'] = spw * param['scale_pos_weight']\n\n    # 创建 LightGBM 模型\n    model = lgb.LGBMClassifier(\n        objective='binary',\n        metric='auc',\n        n_estimators=3000,\n        early_stopping_rounds=50,\n        random_state=42,\n        n_jobs=-1,\n        **_device_params(use_gpu),\n        **param,\n        verbose=-1\n    )\n\n    # 转换为 NumPy 数组\n    X_sub_np = X_sub.values if hasattr(X_sub, \"values\") else np.asarray(X_sub)\n    X_val_np = X_val.values if hasattr(X_val, \"values\") else np.asarray(X_val)\n    y_sub_np = np.asarray(y_sub)\n    y_val_np = np.asarray(y_val)\n\n    # 训练模型并评估 AUC\n    model.fit(X_sub_np, y_sub_np, eval_set=[(X_val_np, y_val_np)])\n    val_auc = roc_auc_score(y_val_np, model.predict_proba(X_val_np)[:, 1])\n\n    return val_auc  # 目标是最大化 AUC\n\n\ndef tune_lgb_with_optuna(X_train, y_train, X_val, y_val, n_trials=25, use_gpu=False):\n    \"\"\"使用 Optuna 优化 LightGBM 超参数\"\"\"\n    study = optuna.create_study(direction='maximize')  # 目标是最大化 AUC\n    study.optimize(lambda trial: objective(trial, X_train, y_train, X_val, y_val, use_gpu=use_gpu), n_trials=n_trials)\n\n    print(\"Best parameters:\", study.best_params)\n    print(\"Best AUC:\", study.best_value)\n    return study.best_params\n\ndef train_full_lgb(X_train, y_train, X_val, y_val, best_params, use_gpu=False):\n    \"\"\"用全量数据+早停做精修训练\"\"\"\n    # 转 numpy\n    X_train_np = X_train.values if hasattr(X_train, \"values\") else np.asarray(X_train)\n    X_val_np = X_val.values if hasattr(X_val, \"values\") else np.asarray(X_val)\n    y_train_np = np.asarray(y_train)\n    y_val_np = np.asarray(y_val)\n\n    # 创建最终模型\n    model = lgb.LGBMClassifier(\n        objective='binary',\n        metric='auc',\n        n_estimators=5000,\n        early_stopping_rounds=100,\n        random_state=42,\n        n_jobs=-1,\n        **_device_params(use_gpu),\n        **best_params,\n        verbose=-1\n    )\n\n    # 训练模型\n    model.fit(X_train_np, y_train_np, eval_set=[(X_val_np, y_val_np)])\n\n    # 验证集 AUC\n    val_auc = roc_auc_score(y_val_np, model.predict_proba(X_val_np)[:, 1])\n    print(\"Final model AUC:\", val_auc)\n    return model\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"best_params = tune_lgb_with_optuna(X_train=x_train, y_train=y_train, X_val=x_val, y_val=y_val, n_trials=50, use_gpu=True)\n\n# 步骤2：使用调优后的超参数训练最终模型\nfinal_model = train_full_lgb(X_train=x_train, y_train=y_train, X_val=x_val, y_val=y_val, best_params=best_params, use_gpu=True)","metadata":{"trusted":true},"outputs":[],"execution_count":null}]}