{"cells":[{"cell_type":"markdown","metadata":{"id":"13A99CE0B3EC4F8A9748041F4C877C5A","mdEditEnable":false},"source":"# 下面是可能会用到的库"},{"cell_type":"code","execution_count":4,"metadata":{"id":"AE88ECE6983244259309EB6CA1538EC8","collapsed":false,"scrolled":false},"outputs":[],"source":"import numpy as np\nimport pandas as pd\nimport time\nimport sys\nimport datetime\nimport gc\nfrom sklearn.model_selection import StratifiedShuffleSplit\nfrom sklearn.model_selection import KFold, cross_val_score, train_test_split\nfrom sklearn.model_selection import StratifiedKFold\nfrom sklearn.metrics import roc_auc_score, log_loss\nimport lightgbm as lgb\nfrom sklearn.preprocessing import OneHotEncoder, LabelEncoder\nfrom sklearn.feature_extraction.text import CountVectorizer\nfrom sklearn.feature_selection import chi2, SelectPercentile\nimport math\nfrom sklearn.metrics import f1_score\nimport jieba\nimport jieba.posseg as psg\nfrom collections import Counter\nimport functools\nfrom time import time\nimport Levenshtein\nfrom gensim.models import Word2Vec\nimport numpy as np\nfrom sklearn.cluster import MiniBatchKMeans\nfrom sklearn import datasets\n\ndata_path='/home/kesci/input/bytedance/first-round/'\ndata_store_path='/home/kesci/work/1000W_stacking/'"},{"cell_type":"markdown","metadata":{"id":"6C33A00B229E48FF8170B41E6C732332","mdEditEnable":false},"source":"# 处理数据"},{"cell_type":"markdown","metadata":{"id":"C2A10C2B107B4B7DA961201400C78B78"},"source":"## 读测试集"},{"cell_type":"code","execution_count":3,"metadata":{"collapsed":false,"id":"CE77F2F87EC74C918D59B45EBB57566E","scrolled":true},"outputs":[{"output_type":"execute_result","metadata":{},"data":{"text/plain":"                         query  \\\n0              11202 184 50256   \n1              11202 184 50256   \n2              11202 184 50256   \n3              11202 184 50256   \n4         1013 6811 14038 1156   \n5         1013 6811 14038 1156   \n6         1013 6811 14038 1156   \n7                  28 240 2157   \n8    
              28 240 2157   \n9                  28 240 2157   \n10                 28 240 2157   \n11                 28 240 2157   \n12                 28 240 2157   \n13                 28 240 2157   \n14                 28 240 2157   \n15             1117 4269 35410   \n16             1117 4269 35410   \n17             1117 4269 35410   \n18             1117 4269 35410   \n19             1117 4269 35410   \n20             1117 4269 35410   \n21            12072 1214 23026   \n22            12072 1214 23026   \n23            12072 1214 23026   \n24            12072 1214 23026   \n25            12072 1214 23026   \n26            2816 121 14 1898   \n27            2816 121 14 1898   \n28            2816 121 14 1898   \n29                 123635 1842   \n...                        ...   \n4999970           13807 462173   \n4999971           13807 462173   \n4999972           13807 462173   \n4999973           13807 462173   \n4999974       166630 997 19737   \n4999975       166630 997 19737   \n4999976       166630 997 19737   \n4999977              781 49481   \n4999978              781 49481   \n4999979              781 49481   \n4999980  127710 39 19167 15 10   \n4999981  127710 39 19167 15 10   \n4999982  127710 39 19167 15 10   \n4999983  127710 39 19167 15 10   \n4999984  127710 39 19167 15 10   \n4999985  127710 39 19167 15 10   \n4999986  127710 39 19167 15 10   \n4999987  127710 39 19167 15 10   \n4999988              76936 663   \n4999989              76936 663   \n4999990              76936 663   \n4999991              76936 663   \n4999992      240 4077 122 1544   \n4999993      240 4077 122 1544   \n4999994      240 4077 122 1544   \n4999995      240 4077 122 1544   \n4999996      240 4077 122 1544   \n4999997      240 4077 122 1544   \n4999998      240 4077 122 1544   \n4999999      240 4077 122 1544   \n\n                                                     title  \n0        11202 184 2346 2527 274 383 34 1033 156 18 502...  
\n1        11202 184 21479 808 383 34 1033 18 50256 19 27...  \n2        11202 184 21479 15 227 383 34 1033 156 116 18 ...  \n3        11202 184 274 383 34 1033 156 15 18 50256 19 2...  \n4        361 628 1020 513 126 15 1013 6811 2315 743 27 ...  \n5                                  21 126 15 1013 6811 743  \n6        4303 15 1013 6811 743 18608 202 3474 671 4 280...  \n7                                             28 240 10 11  \n8                              28 240 10 27 32 241 289 241  \n9                               28 240 10 27 5922 12478 11  \n10          240 2157 27 1248 28 27 2 34 289 144 4422 31 11  \n11                                240 2157 36 28 9351 2868  \n12                     28 240 10 27 32 31 2578 40 66664 31  \n13       28 10 240 11898 64751 18179 13627 28 27 613 40...  \n14                              28 240 10 1098 27 34152 31  \n15       1117 485 783 18 4077 19 211 183 4269 35410 27 ...  \n16       1117 12984 27 18 4269 35410 19 6573 5908 37 11...  \n17       1117 36 4269 35410 286 14 4315 15 747 16 2701 ...  \n18       1117 36 635 253 114 4269 35410 27 311 6135 407...  \n19       1117 12984 27 1117 1522 5854 4269 35410 27 158...  \n20       1117 12984 27 166 25587 18 4269 35410 19 27 10...  \n21                   447 1214 2721 23026 4944 15 4536 3052  \n22       1753 11 6664 395 979 447 1214 4944 27 50497 14...  \n23       52 4481 27 13663 15 447 1214 406 2114 14 4560 ...  \n24                                55431 1214 15 23026 6815  \n25       447 1214 15356 23026 27 2721 3546 9669 24115 5...  \n26       72670 65010 52517 27 42 755 64 5218 2504 2816 ...  \n27       2816 4360 5834 1603 3918 16446 27 26889 329 81...  \n28       3739 58257 46717 16446 21449 27 1266 14 38787 ...  \n29       123635 26197 58 13 2969 3500 123635 15 1921 48...  \n...                                                    ...  \n4999970  3176 483 31 56197 15 74581 27 247 679 24187 13...  \n4999971  543 27 41816 331 1137 75 13807 462173 416 31 3...  
\n4999972  3176 59 483 31 74581 27 247 679 3327 13807 462...  \n4999973  14692 36 548 651 563 15 419 27 288680 6298 108...  \n4999974   81069 36 166630 2874 186208 860 15 323 34 961 11  \n4999975                                     166630 36 3002  \n4999976  64166 746 5232 997 11259 1224 48 14977 237512 ...  \n4999977             781 49481 2138 27 22 1591 22 9 113 802  \n4999978  781 83535 508 21502 27 32 15 17 14296 5881 31 ...  \n4999979  49481 12601 11 247 49481 1824 21502 1032 43 44...  \n4999980  396 36 19167 6143 3280 4873 27 11901 59189 15 ...  \n4999981  396 36 1416 54 68 1100 6377 16003 4959 5034 37...  \n4999982      439 19167 3920 94 27 209 19167 15 221 3912 11  \n4999983  396 56449 36 7203 1276 2827 647 13 65 1670 37 ...  \n4999984  4 917 79 913 396 6012 15 25820 27 145 37215 19...  \n4999985  130 3849 1560 11 56449 39 19167 1391 21 1481 1...  \n4999986  396 36 811 632 134 1100 44088 330 102397 508 8...  \n4999987  396 36 85 1416 15 1499 39 835 38 108 247 19167...  \n4999988  48550 36 335 75 5976 1491 1297 35 663 207 9623...  \n4999989  10121 5992 35 10121 1604 87 87 76936 1476 2195...  \n4999990                              76936 3244 1066 48079  \n4999991  731 75 19762 42697 11491 8345 3998 73649 564 7...  \n4999992  240 4077 1544 2 23628 292 83 24098 2991 13 125...  \n4999993  240 4077 1544 2308 344791 5956 55 182 37 23628...  \n4999994  240 4077 1544 183 70130 15 16 490 2000 37 4313...  \n4999995  240 4077 1544 24 107206 23 15 118748 22 872 30...  \n4999996            2804 4077 36 2701 771 18510 27 1879 240  \n4999997  21440 36 88691 71 18 1108 4077 19 70 1425 3204...  \n4999998      59449 11 4077 122 69275 41793 5628 6313 19403  \n4999999  240 7644 2372 36 4077 122 1948 15 24431 27 188...  
\n\n[5000000 rows x 2 columns]","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>query</th>\n      <th>title</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>11202 184 50256</td>\n      <td>11202 184 2346 2527 274 383 34 1033 156 18 502...</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>11202 184 50256</td>\n      <td>11202 184 21479 808 383 34 1033 18 50256 19 27...</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>11202 184 50256</td>\n      <td>11202 184 21479 15 227 383 34 1033 156 116 18 ...</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>11202 184 50256</td>\n      <td>11202 184 274 383 34 1033 156 15 18 50256 19 2...</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>1013 6811 14038 1156</td>\n      <td>361 628 1020 513 126 15 1013 6811 2315 743 27 ...</td>\n    </tr>\n    <tr>\n      <th>5</th>\n      <td>1013 6811 14038 1156</td>\n      <td>21 126 15 1013 6811 743</td>\n    </tr>\n    <tr>\n      <th>6</th>\n      <td>1013 6811 14038 1156</td>\n      <td>4303 15 1013 6811 743 18608 202 3474 671 4 280...</td>\n    </tr>\n    <tr>\n      <th>7</th>\n      <td>28 240 2157</td>\n      <td>28 240 10 11</td>\n    </tr>\n    <tr>\n      <th>8</th>\n      <td>28 240 2157</td>\n      <td>28 240 10 27 32 241 289 241</td>\n    </tr>\n    <tr>\n      <th>9</th>\n      <td>28 240 2157</td>\n      <td>28 240 10 27 5922 12478 11</td>\n    </tr>\n    <tr>\n      <th>10</th>\n      <td>28 240 2157</td>\n      <td>240 2157 27 1248 28 27 2 34 289 144 4422 31 11</td>\n    </tr>\n    <tr>\n      <th>11</th>\n      <td>28 240 2157</td>\n      <td>240 2157 36 28 9351 2868</td>\n    </tr>\n    <tr>\n    
  <th>12</th>\n      <td>28 240 2157</td>\n      <td>28 240 10 27 32 31 2578 40 66664 31</td>\n    </tr>\n    <tr>\n      <th>13</th>\n      <td>28 240 2157</td>\n      <td>28 10 240 11898 64751 18179 13627 28 27 613 40...</td>\n    </tr>\n    <tr>\n      <th>14</th>\n      <td>28 240 2157</td>\n      <td>28 240 10 1098 27 34152 31</td>\n    </tr>\n    <tr>\n      <th>15</th>\n      <td>1117 4269 35410</td>\n      <td>1117 485 783 18 4077 19 211 183 4269 35410 27 ...</td>\n    </tr>\n    <tr>\n      <th>16</th>\n      <td>1117 4269 35410</td>\n      <td>1117 12984 27 18 4269 35410 19 6573 5908 37 11...</td>\n    </tr>\n    <tr>\n      <th>17</th>\n      <td>1117 4269 35410</td>\n      <td>1117 36 4269 35410 286 14 4315 15 747 16 2701 ...</td>\n    </tr>\n    <tr>\n      <th>18</th>\n      <td>1117 4269 35410</td>\n      <td>1117 36 635 253 114 4269 35410 27 311 6135 407...</td>\n    </tr>\n    <tr>\n      <th>19</th>\n      <td>1117 4269 35410</td>\n      <td>1117 12984 27 1117 1522 5854 4269 35410 27 158...</td>\n    </tr>\n    <tr>\n      <th>20</th>\n      <td>1117 4269 35410</td>\n      <td>1117 12984 27 166 25587 18 4269 35410 19 27 10...</td>\n    </tr>\n    <tr>\n      <th>21</th>\n      <td>12072 1214 23026</td>\n      <td>447 1214 2721 23026 4944 15 4536 3052</td>\n    </tr>\n    <tr>\n      <th>22</th>\n      <td>12072 1214 23026</td>\n      <td>1753 11 6664 395 979 447 1214 4944 27 50497 14...</td>\n    </tr>\n    <tr>\n      <th>23</th>\n      <td>12072 1214 23026</td>\n      <td>52 4481 27 13663 15 447 1214 406 2114 14 4560 ...</td>\n    </tr>\n    <tr>\n      <th>24</th>\n      <td>12072 1214 23026</td>\n      <td>55431 1214 15 23026 6815</td>\n    </tr>\n    <tr>\n      <th>25</th>\n      <td>12072 1214 23026</td>\n      <td>447 1214 15356 23026 27 2721 3546 9669 24115 5...</td>\n    </tr>\n    <tr>\n      <th>26</th>\n      <td>2816 121 14 1898</td>\n      <td>72670 65010 52517 27 42 755 64 5218 2504 2816 ...</td>\n    </tr>\n    <tr>\n      
<th>27</th>\n      <td>2816 121 14 1898</td>\n      <td>2816 4360 5834 1603 3918 16446 27 26889 329 81...</td>\n    </tr>\n    <tr>\n      <th>28</th>\n      <td>2816 121 14 1898</td>\n      <td>3739 58257 46717 16446 21449 27 1266 14 38787 ...</td>\n    </tr>\n    <tr>\n      <th>29</th>\n      <td>123635 1842</td>\n      <td>123635 26197 58 13 2969 3500 123635 15 1921 48...</td>\n    </tr>\n    <tr>\n      <th>...</th>\n      <td>...</td>\n      <td>...</td>\n    </tr>\n    <tr>\n      <th>4999970</th>\n      <td>13807 462173</td>\n      <td>3176 483 31 56197 15 74581 27 247 679 24187 13...</td>\n    </tr>\n    <tr>\n      <th>4999971</th>\n      <td>13807 462173</td>\n      <td>543 27 41816 331 1137 75 13807 462173 416 31 3...</td>\n    </tr>\n    <tr>\n      <th>4999972</th>\n      <td>13807 462173</td>\n      <td>3176 59 483 31 74581 27 247 679 3327 13807 462...</td>\n    </tr>\n    <tr>\n      <th>4999973</th>\n      <td>13807 462173</td>\n      <td>14692 36 548 651 563 15 419 27 288680 6298 108...</td>\n    </tr>\n    <tr>\n      <th>4999974</th>\n      <td>166630 997 19737</td>\n      <td>81069 36 166630 2874 186208 860 15 323 34 961 11</td>\n    </tr>\n    <tr>\n      <th>4999975</th>\n      <td>166630 997 19737</td>\n      <td>166630 36 3002</td>\n    </tr>\n    <tr>\n      <th>4999976</th>\n      <td>166630 997 19737</td>\n      <td>64166 746 5232 997 11259 1224 48 14977 237512 ...</td>\n    </tr>\n    <tr>\n      <th>4999977</th>\n      <td>781 49481</td>\n      <td>781 49481 2138 27 22 1591 22 9 113 802</td>\n    </tr>\n    <tr>\n      <th>4999978</th>\n      <td>781 49481</td>\n      <td>781 83535 508 21502 27 32 15 17 14296 5881 31 ...</td>\n    </tr>\n    <tr>\n      <th>4999979</th>\n      <td>781 49481</td>\n      <td>49481 12601 11 247 49481 1824 21502 1032 43 44...</td>\n    </tr>\n    <tr>\n      <th>4999980</th>\n      <td>127710 39 19167 15 10</td>\n      <td>396 36 19167 6143 3280 4873 27 11901 59189 15 ...</td>\n    </tr>\n    <tr>\n      
<th>4999981</th>\n      <td>127710 39 19167 15 10</td>\n      <td>396 36 1416 54 68 1100 6377 16003 4959 5034 37...</td>\n    </tr>\n    <tr>\n      <th>4999982</th>\n      <td>127710 39 19167 15 10</td>\n      <td>439 19167 3920 94 27 209 19167 15 221 3912 11</td>\n    </tr>\n    <tr>\n      <th>4999983</th>\n      <td>127710 39 19167 15 10</td>\n      <td>396 56449 36 7203 1276 2827 647 13 65 1670 37 ...</td>\n    </tr>\n    <tr>\n      <th>4999984</th>\n      <td>127710 39 19167 15 10</td>\n      <td>4 917 79 913 396 6012 15 25820 27 145 37215 19...</td>\n    </tr>\n    <tr>\n      <th>4999985</th>\n      <td>127710 39 19167 15 10</td>\n      <td>130 3849 1560 11 56449 39 19167 1391 21 1481 1...</td>\n    </tr>\n    <tr>\n      <th>4999986</th>\n      <td>127710 39 19167 15 10</td>\n      <td>396 36 811 632 134 1100 44088 330 102397 508 8...</td>\n    </tr>\n    <tr>\n      <th>4999987</th>\n      <td>127710 39 19167 15 10</td>\n      <td>396 36 85 1416 15 1499 39 835 38 108 247 19167...</td>\n    </tr>\n    <tr>\n      <th>4999988</th>\n      <td>76936 663</td>\n      <td>48550 36 335 75 5976 1491 1297 35 663 207 9623...</td>\n    </tr>\n    <tr>\n      <th>4999989</th>\n      <td>76936 663</td>\n      <td>10121 5992 35 10121 1604 87 87 76936 1476 2195...</td>\n    </tr>\n    <tr>\n      <th>4999990</th>\n      <td>76936 663</td>\n      <td>76936 3244 1066 48079</td>\n    </tr>\n    <tr>\n      <th>4999991</th>\n      <td>76936 663</td>\n      <td>731 75 19762 42697 11491 8345 3998 73649 564 7...</td>\n    </tr>\n    <tr>\n      <th>4999992</th>\n      <td>240 4077 122 1544</td>\n      <td>240 4077 1544 2 23628 292 83 24098 2991 13 125...</td>\n    </tr>\n    <tr>\n      <th>4999993</th>\n      <td>240 4077 122 1544</td>\n      <td>240 4077 1544 2308 344791 5956 55 182 37 23628...</td>\n    </tr>\n    <tr>\n      <th>4999994</th>\n      <td>240 4077 122 1544</td>\n      <td>240 4077 1544 183 70130 15 16 490 2000 37 4313...</td>\n    </tr>\n    <tr>\n      
# ---------------------------------------------------------------
# Test set: stream test.csv in 1M-row chunks, then concatenate.
# ---------------------------------------------------------------
test_chunks = []
for part in pd.read_csv(data_path + 'test.csv', sep=',',
                        names=['query_id', 'query', 'query_title_id', 'title'],
                        usecols=['query', 'title'],
                        chunksize=1000000, header=None):
    test_chunks.append(part)
test_data = pd.concat(test_chunks, axis=0)
# The test-set titles contain stray '\t' characters — strip them.
test_data[['title']] = test_data[['title']].applymap(lambda s: s.replace('\t', ''))
test_data

# ---------------------------------------------------------------
# Full training set, also streamed in 1M-row chunks; the chunk
# index is printed as a progress indicator.
# ---------------------------------------------------------------
train_chunks = []
for chunk_no, part in enumerate(pd.read_csv(data_path + "train.csv",
                                            names=['query_id', 'query', 'query_title_id', 'title', 'label'],
                                            usecols=[1, 3, 4],
                                            chunksize=1000000, header=None)):
    train_chunks.append(part)
    print(chunk_no)
all_data_train = pd.concat(train_chunks, axis=0)
del train_chunks

# ---------------------------------------------------------------
# Reload previously stored train/test sets that already carry the
# CTR features (alternative entry point that skips recomputation).
# ---------------------------------------------------------------
train_data = pd.read_csv(data_store_path + "data_sets/train_data_ctr.csv")
test_data = pd.read_csv(data_store_path + "data_sets/test_data_ctr.csv")
# Stratified 10% / 90% split of the ~100M-row training set:
# ~10M rows become the model training set, the other ~90M are reserved
# for building CTR statistics. StratifiedShuffleSplit keeps the 0/1
# label proportions identical in both parts (see the author's note:
# DataFrame.sample() would be simpler but does not guarantee this).
splitter = StratifiedShuffleSplit(n_splits=5, test_size=0.90, random_state=1)
# Only the first of the 5 generated splits is consumed.
train_index, feature_index = next(splitter.split(all_data_train['label'], all_data_train['label']))
print('StratifiedShuffleSplit ended')
train_data = all_data_train.iloc[train_index]
feature_data = all_data_train.iloc[feature_index]
del all_data_train
print("开始存训练集")
t_start = time()
train_data.to_csv(data_store_path + "data_sets/train_data.csv", index=None)
t_end = time()
print("存训练集总共用时：", t_end - t_start)
del feature_index
del train_index
def getBayesSmoothParam(origion_rate):
    """Moment-matching estimate of a Beta(alpha, beta) prior.

    Fits a Beta distribution to the observed rate series by matching its
    mean and variance, so the result can be used for Bayesian smoothing
    of CTR values.

    Parameters
    ----------
    origion_rate : pd.Series
        Observed per-group click-through rates.

    Returns
    -------
    (float, float)
        The (alpha, beta) parameters of the matched Beta prior.
    """
    mu = origion_rate.mean()
    var = origion_rate.var()
    # Shared moment term mu*(1-mu) - var; positive whenever a Beta fit exists.
    common = mu * (1 - mu) - var
    alpha = mu / var * common
    beta = (1 - mu) / var * common
    return alpha, beta
# Per-title CTR statistics computed on the held-out ~90M rows:
# count / mean / sum of the binary click label for every title.
title_ctr = feature_data.groupby('title', as_index=False)['label'].agg(
    {'title_count': 'count', 'title_ctr': 'mean', 'title_sum': 'sum'})
del feature_data
print("ctr基本做完，开始平滑")
# Bayesian smoothing with a moment-matched Beta prior (method of moments).
alpha, beta = getBayesSmoothParam(title_ctr['title_ctr'])
title_ctr['title_ctr'] = (title_ctr['title_sum'] + alpha) / (title_ctr['title_count'] + alpha + beta)
print("平滑完成")
# Attach the smoothed title-level CTR to both splits.
train_data = pd.merge(train_data, title_ctr, on='title', how='left')
test_data = pd.merge(test_data, title_ctr, on='title', how='left')
del title_ctr

print("保存做好ctr特征的数据")
train_data.to_csv(data_store_path + "data_sets/train_data_ctr.csv", index=None)
test_data.to_csv(data_store_path + "data_sets/test_data_ctr.csv", index=None)
print("保存完毕")

# Tokenise query/title: space-separated id strings -> lists of tokens.
for col in ('query', 'title'):
    train_data[[col]] = train_data[[col]].applymap(lambda s: s.split(' '))
    test_data[[col]] = test_data[[col]].applymap(lambda s: s.split(' '))
# Word count (text length) per field.
# NOTE(review): an earlier cell may have already split query/title into token
# lists, so accept both str and list inputs — confirm which form reaches here.
for feature in ['query', 'title']:
    train_data[feature + '_word_num'] = train_data[feature].apply(
        lambda x: len(x.split()) if isinstance(x, str) else len(x))

# Number of distinct titles per query, and distinct queries per title.
# BUG FIX: the original read `.prefix`, a column name copied from another
# competition's dataset; this dataset has no 'prefix' column — use 'query'.
# (The original column names are kept for downstream compatibility.)
train_data['prefix_nunique_title'] = train_data.groupby('query')['title'].transform('nunique')
train_data['title_nunique_prefix'] = train_data.groupby('title')['query'].transform('nunique')

# Row-wise aggregates of the three similarity features.
# BUG FIX: the original loop was missing the ':' after `for` and had an
# unbalanced bracket in the column selection (SyntaxError). It also used
# .apply(axis=1), which the author's own comment flags as hundreds of times
# slower than vectorised operations — replaced with vectorised reductions
# that produce the same column names and values.
sim_cols = ['title_query_dot_similarity', 'title_query_norm_similarity',
            'title_query_cosine_similarity']
train_data['similarity_max'] = train_data[sim_cols].max(axis=1)
train_data['similarity_min'] = train_data[sim_cols].min(axis=1)
train_data['similarity_mean'] = train_data[sim_cols].mean(axis=1)
# ddof=0 matches np.std, which the original loop would have applied.
train_data['similarity_std'] = train_data[sim_cols].std(axis=1, ddof=0)
def get_string_len_feature(df):
    """Add character-length features: field lengths, their difference and ratio.

    NOTE(review): lengths are measured in characters of the obfuscated id
    string, not in words — the author observed this feature is nevertheless
    surprisingly important. Mutates and returns `df`.
    """
    global eooch_tongji
    eooch_tongji += 1
    if eooch_tongji % 5000 == 0:
        # Lightweight progress indicator (rewrites the same console line).
        sys.stdout.write(str(eooch_tongji) + '\r')
        sys.stdout.flush()
    df['query_len'] = df['query'].map(len)
    df['title_len'] = df['title'].map(len)
    df['len_title-query'] = df['title_len'] - df['query_len']
    df['len_query/title'] = df['query_len'] / df['title_len']
    return df


def get_title_query_levenshtein_distance(df):
    """Edit distance between the raw title and query strings of one row."""
    global eooch_tongji
    eooch_tongji += 1
    if eooch_tongji % 5000 == 0:
        sys.stdout.write(str(eooch_tongji) + '\r')
        sys.stdout.flush()
    return Levenshtein.distance(df['title'], df['query'])
epoch = 0  # shared progress counter for the co-occurrence feature functions


def _progress_tick():
    """Advance the shared counter and print progress every 5000 calls."""
    global epoch
    epoch += 1
    if epoch % 5000 == 0:
        sys.stdout.write(str(epoch) + '\r')
        sys.stdout.flush()


def word_match_share(df):
    """Fraction of shared unique words: 2*|Q∩T| / (|Q| + |T|)."""
    unique_q = set(df[0])
    unique_t = set(df[1])
    if not unique_q or not unique_t:
        # The computer-generated chaff includes a few questions that are nothing but stopwords
        return 0
    shared = unique_q & unique_t
    ratio = (len(shared) + len(shared)) / (len(unique_q) + len(unique_t))
    _progress_tick()
    return ratio


def jaccard(df):
    """Jaccard similarity |Q∩T| / |Q∪T| of the two token sequences."""
    intersection = set(df[0]) & set(df[1])
    union = set(df[0]).union(df[1])
    if len(union) == 0:
        union = [1]  # avoid division by zero when both sides are empty
    _progress_tick()
    return len(intersection) / len(union)


def common_words(df):
    """Number of distinct words shared by the two texts."""
    _progress_tick()
    return len(set(df[0]) & set(df[1]))


def total_unique_words(df):
    """Number of distinct words in the union of the two texts."""
    _progress_tick()
    return len(set(df[0]) | set(df[1]))
def wc_ratio(df):
    """Length ratio of the two token sequences, normalised to [0, 1].

    BUG FIX: the original condition was `if l1 / l2:`, which is truthy for
    every non-zero l1, so the second branch was effectively dead and the
    function could return ratios greater than 1 (a known bug inherited from
    the Quora-pairs kernel). The intended shorter/longer ratio is restored
    with an explicit comparison. Returns np.nan when the second text is empty.
    """
    global epoch
    epoch += 1
    if epoch % 5000 == 0:
        sys.stdout.write(str(epoch) + '\r')
        sys.stdout.flush()
    l1 = len(df[0]) * 1.0
    l2 = len(df[1])
    if l2 == 0:
        return np.nan
    if l1 / l2 > 1:
        return l2 / l1
    else:
        return l1 / l2


def wc_diff_unique(df):
    """Absolute difference in the number of distinct words."""
    global epoch
    epoch += 1
    if epoch % 5000 == 0:
        sys.stdout.write(str(epoch) + '\r')
        sys.stdout.flush()
    return abs(len(set(df[0])) - len(set(df[1])))


def wc_ratio_unique(df):
    """Distinct-word-count ratio, normalised to [0, 1].

    BUG FIX: same dead-branch condition as wc_ratio (see above).
    Returns np.nan when the second text has no distinct words.
    """
    global epoch
    epoch += 1
    if epoch % 5000 == 0:
        sys.stdout.write(str(epoch) + '\r')
        sys.stdout.flush()
    l1 = len(set(df[0])) * 1.0
    l2 = len(set(df[1]))
    if l2 == 0:
        return np.nan
    if l1 / l2 > 1:
        return l2 / l1
    else:
        return l1 / l2


def deal_word_for_all(train_df, fea1, fea2, func, colName):
    """Apply `func` row-wise over the (fea1, fea2) column pair and store the
    result in a new column `colName`. Returns the mutated DataFrame.

    NOTE(review): `func` indexes rows positionally (df[0]/df[1]), which relies
    on pandas' legacy positional fallback for Series indexing — confirm the
    installed pandas version still supports it.
    """
    train_df[colName] = train_df[[fea1, fea2]].apply(func, axis=1)
    print(colName + ' finish!!!')
    return train_df
fea1, fea2, wc_diff_unique, fea1[0] + '_' + fea2[0] + '_wc_diff_unique')\n        train_df = deal_word_for_all(train_df, fea1, fea2, wc_ratio_unique, fea1[0] + '_' + fea2[0] + '_wc_ratio_unique')\n#         f = functools.partial(tfidf_word_match_share, weights=weights)\n#         train_df, valid_df = deal_word_for_all(train_df, valid_df, fea1, fea2, f, fea1[0] + '_' + fea2[0] + '_tfidf_word_match_share')\n    return train_df","execution_count":9},{"metadata":{"id":"0B2DF2CCFEA749B78BAFE9C07E905488","mdEditEnable":false},"cell_type":"markdown","source":"# 基于词向量的query title相似度"},{"metadata":{"id":"CBBA85B791104749910D0F161A4C74C3","collapsed":false,"scrolled":false},"cell_type":"code","outputs":[],"source":"import numpy as np\r\nimport pandas as pd\r\nimport time\r\nimport sys\r\nimport datetime\r\nimport gc\r\nfrom sklearn.model_selection import StratifiedShuffleSplit\r\nfrom sklearn.model_selection import KFold, cross_val_score, train_test_split\r\nfrom sklearn.model_selection import StratifiedKFold\r\nfrom sklearn.metrics import roc_auc_score, log_loss\r\nimport lightgbm as lgb\r\nfrom sklearn.preprocessing import OneHotEncoder, LabelEncoder\r\nfrom sklearn.feature_extraction.text import CountVectorizer\r\nfrom sklearn.feature_selection import chi2, SelectPercentile\r\nimport math\r\nfrom sklearn.metrics import f1_score\r\nimport jieba\r\nimport jieba.posseg as psg\r\nfrom collections import Counter\r\nimport functools\r\nfrom time import time\r\nimport Levenshtein\r\nfrom gensim.models import Word2Vec\r\nfrom time import sleep\r\ndata_path='/home/kesci/input/bytedance/first-round/'\r\ndata_store_path='/home/kesci/work/test/'\r\n\r\n\r\nepoch_w2v=0\r\n\r\ndef get_w2v_feature(word_list, word_wv, w2v_size):\r\n    word_list[0]=word_list[0].split(' ')\r\n    word_list[1]=word_list[1].split(' ')\r\n    \r\n    \r\n    wmd_dist = w2v_model.wmdistance(word_list[0], word_list[1])\r\n    \r\n    if wmd_dist==np.inf:\r\n        wmd_dist=np.nan\r\n    \r\n    global 
# ---- word2vec-based query/title similarity features ----
# NOTE(review): duplicated re-imports of numpy/pandas/sklearn/etc. that this
# cell never uses were removed; the setup cell at the top already imports them.
import sys
import numpy as np
import pandas as pd
from gensim.models import Word2Vec

data_path = '/home/kesci/input/bytedance/first-round/'
data_store_path = '/home/kesci/work/test/'


epoch_w2v = 0  # progress counter for the row-wise apply below


def get_w2v_feature(word_list, word_wv, w2v_size):
    """Compute word2vec similarity features for one (query, title) row.

    Parameters
    ----------
    word_list : row where word_list[0] is the query string and word_list[1]
        the title string (space-separated token ids).
    word_wv : gensim word vectors used to embed tokens (assumed gensim < 4,
        which exposes `.vocab` — TODO confirm the installed version).
    w2v_size : embedding dimensionality.

    Returns
    -------
    [dot, euclidean distance, cosine, word-mover distance] between the mean
    (bag-of-words) sentence vectors.
    """
    # BUG FIX: the original mutated the input row in place
    # (word_list[0] = word_list[0].split(' ')), which corrupts the source
    # frame and raises AttributeError if the cell is re-run; use locals.
    query_tokens = word_list[0].split(' ')
    title_tokens = word_list[1].split(' ')

    # NOTE(review): wmdistance comes from the global w2v_model even though
    # word_wv is passed in — kept as-is; consider passing the model explicitly.
    wmd_dist = w2v_model.wmdistance(query_tokens, title_tokens)
    if wmd_dist == np.inf:
        wmd_dist = np.nan  # wmdistance returns inf when one side is all-OOV

    global epoch_w2v
    epoch_w2v += 1
    if epoch_w2v % 5000 == 0:
        sys.stdout.write(str(epoch_w2v) + '\r')
        sys.stdout.flush()

    # Sentence vector = column-wise mean of the token embedding matrix
    # (rows for out-of-vocabulary tokens stay all-zero).
    word_vectors_query = np.zeros((len(query_tokens), w2v_size))
    word_vectors_title = np.zeros((len(title_tokens), w2v_size))
    for i in range(len(query_tokens)):
        if str(query_tokens[i]) in word_wv.vocab.keys():
            word_vectors_query[i][:] = word_wv[str(query_tokens[i])]
    for i in range(len(title_tokens)):
        if str(title_tokens[i]) in word_wv.vocab.keys():
            word_vectors_title[i][:] = word_wv[str(title_tokens[i])]

    mean_array_query = np.mean(word_vectors_query, axis=0)
    mean_array_title = np.mean(word_vectors_title, axis=0)

    dot_similarity = np.dot(mean_array_title, mean_array_query)
    norm_similarity = np.linalg.norm(mean_array_title - mean_array_query)
    # BUG FIX: guard against zero-norm sentence vectors (every token OOV),
    # which previously produced a 0/0 RuntimeWarning and nan/inf garbage.
    denom = np.linalg.norm(mean_array_title) * np.linalg.norm(mean_array_query)
    cosine_similarity = dot_similarity / denom if denom > 0 else np.nan
    return [dot_similarity, norm_similarity, cosine_similarity, wmd_dist]


print("~~~~~~~~~~~~~~~~~~~~~~测试集词向量特征~~~~~~~~~~~~~~~~~~~~~~~~~~")
import gc

w2v_model = Word2Vec.load('w2v_model/w2v_all_data_model.txt')
word_wv = w2v_model.wv

# Read the test set (already carrying the ctr features) and expand the four
# similarity values into named columns.
train_data = pd.read_csv("/home/kesci/work/1000W_stacking/data_sets/test_data_ctr.csv", usecols=['query', 'title'])
train_data = train_data.apply(lambda x: get_w2v_feature(x, word_wv, w2v_model.vector_size), axis=1, result_type='expand')
train_data.columns = ['title_query_dot_similarity', 'title_query_norm_similarity',
                      'title_query_cosine_similarity', 'title_query_wmd_dis']
train_data.to_csv("/home/kesci/work/1000W_stacking/data_sets/w2v_feature_test.csv", index=None)
del train_data
# Main feature-building script: cross features, edit-distance features, and
# the word co-occurrence statistics, saved to CSV for train and test.
# NOTE(review): this cell depends on train_data / test_data and on helpers
# (get_is_query_in_title_feature, get_jiaocha_type_feature,
# get_string_len_feature, get_title_query_levenshtein_distance[_rate])
# defined in earlier cells; on a fresh kernel it raises NameError, as the
# saved traceback in this cell's output shows.
print("~~~~~~~~~~~~~~~~~~~~~~交叉特征~~~~~~~~~~~~~~~~~~~~~~~~~~")
time1=time()
# Cross features: query-in-title flag plus pairwise interaction features.
train_data['is_query_in_title'] = train_data[['query', 'title']].apply(get_is_query_in_title_feature, axis = 1)
test_data['is_query_in_title'] = test_data[['query', 'title']].apply(get_is_query_in_title_feature, axis = 1)
jiaocha_type_list = [['query', 'title']]  # "jiaocha" = cross/interaction
train_data, test_data = get_jiaocha_type_feature(train_data, test_data, jiaocha_type_list)
train_data = get_string_len_feature(train_data)
test_data = get_string_len_feature(test_data)
time2=time()
print("时间：",time2-time1)
print("~~~~~~~~~~~~~~~~~~~~~~编辑距离特征~~~~~~~~~~~~~~~~~~~~~~~~~~")
# Levenshtein edit distance between title and query, plus its ratio to the
# title length (note: the rate helper reads the distance column just created).
train_data['title_query_leven'] = train_data[['title', 'query']].apply(get_title_query_levenshtein_distance, axis=1)
train_data['title_query_leven_rate'] = train_data[['title', 'title_query_leven']].apply(get_title_query_levenshtein_distance_rate, axis=1)
test_data['title_query_leven'] = test_data[['title', 'query']].apply(get_title_query_levenshtein_distance, axis=1)
test_data['title_query_leven_rate'] = test_data[['title', 'title_query_leven']].apply(get_title_query_levenshtein_distance_rate, axis=1)
time3=time()
print("时间：",time3-time2)
print("~~~~~~~~~~~~~~~~~~~~~~距离特征~~~~~~~~~~~~~~~~~~~~~~~~~~")
# Tokenize in place: the string columns become lists of tokens from here on,
# which get_word_statistic_feature expects. This makes the cell
# non-idempotent — re-running it on the same frames raises AttributeError.
train_data[['query']]=train_data[['query']].applymap(lambda x:x.split(' '))
train_data[['title']]=train_data[['title']].applymap(lambda x:x.split(' '))


time4=time()
print("转成list词表时间：",time4-time3)
col_list = [ ['query', 'title']]
train_data = get_word_statistic_feature(train_data, col_list)
# Save once with the raw columns, once without (the model only needs the
# numeric features), then free memory before processing the test set.
train_data.to_csv(data_store_path+"data_sets/train_feature_second_data.csv",index=None)
train_data=train_data.drop(['query','title'],axis=1)
train_data.to_csv(data_store_path+"data_sets/train_feature_second_no_query_and_title_data.csv",index=None)
del train_data
import gc
gc.collect()
# Same tokenize -> featurize -> save pipeline for the test set.
test_data[['query']]=test_data[['query']].applymap(lambda x:x.split(' '))
test_data[['title']]=test_data[['title']].applymap(lambda x:x.split(' '))
test_data = get_word_statistic_feature(test_data, col_list)
test_data.to_csv(data_store_path+"data_sets/test_feature_second_data.csv",index=None)
test_data=test_data.drop(['query','title'],axis=1)
test_data.to_csv(data_store_path+"data_sets/test_feature_second_no_query_and_title_data.csv",index=None)
del test_data
gc.collect()
time5=time()
print("距离特征时间：",time5-time4)
# The word2vec similarity features were moved to their own cell; this earlier
# inline version is kept for reference.
# print("~~~~~~~~~~~~~~~~~~~~~~词向量特征~~~~~~~~~~~~~~~~~~~~~~~~~~")
# w2v_model = Word2Vec.load('w2v_all_data_model.txt')
# word_wv = w2v_model.wv
# train_data['title_array'] = train_data['title'].map(lambda x : get_w2v_array(x, word_wv, w2v_model.vector_size))
# train_data['query_array'] = train_data['query'].map(lambda x : get_w2v_array(x, word_wv, w2v_model.vector_size))
# test_data['title_array'] = test_data['title'].map(lambda x : get_w2v_array(x, word_wv, w2v_model.vector_size))
# test_data['query_array'] = test_data['query'].map(lambda x : get_w2v_array(x, word_wv, w2v_model.vector_size))
# train_data = get_similarity_feature(train_data)
# test_data = get_similarity_feature(test_data)
# Load the pre-computed second-round feature tables and append the word2vec
# similarity columns (column-wise concat: the CSVs are row-aligned).
train_data = pd.read_csv(data_store_path + "data_sets/train_feature_second_no_query_and_title_data.csv")
test_data = pd.read_csv(data_store_path + "data_sets/test_feature_second_no_query_and_title_data.csv")
train_data_similarity = pd.read_csv(data_store_path + "word2vec_feature_parts/train_data_second_similarity.csv")
test_data_similarity = pd.read_csv(data_store_path + "word2vec_feature_parts/test_data_second_similarity.csv")
train_data = pd.concat([train_data, train_data_similarity], axis=1)
# BUG FIX: the test frame was concatenated with the TRAIN similarities
# (train_data_similarity), silently attaching wrong w2v features to every
# test row; use the test similarities that were loaded but never used.
test_data = pd.concat([test_data, test_data_similarity], axis=1)
del train_data_similarity
del test_data_similarity
import gc
gc.collect()
Will use it instead of argument\".format(alias))\n","name":"stderr"},{"output_type":"error","ename":"ValueError","evalue":"DataFrame.dtypes for data must be int, float or bool.\nDid not expect the data types in fields query, title","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mValueError\u001b[0m                                Traceback (most recent call last)","\u001b[0;32m<ipython-input-8-9d75c2d02ccc>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m     18\u001b[0m                 \u001b[0mnum_boost_round\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m530\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     19\u001b[0m                 \u001b[0mvalid_sets\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mlgb_eval\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 20\u001b[0;31m                 \u001b[0mverbose_eval\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mcategorical_feature\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"query_tfidf_kmeans\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     21\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     22\u001b[0m                 \u001b[0;31m#init_model=data_store_path+'/lgb_model/model_1000.txt'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/opt/conda/lib/python3.6/site-packages/lightgbm/engine.py\u001b[0m in \u001b[0;36mtrain\u001b[0;34m(params, train_set, num_boost_round, valid_sets, valid_names, fobj, feval, init_model, feature_name, categorical_feature, early_stopping_rounds, evals_result, verbose_eval, learning_rates, keep_training_booster, callbacks)\u001b[0m\n\u001b[1;32m    195\u001b[0m     \u001b[0;31m# construct booster\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    196\u001b[0m     
\u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 197\u001b[0;31m         \u001b[0mbooster\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mBooster\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mparams\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mparams\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrain_set\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtrain_set\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    198\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0mis_valid_contain_train\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    199\u001b[0m             \u001b[0mbooster\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_train_data_name\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrain_data_name\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/opt/conda/lib/python3.6/site-packages/lightgbm/basic.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, params, train_set, model_file, silent)\u001b[0m\n\u001b[1;32m   1550\u001b[0m             \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mhandle\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mctypes\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mc_void_p\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1551\u001b[0m             _safe_call(_LIB.LGBM_BoosterCreate(\n\u001b[0;32m-> 1552\u001b[0;31m                 \u001b[0mtrain_set\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconstruct\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1553\u001b[0m                 \u001b[0mc_str\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mparams_str\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1554\u001b[0m                 ctypes.byref(self.handle)))\n","\u001b[0;32m/opt/conda/lib/python3.6/site-packages/lightgbm/basic.py\u001b[0m in 
\u001b[0;36mconstruct\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    999\u001b[0m                                 \u001b[0minit_score\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minit_score\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpredictor\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_predictor\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1000\u001b[0m                                 \u001b[0msilent\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msilent\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeature_name\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfeature_name\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1001\u001b[0;31m                                 categorical_feature=self.categorical_feature, params=self.params)\n\u001b[0m\u001b[1;32m   1002\u001b[0m             \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfree_raw_data\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1003\u001b[0m                 \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/opt/conda/lib/python3.6/site-packages/lightgbm/basic.py\u001b[0m in \u001b[0;36m_lazy_init\u001b[0;34m(self, data, label, reference, weight, group, init_score, predictor, silent, feature_name, categorical_feature, params)\u001b[0m\n\u001b[1;32m    727\u001b[0m                                                                                              \u001b[0mfeature_name\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    728\u001b[0m                                                                                              
\u001b[0mcategorical_feature\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 729\u001b[0;31m                                                                                              self.pandas_categorical)\n\u001b[0m\u001b[1;32m    730\u001b[0m         \u001b[0mlabel\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_label_from_pandas\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlabel\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    731\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdata_has_header\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mFalse\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/opt/conda/lib/python3.6/site-packages/lightgbm/basic.py\u001b[0m in \u001b[0;36m_data_from_pandas\u001b[0;34m(data, feature_name, categorical_feature, pandas_categorical)\u001b[0m\n\u001b[1;32m    275\u001b[0m             msg = (\"DataFrame.dtypes for data must be int, float or bool.\\n\"\n\u001b[1;32m    276\u001b[0m                    \"Did not expect the data types in fields \")\n\u001b[0;32m--> 277\u001b[0;31m             \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmsg\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m', '\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbad_fields\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    278\u001b[0m         \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdata\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mastype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'float'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    279\u001b[0m     \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mValueError\u001b[0m: DataFrame.dtypes for data must be int, float or bool.\nDid not expect the data types in fields query, 
# Train a LightGBM binary classifier on the engineered features and save it.
fea = list(train_data.columns)
fea.remove('label')
# BUG FIX: if the raw text columns are still present they crash lgb.Dataset
# with "DataFrame.dtypes for data must be int, float or bool" (see the saved
# traceback in this cell's output); keep only numeric feature columns.
for text_col in ('query', 'title'):
    if text_col in fea:
        fea.remove(text_col)

# Hold out 2% of the rows, stratified on the label, for validation.
data_split = StratifiedShuffleSplit(n_splits=2, test_size=0.02, random_state=2018)
train_index, vaild_index = data_split.split(train_data['label'], train_data['label']).__next__()
X_vaild, y_vaild = train_data[fea].iloc[vaild_index], train_data['label'].iloc[vaild_index]

# BUG FIX: 'num_boost_round' and its alias 'n_estimators' were also set in
# params, conflicting with the explicit num_boost_round=530 argument below
# and triggering "Found `num_boost_round` in params" warnings; the explicit
# argument is the single source of truth now.
params = {
    'boosting_type': 'gbdt', 'num_leaves': 127, 'max_depth': -1, 'objective': 'binary',
    'subsample': 0.8, 'colsample_bytree': 1, 'subsample_freq': 1,
    'learning_rate': 0.008, 'random_state': 666,
    'silent': False, 'verbose': 2
}

lgb_train = lgb.Dataset(train_data[fea], train_data['label'], categorical_feature=["query_tfidf_kmeans"])
lgb_eval = lgb.Dataset(X_vaild, y_vaild, reference=lgb_train, categorical_feature=["query_tfidf_kmeans"])
gbm = lgb.train(params,
                lgb_train,
                num_boost_round=530,
                valid_sets=lgb_eval,
                verbose_eval=2, categorical_feature=["query_tfidf_kmeans"]
                # init_model=data_store_path+'/lgb_model/model_1000.txt'
                )  # early_stopping_rounds=20,
gbm.save_model(data_store_path + '/lgb_model/model_1130.txt')
print(gbm.best_score['valid_0'])
# Report the split-importance of each trained feature; the importance array
# from the booster is positionally aligned with the feature list `fea`.
for feat_name, importance in zip(fea, gbm.feature_importance()):
    print(feat_name, ":", importance)
  0\n5        0.295319                  0\n6        0.254328                  0\n7        0.074963                  0\n8        0.093010                  0\n9        0.089078                  0\n10       0.108305                  0\n11       0.090672                  0\n12       0.097364                  0\n13       0.305283                  0\n14       0.089078                  0\n15       0.261940                  0\n16       0.156779                  0\n17       0.122425                  0\n18       0.123829                  0\n19       0.118665                  0\n20       0.173421                  0\n21       0.100967                  0\n22       0.186004                  0\n23       0.245395                  0\n24       0.079377                  0\n25       0.197617                  0\n26       0.195732                  0\n27       0.273569                  0\n28       0.213380                  0\n29       0.167646                  0\n...           ...                ...\n4999970  0.119625                  1\n4999971  0.174626                  1\n4999972  0.121119                  1\n4999973  0.182720                  1\n4999974  0.192769                  0\n4999975  0.071363                  0\n4999976  0.197265                  0\n4999977  0.183121                  1\n4999978  0.102794                  0\n4999979  0.155970                  0\n4999980  0.101361                  0\n4999981  0.100471                  0\n4999982  0.094023                  0\n4999983  0.264850                  0\n4999984  0.099429                  0\n4999985  0.103738                  0\n4999986  0.094954                  0\n4999987  0.111041                  0\n4999988  0.106247                  0\n4999989  0.171292                  0\n4999990  0.078595                  0\n4999991  0.105094                  0\n4999992  0.197095                  0\n4999993  0.113042                  0\n4999994  0.271771                  0\n4999995  0.208554                  0\n4999996  0.107807  
                0\n4999997  0.215828                  0\n4999998  0.093277                  0\n4999999  0.114174                  0\n\n[5000000 rows x 2 columns]","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>0</th>\n      <th>is_query_in_title</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>0.121394</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>0.123962</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>0.194063</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>0.118342</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>0.302090</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>5</th>\n      <td>0.295319</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>6</th>\n      <td>0.254328</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>7</th>\n      <td>0.074963</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>8</th>\n      <td>0.093010</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>9</th>\n      <td>0.089078</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>10</th>\n      <td>0.108305</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>11</th>\n      <td>0.090672</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>12</th>\n      <td>0.097364</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>13</th>\n      <td>0.305283</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>14</th>\n      <td>0.089078</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>15</th>\n      <td>0.261940</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>16</th>\n      <td>0.156779</td>\n      
<td>0</td>\n    </tr>\n    <tr>\n      <th>17</th>\n      <td>0.122425</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>18</th>\n      <td>0.123829</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>19</th>\n      <td>0.118665</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>20</th>\n      <td>0.173421</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>21</th>\n      <td>0.100967</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>22</th>\n      <td>0.186004</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>23</th>\n      <td>0.245395</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>24</th>\n      <td>0.079377</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>25</th>\n      <td>0.197617</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>26</th>\n      <td>0.195732</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>27</th>\n      <td>0.273569</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>28</th>\n      <td>0.213380</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>29</th>\n      <td>0.167646</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>...</th>\n      <td>...</td>\n      <td>...</td>\n    </tr>\n    <tr>\n      <th>4999970</th>\n      <td>0.119625</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>4999971</th>\n      <td>0.174626</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>4999972</th>\n      <td>0.121119</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>4999973</th>\n      <td>0.182720</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>4999974</th>\n      <td>0.192769</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999975</th>\n      <td>0.071363</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999976</th>\n      <td>0.197265</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999977</th>\n      <td>0.183121</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>4999978</th>\n      <td>0.102794</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      
<th>4999979</th>\n      <td>0.155970</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999980</th>\n      <td>0.101361</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999981</th>\n      <td>0.100471</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999982</th>\n      <td>0.094023</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999983</th>\n      <td>0.264850</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999984</th>\n      <td>0.099429</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999985</th>\n      <td>0.103738</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999986</th>\n      <td>0.094954</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999987</th>\n      <td>0.111041</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999988</th>\n      <td>0.106247</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999989</th>\n      <td>0.171292</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999990</th>\n      <td>0.078595</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999991</th>\n      <td>0.105094</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999992</th>\n      <td>0.197095</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999993</th>\n      <td>0.113042</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999994</th>\n      <td>0.271771</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999995</th>\n      <td>0.208554</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999996</th>\n      <td>0.107807</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999997</th>\n      <td>0.215828</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999998</th>\n      <td>0.093277</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4999999</th>\n      <td>0.114174</td>\n      <td>0</td>\n    </tr>\n  </tbody>\n</table>\n<p>5000000 rows × 2 columns</p>\n</div>"},"execution_count":35}],"source":"# query_train_list=[]\n# for epoch,part_data in 
enumerate(pd.read_csv(data_path+\"train.csv\",names=['query_id','query','query_title_id','title','label'],usecols=[1,3,4],chunksize=1000000,header=None)):\n#     all_data_train_list.append(part_data)\n#     print(epoch)\n# all_data_train=pd.concat(all_data_train_list,axis=0)\n# del all_data_train_list\n\n# ###################此处为一个trick，就是给结果平滑一下\n# y_pred_df\n# is_query_in_title_feature=pd.read_csv('test_feature_first_no_query_and_title_data.csv',sep=',',usecols=['is_query_in_title'])\n# y_pred_df_query_in=pd.concat([y_pred_df,is_query_in_title_feature],axis=1)\n# y_pred_df_query_in\n# ########################加载进了 is_query_in_title_feature\n# ######下面是平滑操作\n","execution_count":35},{"metadata":{"id":"AB7F8FB3BF654AC09978274ACB5746BA","mdEditEnable":false},"cell_type":"markdown","source":"# 这个是加载已训练lgb模型"},{"metadata":{"id":"3BD7DC6F7F834BD8865384AB9FF5A985","collapsed":false,"scrolled":false},"cell_type":"code","outputs":[],"source":"# model2=lgb.Booster(model_file='model_first.txt')  #init model\n# model2.feature_name()","execution_count":33},{"metadata":{"id":"819E5822CA0D42A787F5CD003884A839","mdEditEnable":false},"cell_type":"markdown","source":"## 预测"},{"metadata":{"id":"ACF785992FC249628EACBFFC522BAEC5","collapsed":true,"scrolled":false},"cell_type":"code","outputs":[{"output_type":"execute_result","metadata":{},"data":{"text/plain":"                0\n0        0.124806\n1        0.134846\n2        0.193158\n3        0.122989\n4        0.343902\n5        0.311168\n6        0.279804\n7        0.069874\n8        0.070957\n9        0.062102\n10       0.089635\n11       0.075339\n12       0.070402\n13       0.259307\n14       0.064506\n15       0.299004\n16       0.178426\n17       0.119241\n18       0.116218\n19       0.118594\n20       0.179241\n21       0.096436\n22       0.177628\n23       0.256226\n24       0.064564\n25       0.145517\n26       0.164664\n27       0.264395\n28       0.206900\n29       0.163022\n...           
...\n4999970  0.148689\n4999971  0.251935\n4999972  0.161969\n4999973  0.234953\n4999974  0.220775\n4999975  0.043422\n4999976  0.201651\n4999977  0.210616\n4999978  0.116159\n4999979  0.148626\n4999980  0.080823\n4999981  0.086149\n4999982  0.064921\n4999983  0.233967\n4999984  0.072611\n4999985  0.081725\n4999986  0.075732\n4999987  0.081922\n4999988  0.101279\n4999989  0.155103\n4999990  0.075159\n4999991  0.089169\n4999992  0.089025\n4999993  0.087974\n4999994  0.269730\n4999995  0.158783\n4999996  0.105844\n4999997  0.194556\n4999998  0.062046\n4999999  0.103190\n\n[5000000 rows x 1 columns]","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>0</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>0.124806</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>0.134846</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>0.193158</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>0.122989</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>0.343902</td>\n    </tr>\n    <tr>\n      <th>5</th>\n      <td>0.311168</td>\n    </tr>\n    <tr>\n      <th>6</th>\n      <td>0.279804</td>\n    </tr>\n    <tr>\n      <th>7</th>\n      <td>0.069874</td>\n    </tr>\n    <tr>\n      <th>8</th>\n      <td>0.070957</td>\n    </tr>\n    <tr>\n      <th>9</th>\n      <td>0.062102</td>\n    </tr>\n    <tr>\n      <th>10</th>\n      <td>0.089635</td>\n    </tr>\n    <tr>\n      <th>11</th>\n      <td>0.075339</td>\n    </tr>\n    <tr>\n      <th>12</th>\n      <td>0.070402</td>\n    </tr>\n    <tr>\n      <th>13</th>\n      <td>0.259307</td>\n    </tr>\n    <tr>\n      <th>14</th>\n      <td>0.064506</td>\n    </tr>\n    
<tr>\n      <th>15</th>\n      <td>0.299004</td>\n    </tr>\n    <tr>\n      <th>16</th>\n      <td>0.178426</td>\n    </tr>\n    <tr>\n      <th>17</th>\n      <td>0.119241</td>\n    </tr>\n    <tr>\n      <th>18</th>\n      <td>0.116218</td>\n    </tr>\n    <tr>\n      <th>19</th>\n      <td>0.118594</td>\n    </tr>\n    <tr>\n      <th>20</th>\n      <td>0.179241</td>\n    </tr>\n    <tr>\n      <th>21</th>\n      <td>0.096436</td>\n    </tr>\n    <tr>\n      <th>22</th>\n      <td>0.177628</td>\n    </tr>\n    <tr>\n      <th>23</th>\n      <td>0.256226</td>\n    </tr>\n    <tr>\n      <th>24</th>\n      <td>0.064564</td>\n    </tr>\n    <tr>\n      <th>25</th>\n      <td>0.145517</td>\n    </tr>\n    <tr>\n      <th>26</th>\n      <td>0.164664</td>\n    </tr>\n    <tr>\n      <th>27</th>\n      <td>0.264395</td>\n    </tr>\n    <tr>\n      <th>28</th>\n      <td>0.206900</td>\n    </tr>\n    <tr>\n      <th>29</th>\n      <td>0.163022</td>\n    </tr>\n    <tr>\n      <th>...</th>\n      <td>...</td>\n    </tr>\n    <tr>\n      <th>4999970</th>\n      <td>0.148689</td>\n    </tr>\n    <tr>\n      <th>4999971</th>\n      <td>0.251935</td>\n    </tr>\n    <tr>\n      <th>4999972</th>\n      <td>0.161969</td>\n    </tr>\n    <tr>\n      <th>4999973</th>\n      <td>0.234953</td>\n    </tr>\n    <tr>\n      <th>4999974</th>\n      <td>0.220775</td>\n    </tr>\n    <tr>\n      <th>4999975</th>\n      <td>0.043422</td>\n    </tr>\n    <tr>\n      <th>4999976</th>\n      <td>0.201651</td>\n    </tr>\n    <tr>\n      <th>4999977</th>\n      <td>0.210616</td>\n    </tr>\n    <tr>\n      <th>4999978</th>\n      <td>0.116159</td>\n    </tr>\n    <tr>\n      <th>4999979</th>\n      <td>0.148626</td>\n    </tr>\n    <tr>\n      <th>4999980</th>\n      <td>0.080823</td>\n    </tr>\n    <tr>\n      <th>4999981</th>\n      <td>0.086149</td>\n    </tr>\n    <tr>\n      <th>4999982</th>\n      <td>0.064921</td>\n    </tr>\n    <tr>\n      <th>4999983</th>\n      
<td>0.233967</td>\n    </tr>\n    <tr>\n      <th>4999984</th>\n      <td>0.072611</td>\n    </tr>\n    <tr>\n      <th>4999985</th>\n      <td>0.081725</td>\n    </tr>\n    <tr>\n      <th>4999986</th>\n      <td>0.075732</td>\n    </tr>\n    <tr>\n      <th>4999987</th>\n      <td>0.081922</td>\n    </tr>\n    <tr>\n      <th>4999988</th>\n      <td>0.101279</td>\n    </tr>\n    <tr>\n      <th>4999989</th>\n      <td>0.155103</td>\n    </tr>\n    <tr>\n      <th>4999990</th>\n      <td>0.075159</td>\n    </tr>\n    <tr>\n      <th>4999991</th>\n      <td>0.089169</td>\n    </tr>\n    <tr>\n      <th>4999992</th>\n      <td>0.089025</td>\n    </tr>\n    <tr>\n      <th>4999993</th>\n      <td>0.087974</td>\n    </tr>\n    <tr>\n      <th>4999994</th>\n      <td>0.269730</td>\n    </tr>\n    <tr>\n      <th>4999995</th>\n      <td>0.158783</td>\n    </tr>\n    <tr>\n      <th>4999996</th>\n      <td>0.105844</td>\n    </tr>\n    <tr>\n      <th>4999997</th>\n      <td>0.194556</td>\n    </tr>\n    <tr>\n      <th>4999998</th>\n      <td>0.062046</td>\n    </tr>\n    <tr>\n      <th>4999999</th>\n      <td>0.103190</td>\n    </tr>\n  </tbody>\n</table>\n<p>5000000 rows × 1 columns</p>\n</div>"},"execution_count":22}],"source":"y_pred = gbm.predict(test_data[fea], num_iteration=gbm.best_iteration)\ny_pred_df=pd.DataFrame(y_pred)\ny_pred_df.to_csv(data_store_path+\"sub/result_1130.csv\",index=None)\ny_pred_df######这个结果只是单列结果，需要和query_id和title_id拼接后提交","execution_count":22},{"metadata":{"id":"38E743F1934348AE89776541865B6EBE","collapsed":true,"scrolled":false},"cell_type":"code","outputs":[{"output_type":"execute_result","metadata":{},"data":{"text/plain":"         query_id  query_title_id\n0               1               3\n1               1               1\n2               1               4\n3               1               2\n4               2               1\n5               2               2\n6               2               3\n7               3               6\n8  
             3               5\n9               3               8\n10              3               2\n11              3               1\n12              3               4\n13              3               3\n14              3               7\n15              4               1\n16              4               2\n17              4               3\n18              4               5\n19              4               4\n20              4               6\n21              5               3\n22              5               5\n23              5               4\n24              5               2\n25              5               1\n26              6               3\n27              6               1\n28              6               2\n29              7               1\n...           ...             ...\n4999970    979559               1\n4999971    979559               3\n4999972    979559               2\n4999973    979559               4\n4999974    979560               1\n4999975    979560               2\n4999976    979560               3\n4999977    979561               3\n4999978    979561               2\n4999979    979561               1\n4999980    979562               5\n4999981    979562               1\n4999982    979562               8\n4999983    979562               2\n4999984    979562               7\n4999985    979562               3\n4999986    979562               4\n4999987    979562               6\n4999988    979563               4\n4999989    979563               3\n4999990    979563               2\n4999991    979563               1\n4999992    979564               6\n4999993    979564               5\n4999994    979564               4\n4999995    979564               3\n4999996    979564               1\n4999997    979564               8\n4999998    979564               7\n4999999    979564               2\n\n[5000000 rows x 2 columns]","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    
}\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>query_id</th>\n      <th>query_title_id</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>1</td>\n      <td>3</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>1</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>1</td>\n      <td>4</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>1</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>2</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>5</th>\n      <td>2</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>6</th>\n      <td>2</td>\n      <td>3</td>\n    </tr>\n    <tr>\n      <th>7</th>\n      <td>3</td>\n      <td>6</td>\n    </tr>\n    <tr>\n      <th>8</th>\n      <td>3</td>\n      <td>5</td>\n    </tr>\n    <tr>\n      <th>9</th>\n      <td>3</td>\n      <td>8</td>\n    </tr>\n    <tr>\n      <th>10</th>\n      <td>3</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>11</th>\n      <td>3</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>12</th>\n      <td>3</td>\n      <td>4</td>\n    </tr>\n    <tr>\n      <th>13</th>\n      <td>3</td>\n      <td>3</td>\n    </tr>\n    <tr>\n      <th>14</th>\n      <td>3</td>\n      <td>7</td>\n    </tr>\n    <tr>\n      <th>15</th>\n      <td>4</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>16</th>\n      <td>4</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>17</th>\n      <td>4</td>\n      <td>3</td>\n    </tr>\n    <tr>\n      <th>18</th>\n      <td>4</td>\n      <td>5</td>\n    </tr>\n    <tr>\n      <th>19</th>\n      <td>4</td>\n      <td>4</td>\n    </tr>\n    <tr>\n      <th>20</th>\n      <td>4</td>\n      <td>6</td>\n    </tr>\n    <tr>\n      <th>21</th>\n      <td>5</td>\n      <td>3</td>\n    
</tr>\n    <tr>\n      <th>22</th>\n      <td>5</td>\n      <td>5</td>\n    </tr>\n    <tr>\n      <th>23</th>\n      <td>5</td>\n      <td>4</td>\n    </tr>\n    <tr>\n      <th>24</th>\n      <td>5</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>25</th>\n      <td>5</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>26</th>\n      <td>6</td>\n      <td>3</td>\n    </tr>\n    <tr>\n      <th>27</th>\n      <td>6</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>28</th>\n      <td>6</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>29</th>\n      <td>7</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>...</th>\n      <td>...</td>\n      <td>...</td>\n    </tr>\n    <tr>\n      <th>4999970</th>\n      <td>979559</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>4999971</th>\n      <td>979559</td>\n      <td>3</td>\n    </tr>\n    <tr>\n      <th>4999972</th>\n      <td>979559</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>4999973</th>\n      <td>979559</td>\n      <td>4</td>\n    </tr>\n    <tr>\n      <th>4999974</th>\n      <td>979560</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>4999975</th>\n      <td>979560</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>4999976</th>\n      <td>979560</td>\n      <td>3</td>\n    </tr>\n    <tr>\n      <th>4999977</th>\n      <td>979561</td>\n      <td>3</td>\n    </tr>\n    <tr>\n      <th>4999978</th>\n      <td>979561</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>4999979</th>\n      <td>979561</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>4999980</th>\n      <td>979562</td>\n      <td>5</td>\n    </tr>\n    <tr>\n      <th>4999981</th>\n      <td>979562</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>4999982</th>\n      <td>979562</td>\n      <td>8</td>\n    </tr>\n    <tr>\n      <th>4999983</th>\n      <td>979562</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>4999984</th>\n      <td>979562</td>\n      <td>7</td>\n    </tr>\n    <tr>\n    
  <th>4999985</th>\n      <td>979562</td>\n      <td>3</td>\n    </tr>\n    <tr>\n      <th>4999986</th>\n      <td>979562</td>\n      <td>4</td>\n    </tr>\n    <tr>\n      <th>4999987</th>\n      <td>979562</td>\n      <td>6</td>\n    </tr>\n    <tr>\n      <th>4999988</th>\n      <td>979563</td>\n      <td>4</td>\n    </tr>\n    <tr>\n      <th>4999989</th>\n      <td>979563</td>\n      <td>3</td>\n    </tr>\n    <tr>\n      <th>4999990</th>\n      <td>979563</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>4999991</th>\n      <td>979563</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>4999992</th>\n      <td>979564</td>\n      <td>6</td>\n    </tr>\n    <tr>\n      <th>4999993</th>\n      <td>979564</td>\n      <td>5</td>\n    </tr>\n    <tr>\n      <th>4999994</th>\n      <td>979564</td>\n      <td>4</td>\n    </tr>\n    <tr>\n      <th>4999995</th>\n      <td>979564</td>\n      <td>3</td>\n    </tr>\n    <tr>\n      <th>4999996</th>\n      <td>979564</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>4999997</th>\n      <td>979564</td>\n      <td>8</td>\n    </tr>\n    <tr>\n      <th>4999998</th>\n      <td>979564</td>\n      <td>7</td>\n    </tr>\n    <tr>\n      <th>4999999</th>\n      <td>979564</td>\n      <td>2</td>\n    </tr>\n  </tbody>\n</table>\n<p>5000000 rows × 2 columns</p>\n</div>"},"execution_count":23}],"source":"all_data_test_list_pred_need=[]\nfor epoch,part_data in enumerate(pd.read_csv(data_path+'test.csv',sep=',',names=['query_id','query','query_title_id','title'],usecols=['query_id','query_title_id'],chunksize=1000000,header=None)):\n    all_data_test_list_pred_need.append(part_data)\ntest_data_pred_need=pd.concat(all_data_test_list_pred_need,axis=0)\n#,usecols=[1,3]\ntest_data_pred_need#整出query_id和title_id","execution_count":23},{"metadata":{"id":"2936FAE3E1D9404B86EE4F651F86E475","mdEditEnable":false},"cell_type":"markdown","source":"# 
提交sub"},{"metadata":{"id":"D7BB429968F047B9BBB71850E94294BA","collapsed":true,"scrolled":false},"cell_type":"code","outputs":[{"output_type":"execute_result","metadata":{},"data":{"text/plain":"         query_id  query_title_id         0\n0               1               3  0.124806\n1               1               1  0.134846\n2               1               4  0.193158\n3               1               2  0.122989\n4               2               1  0.343902\n5               2               2  0.311168\n6               2               3  0.279804\n7               3               6  0.069874\n8               3               5  0.070957\n9               3               8  0.062102\n10              3               2  0.089635\n11              3               1  0.075339\n12              3               4  0.070402\n13              3               3  0.259307\n14              3               7  0.064506\n15              4               1  0.299004\n16              4               2  0.178426\n17              4               3  0.119241\n18              4               5  0.116218\n19              4               4  0.118594\n20              4               6  0.179241\n21              5               3  0.096436\n22              5               5  0.177628\n23              5               4  0.256226\n24              5               2  0.064564\n25              5               1  0.145517\n26              6               3  0.164664\n27              6               1  0.264395\n28              6               2  0.206900\n29              7               1  0.163022\n...           ...             ...       
...\n4999970    979559               1  0.148689\n4999971    979559               3  0.251935\n4999972    979559               2  0.161969\n4999973    979559               4  0.234953\n4999974    979560               1  0.220775\n4999975    979560               2  0.043422\n4999976    979560               3  0.201651\n4999977    979561               3  0.210616\n4999978    979561               2  0.116159\n4999979    979561               1  0.148626\n4999980    979562               5  0.080823\n4999981    979562               1  0.086149\n4999982    979562               8  0.064921\n4999983    979562               2  0.233967\n4999984    979562               7  0.072611\n4999985    979562               3  0.081725\n4999986    979562               4  0.075732\n4999987    979562               6  0.081922\n4999988    979563               4  0.101279\n4999989    979563               3  0.155103\n4999990    979563               2  0.075159\n4999991    979563               1  0.089169\n4999992    979564               6  0.089025\n4999993    979564               5  0.087974\n4999994    979564               4  0.269730\n4999995    979564               3  0.158783\n4999996    979564               1  0.105844\n4999997    979564               8  0.194556\n4999998    979564               7  0.062046\n4999999    979564               2  0.103190\n\n[5000000 rows x 3 columns]","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>query_id</th>\n      <th>query_title_id</th>\n      <th>0</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>1</td>\n      <td>3</td>\n      <td>0.124806</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      
<td>1</td>\n      <td>1</td>\n      <td>0.134846</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>1</td>\n      <td>4</td>\n      <td>0.193158</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>1</td>\n      <td>2</td>\n      <td>0.122989</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>2</td>\n      <td>1</td>\n      <td>0.343902</td>\n    </tr>\n    <tr>\n      <th>5</th>\n      <td>2</td>\n      <td>2</td>\n      <td>0.311168</td>\n    </tr>\n    <tr>\n      <th>6</th>\n      <td>2</td>\n      <td>3</td>\n      <td>0.279804</td>\n    </tr>\n    <tr>\n      <th>7</th>\n      <td>3</td>\n      <td>6</td>\n      <td>0.069874</td>\n    </tr>\n    <tr>\n      <th>8</th>\n      <td>3</td>\n      <td>5</td>\n      <td>0.070957</td>\n    </tr>\n    <tr>\n      <th>9</th>\n      <td>3</td>\n      <td>8</td>\n      <td>0.062102</td>\n    </tr>\n    <tr>\n      <th>10</th>\n      <td>3</td>\n      <td>2</td>\n      <td>0.089635</td>\n    </tr>\n    <tr>\n      <th>11</th>\n      <td>3</td>\n      <td>1</td>\n      <td>0.075339</td>\n    </tr>\n    <tr>\n      <th>12</th>\n      <td>3</td>\n      <td>4</td>\n      <td>0.070402</td>\n    </tr>\n    <tr>\n      <th>13</th>\n      <td>3</td>\n      <td>3</td>\n      <td>0.259307</td>\n    </tr>\n    <tr>\n      <th>14</th>\n      <td>3</td>\n      <td>7</td>\n      <td>0.064506</td>\n    </tr>\n    <tr>\n      <th>15</th>\n      <td>4</td>\n      <td>1</td>\n      <td>0.299004</td>\n    </tr>\n    <tr>\n      <th>16</th>\n      <td>4</td>\n      <td>2</td>\n      <td>0.178426</td>\n    </tr>\n    <tr>\n      <th>17</th>\n      <td>4</td>\n      <td>3</td>\n      <td>0.119241</td>\n    </tr>\n    <tr>\n      <th>18</th>\n      <td>4</td>\n      <td>5</td>\n      <td>0.116218</td>\n    </tr>\n    <tr>\n      <th>19</th>\n      <td>4</td>\n      <td>4</td>\n      <td>0.118594</td>\n    </tr>\n    <tr>\n      <th>20</th>\n      <td>4</td>\n      <td>6</td>\n      <td>0.179241</td>\n    </tr>\n    <tr>\n      
<th>21</th>\n      <td>5</td>\n      <td>3</td>\n      <td>0.096436</td>\n    </tr>\n    <tr>\n      <th>22</th>\n      <td>5</td>\n      <td>5</td>\n      <td>0.177628</td>\n    </tr>\n    <tr>\n      <th>23</th>\n      <td>5</td>\n      <td>4</td>\n      <td>0.256226</td>\n    </tr>\n    <tr>\n      <th>24</th>\n      <td>5</td>\n      <td>2</td>\n      <td>0.064564</td>\n    </tr>\n    <tr>\n      <th>25</th>\n      <td>5</td>\n      <td>1</td>\n      <td>0.145517</td>\n    </tr>\n    <tr>\n      <th>26</th>\n      <td>6</td>\n      <td>3</td>\n      <td>0.164664</td>\n    </tr>\n    <tr>\n      <th>27</th>\n      <td>6</td>\n      <td>1</td>\n      <td>0.264395</td>\n    </tr>\n    <tr>\n      <th>28</th>\n      <td>6</td>\n      <td>2</td>\n      <td>0.206900</td>\n    </tr>\n    <tr>\n      <th>29</th>\n      <td>7</td>\n      <td>1</td>\n      <td>0.163022</td>\n    </tr>\n    <tr>\n      <th>...</th>\n      <td>...</td>\n      <td>...</td>\n      <td>...</td>\n    </tr>\n    <tr>\n      <th>4999970</th>\n      <td>979559</td>\n      <td>1</td>\n      <td>0.148689</td>\n    </tr>\n    <tr>\n      <th>4999971</th>\n      <td>979559</td>\n      <td>3</td>\n      <td>0.251935</td>\n    </tr>\n    <tr>\n      <th>4999972</th>\n      <td>979559</td>\n      <td>2</td>\n      <td>0.161969</td>\n    </tr>\n    <tr>\n      <th>4999973</th>\n      <td>979559</td>\n      <td>4</td>\n      <td>0.234953</td>\n    </tr>\n    <tr>\n      <th>4999974</th>\n      <td>979560</td>\n      <td>1</td>\n      <td>0.220775</td>\n    </tr>\n    <tr>\n      <th>4999975</th>\n      <td>979560</td>\n      <td>2</td>\n      <td>0.043422</td>\n    </tr>\n    <tr>\n      <th>4999976</th>\n      <td>979560</td>\n      <td>3</td>\n      <td>0.201651</td>\n    </tr>\n    <tr>\n      <th>4999977</th>\n      <td>979561</td>\n      <td>3</td>\n      <td>0.210616</td>\n    </tr>\n    <tr>\n      <th>4999978</th>\n      <td>979561</td>\n      <td>2</td>\n      <td>0.116159</td>\n    </tr>\n    
<tr>\n      <th>4999979</th>\n      <td>979561</td>\n      <td>1</td>\n      <td>0.148626</td>\n    </tr>\n    <tr>\n      <th>4999980</th>\n      <td>979562</td>\n      <td>5</td>\n      <td>0.080823</td>\n    </tr>\n    <tr>\n      <th>4999981</th>\n      <td>979562</td>\n      <td>1</td>\n      <td>0.086149</td>\n    </tr>\n    <tr>\n      <th>4999982</th>\n      <td>979562</td>\n      <td>8</td>\n      <td>0.064921</td>\n    </tr>\n    <tr>\n      <th>4999983</th>\n      <td>979562</td>\n      <td>2</td>\n      <td>0.233967</td>\n    </tr>\n    <tr>\n      <th>4999984</th>\n      <td>979562</td>\n      <td>7</td>\n      <td>0.072611</td>\n    </tr>\n    <tr>\n      <th>4999985</th>\n      <td>979562</td>\n      <td>3</td>\n      <td>0.081725</td>\n    </tr>\n    <tr>\n      <th>4999986</th>\n      <td>979562</td>\n      <td>4</td>\n      <td>0.075732</td>\n    </tr>\n    <tr>\n      <th>4999987</th>\n      <td>979562</td>\n      <td>6</td>\n      <td>0.081922</td>\n    </tr>\n    <tr>\n      <th>4999988</th>\n      <td>979563</td>\n      <td>4</td>\n      <td>0.101279</td>\n    </tr>\n    <tr>\n      <th>4999989</th>\n      <td>979563</td>\n      <td>3</td>\n      <td>0.155103</td>\n    </tr>\n    <tr>\n      <th>4999990</th>\n      <td>979563</td>\n      <td>2</td>\n      <td>0.075159</td>\n    </tr>\n    <tr>\n      <th>4999991</th>\n      <td>979563</td>\n      <td>1</td>\n      <td>0.089169</td>\n    </tr>\n    <tr>\n      <th>4999992</th>\n      <td>979564</td>\n      <td>6</td>\n      <td>0.089025</td>\n    </tr>\n    <tr>\n      <th>4999993</th>\n      <td>979564</td>\n      <td>5</td>\n      <td>0.087974</td>\n    </tr>\n    <tr>\n      <th>4999994</th>\n      <td>979564</td>\n      <td>4</td>\n      <td>0.269730</td>\n    </tr>\n    <tr>\n      <th>4999995</th>\n      <td>979564</td>\n      <td>3</td>\n      <td>0.158783</td>\n    </tr>\n    <tr>\n      <th>4999996</th>\n      <td>979564</td>\n      <td>1</td>\n      <td>0.105844</td>\n    </tr>\n    
<tr>\n      <th>4999997</th>\n      <td>979564</td>\n      <td>8</td>\n      <td>0.194556</td>\n    </tr>\n    <tr>\n      <th>4999998</th>\n      <td>979564</td>\n      <td>7</td>\n      <td>0.062046</td>\n    </tr>\n    <tr>\n      <th>4999999</th>\n      <td>979564</td>\n      <td>2</td>\n      <td>0.103190</td>\n    </tr>\n  </tbody>\n</table>\n<p>5000000 rows × 3 columns</p>\n</div>"},"execution_count":36}],"source":"\nresult=pd.concat([test_data_pred_need,y_pred_df],axis=1)\nresult.to_csv(data_store_path+\"sub/sub_1130_rong_700.csv\",header=None,index=None)#####合成三列做 最终的提交结果\n# y_pred_df\n# result[0]\nresult","execution_count":36},{"metadata":{"id":"8BBC821E8B45475888D91FE8A34AF65C","collapsed":false,"scrolled":false},"cell_type":"code","outputs":[{"output_type":"stream","text":"wget: /opt/conda/lib/libcrypto.so.1.0.0: no version information available (required by wget)\nwget: /opt/conda/lib/libssl.so.1.0.0: no version information available (required by wget)\nwget: /opt/conda/lib/libssl.so.1.0.0: no version information available (required by wget)\n2019-06-14 03:22:49 URL:https://www.heywhale.com/kesci_submit [7842088/7842088] -> \"kesci_submit\" [1]\nKesci Submit Tool\nResult File: /home/kesci/work/1000Wlgb/sub/sub_1130_rong_700.csv (136.77 MiB)\nUploaded.       
\n====================\nSubmit Success.\n{\"Stage\":0,\"Status\":0,\"ShownInHistory\":true,\"IsAucResult\":true,\"Selected\":false,\"_id\":\"5d031318902bad002c294e02\",\"Competition\":\"5cc51043f71088002c5b8840\",\"Team\":\"5cea3ae3cde7ed002b540c8d\",\"UploadDate\":\"2019-06-14T03:23:04.381Z\",\"Final\":true,\"Response\":\"\",\"SubmissionResults\":[],\"IP\":\"52.82.19.99\",\"FingerPrint\":\"\",\"UserAgent\":\"Go-http-client/1.1\",\"ResultFileName\":\"1560482571202c6f049.csv\",\"ResultFileRealName\":\"sub_1130_rong_700.csv\",\"ResultFileSize\":0,\"ReviewInfos\":[],\"__v\":0}\n\n","name":"stdout"}],"source":"!wget -nv -O kesci_submit https://www.heywhale.com/kesci_submit&&chmod +x kesci_submit\n!./kesci_submit -token 03111ac783b57b48 -file /home/kesci/work/1000Wlgb/sub/sub_1130_rong_700.csv","execution_count":37},{"metadata":{"id":"F10E6AADF1BE40BFA84102DD87E84509"},"cell_type":"code","outputs":[],"source":"","execution_count":null}],"metadata":{"kernelspec":{"name":"python3","display_name":"Python 3","language":"python"},"language_info":{"name":"python","version":"3.6.4","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"}},"nbformat":4,"nbformat_minor":2}