{"cells":[{"cell_type":"markdown","source":"# 饭店流量预测","metadata":{}},{"cell_type":"markdown","source":"饭店来客数据","metadata":{}},{"cell_type":"code","source":"import pandas as pd\n\nair_visit = pd.read_csv('./datalab/62822/air_visit_data.csv')\nair_visit.head()","metadata":{"trusted":true},"execution_count":1,"outputs":[{"execution_count":1,"output_type":"execute_result","data":{"text/plain":"           air_store_id  visit_date  visitors\n0  air_ba937bf13d40fb24  2016-01-13        25\n1  air_ba937bf13d40fb24  2016-01-14        32\n2  air_ba937bf13d40fb24  2016-01-15        29\n3  air_ba937bf13d40fb24  2016-01-16        22\n4  air_ba937bf13d40fb24  2016-01-18         6","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>air_store_id</th>\n      <th>visit_date</th>\n      <th>visitors</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>air_ba937bf13d40fb24</td>\n      <td>2016-01-13</td>\n      <td>25</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>air_ba937bf13d40fb24</td>\n      <td>2016-01-14</td>\n      <td>32</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>air_ba937bf13d40fb24</td>\n      <td>2016-01-15</td>\n      <td>29</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>air_ba937bf13d40fb24</td>\n      <td>2016-01-16</td>\n      <td>22</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>air_ba937bf13d40fb24</td>\n      <td>2016-01-18</td>\n      <td>6</td>\n    </tr>\n  </tbody>\n</table>\n</div>"},"metadata":{}}]},{"cell_type":"code","source":"air_visit.index = pd.to_datetime(air_visit['visit_date'])\nair_visit.head()","metadata":{"trusted":true},"execution_count":2,"outputs":[{"execution_count":2,"output_type":"execute_result","data":{"text/plain":"                    air_store_id  visit_date  visitors\nvisit_date                                            \n2016-01-13  air_ba937bf13d40fb24  2016-01-13        25\n2016-01-14  air_ba937bf13d40fb24  2016-01-14        32\n2016-01-15  air_ba937bf13d40fb24  2016-01-15        29\n2016-01-16  air_ba937bf13d40fb24  2016-01-16        22\n2016-01-18  air_ba937bf13d40fb24  2016-01-18         6","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>air_store_id</th>\n      <th>visit_date</th>\n      <th>visitors</th>\n    </tr>\n    <tr>\n      <th>visit_date</th>\n      <th></th>\n      <th></th>\n      <th></th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>2016-01-13</th>\n      <td>air_ba937bf13d40fb24</td>\n      <td>2016-01-13</td>\n      <td>25</td>\n    </tr>\n    <tr>\n      <th>2016-01-14</th>\n      <td>air_ba937bf13d40fb24</td>\n      <td>2016-01-14</td>\n      <td>32</td>\n    </tr>\n    <tr>\n      <th>2016-01-15</th>\n      <td>air_ba937bf13d40fb24</td>\n      <td>2016-01-15</td>\n      <td>29</td>\n    </tr>\n    <tr>\n      <th>2016-01-16</th>\n      <td>air_ba937bf13d40fb24</td>\n      <td>2016-01-16</td>\n      <td>22</td>\n    </tr>\n    <tr>\n      <th>2016-01-18</th>\n      <td>air_ba937bf13d40fb24</td>\n      <td>2016-01-18</td>\n      <td>6</td>\n    </tr>\n  </tbody>\n</table>\n</div>"},"metadata":{}}]},{"cell_type":"markdown","source":"按天来算","metadata":{}},{"cell_type":"code","source":"air_visit = air_visit.groupby('air_store_id').apply(lambda g: g['visitors'].resample('1d').sum()).reset_index()\nair_visit.head()","metadata":{"trusted":true},"execution_count":3,"outputs":[{"execution_count":3,"output_type":"execute_result","data":{"text/plain":"           air_store_id visit_date  visitors\n0  air_00a91d42b08b08d9 2016-07-01        35\n1  air_00a91d42b08b08d9 2016-07-02         9\n2  air_00a91d42b08b08d9 2016-07-03         0\n3  air_00a91d42b08b08d9 2016-07-04        20\n4  air_00a91d42b08b08d9 2016-07-05        25","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>air_store_id</th>\n      <th>visit_date</th>\n      <th>visitors</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2016-07-01</td>\n      <td>35</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2016-07-02</td>\n      <td>9</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2016-07-03</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2016-07-04</td>\n      <td>20</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2016-07-05</td>\n      <td>25</td>\n    </tr>\n  </tbody>\n</table>\n</div>"},"metadata":{}}]},{"cell_type":"markdown","source":"缺失值填0","metadata":{}},{"cell_type":"code","source":"air_visit['visit_date'] = air_visit['visit_date'].dt.strftime('%Y-%m-%d')\nair_visit['was_nil'] = air_visit['visitors'].isnull()\nair_visit['visitors'].fillna(0, inplace=True)\n\nair_visit.head()","metadata":{"trusted":true},"execution_count":4,"outputs":[{"execution_count":4,"output_type":"execute_result","data":{"text/plain":"           air_store_id  visit_date  visitors  was_nil\n0  air_00a91d42b08b08d9  2016-07-01        35    False\n1  air_00a91d42b08b08d9  2016-07-02         9    False\n2  air_00a91d42b08b08d9  2016-07-03         0    False\n3  air_00a91d42b08b08d9  2016-07-04        20    False\n4  air_00a91d42b08b08d9  2016-07-05        25    False","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>air_store_id</th>\n      <th>visit_date</th>\n      <th>visitors</th>\n      <th>was_nil</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2016-07-01</td>\n      <td>35</td>\n      <td>False</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2016-07-02</td>\n      <td>9</td>\n      <td>False</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2016-07-03</td>\n      <td>0</td>\n      <td>False</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2016-07-04</td>\n      <td>20</td>\n      <td>False</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2016-07-05</td>\n      <td>25</td>\n      <td>False</td>\n    </tr>\n  </tbody>\n</table>\n</div>"},"metadata":{}}]},{"cell_type":"markdown","source":"日历数据","metadata":{}},{"cell_type":"code","source":"date_info = pd.read_csv('./datalab/62822/date_info.csv')\ndate_info.head()","metadata":{"trusted":true},"execution_count":5,"outputs":[{"execution_count":5,"output_type":"execute_result","data":{"text/plain":"  calendar_date day_of_week  holiday_flg\n0    2016-01-01      Friday            1\n1    2016-01-02    Saturday            1\n2    2016-01-03      Sunday            1\n3    2016-01-04      Monday            0\n4    2016-01-05     Tuesday            0","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>calendar_date</th>\n      <th>day_of_week</th>\n      <th>holiday_flg</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>2016-01-01</td>\n      <td>Friday</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>2016-01-02</td>\n      <td>Saturday</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>2016-01-03</td>\n      <td>Sunday</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>2016-01-04</td>\n      <td>Monday</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>2016-01-05</td>\n      <td>Tuesday</td>\n      <td>0</td>\n    </tr>\n  </tbody>\n</table>\n</div>"},"metadata":{}}]},{"cell_type":"markdown","source":"shift()操作对数据进行移动，可以观察前一天和后天是不是节假日。","metadata":{}},{"cell_type":"code","source":"date_info.rename(columns={'holiday_flg': 'is_holiday', 'calendar_date': 'visit_date'}, inplace=True)\ndate_info['prev_day_is_holiday'] = date_info['is_holiday'].shift().fillna(0)\ndate_info['next_day_is_holiday'] = date_info['is_holiday'].shift(-1).fillna(0)\ndate_info.head()","metadata":{"trusted":true},"execution_count":6,"outputs":[{"execution_count":6,"output_type":"execute_result","data":{"text/plain":"   visit_date day_of_week  is_holiday  prev_day_is_holiday  \\\n0  2016-01-01      Friday           1                  0.0   \n1  2016-01-02    Saturday           1                  1.0   \n2  2016-01-03      Sunday           1                  1.0   \n3  2016-01-04      Monday           0                  1.0   \n4  2016-01-05     Tuesday           0                  0.0   \n\n   next_day_is_holiday  \n0                  1.0  \n1                  1.0  \n2                  0.0  \n3                  0.0  \n4                  0.0  ","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>visit_date</th>\n      <th>day_of_week</th>\n      <th>is_holiday</th>\n      <th>prev_day_is_holiday</th>\n      <th>next_day_is_holiday</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>2016-01-01</td>\n      <td>Friday</td>\n      <td>1</td>\n      <td>0.0</td>\n      <td>1.0</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>2016-01-02</td>\n      <td>Saturday</td>\n      <td>1</td>\n      <td>1.0</td>\n      <td>1.0</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>2016-01-03</td>\n      <td>Sunday</td>\n      <td>1</td>\n      <td>1.0</td>\n      <td>0.0</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>2016-01-04</td>\n      <td>Monday</td>\n      <td>0</td>\n      <td>1.0</td>\n      <td>0.0</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>2016-01-05</td>\n      <td>Tuesday</td>\n      <td>0</td>\n      <td>0.0</td>\n      <td>0.0</td>\n    </tr>\n  </tbody>\n</table>\n</div>"},"metadata":{}}]},{"cell_type":"markdown","source":"地区数据","metadata":{}},{"cell_type":"code","source":"air_store_info = pd.read_csv('./datalab/62822/air_store_info.csv')\n\nair_store_info.head()","metadata":{"trusted":true},"execution_count":7,"outputs":[{"execution_count":7,"output_type":"execute_result","data":{"text/plain":"           air_store_id  air_genre_name                 air_area_name  \\\n0  air_0f0cdeee6c9bf3d7  Italian/French  Hyōgo-ken Kōbe-shi Kumoidōri   \n1  air_7cc17a324ae5c7dc  Italian/French  Hyōgo-ken Kōbe-shi Kumoidōri   \n2  air_fee8dcf4d619598e  Italian/French  Hyōgo-ken Kōbe-shi Kumoidōri   \n3  air_a17f0778617c76e2  Italian/French  Hyōgo-ken Kōbe-shi Kumoidōri   \n4  air_83db5aff8f50478e  Italian/French  Tōkyō-to Minato-ku Shibakōen   \n\n    latitude   longitude  \n0  34.695124  135.197852  \n1  34.695124  135.197852  \n2  34.695124  135.197852  \n3  34.695124  135.197852  \n4  35.658068  139.751599  ","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>air_store_id</th>\n      <th>air_genre_name</th>\n      <th>air_area_name</th>\n      <th>latitude</th>\n      <th>longitude</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>air_0f0cdeee6c9bf3d7</td>\n      <td>Italian/French</td>\n      <td>Hyōgo-ken Kōbe-shi Kumoidōri</td>\n      <td>34.695124</td>\n      <td>135.197852</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>air_7cc17a324ae5c7dc</td>\n      <td>Italian/French</td>\n      <td>Hyōgo-ken Kōbe-shi Kumoidōri</td>\n      <td>34.695124</td>\n      <td>135.197852</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>air_fee8dcf4d619598e</td>\n      <td>Italian/French</td>\n      <td>Hyōgo-ken Kōbe-shi Kumoidōri</td>\n      <td>34.695124</td>\n      <td>135.197852</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>air_a17f0778617c76e2</td>\n      <td>Italian/French</td>\n      <td>Hyōgo-ken Kōbe-shi Kumoidōri</td>\n      <td>34.695124</td>\n      <td>135.197852</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>air_83db5aff8f50478e</td>\n      <td>Italian/French</td>\n      <td>Tōkyō-to Minato-ku Shibakōen</td>\n      <td>35.658068</td>\n      <td>139.751599</td>\n    </tr>\n  </tbody>\n</table>\n</div>"},"metadata":{}}]},{"cell_type":"markdown","source":"测试集","metadata":{}},{"cell_type":"code","source":"import numpy as np\n\nsubmission = pd.read_csv('./datalab/62822/sample_submission.csv')\nsubmission['air_store_id'] = submission['id'].str.slice(0, 20)\nsubmission['visit_date'] = submission['id'].str.slice(21)\nsubmission['is_test'] = True\nsubmission['visitors'] = np.nan\nsubmission['test_number'] = range(len(submission))\n\nsubmission.head()","metadata":{"trusted":true},"execution_count":8,"outputs":[{"execution_count":8,"output_type":"execute_result","data":{"text/plain":"                                id  visitors          air_store_id  \\\n0  air_00a91d42b08b08d9_2017-04-23       NaN  air_00a91d42b08b08d9   \n1  air_00a91d42b08b08d9_2017-04-24       NaN  air_00a91d42b08b08d9   \n2  air_00a91d42b08b08d9_2017-04-25       NaN  air_00a91d42b08b08d9   \n3  air_00a91d42b08b08d9_2017-04-26       NaN  air_00a91d42b08b08d9   \n4  air_00a91d42b08b08d9_2017-04-27       NaN  air_00a91d42b08b08d9   \n\n   visit_date  is_test  test_number  \n0  2017-04-23     True            0  \n1  2017-04-24     True            1  \n2  2017-04-25     True            2  \n3  2017-04-26     True            3  \n4  2017-04-27     True            4  ","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>id</th>\n      <th>visitors</th>\n      <th>air_store_id</th>\n      <th>visit_date</th>\n      <th>is_test</th>\n      <th>test_number</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>air_00a91d42b08b08d9_2017-04-23</td>\n      <td>NaN</td>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2017-04-23</td>\n      <td>True</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>air_00a91d42b08b08d9_2017-04-24</td>\n      <td>NaN</td>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2017-04-24</td>\n      <td>True</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>air_00a91d42b08b08d9_2017-04-25</td>\n      <td>NaN</td>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2017-04-25</td>\n      <td>True</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>air_00a91d42b08b08d9_2017-04-26</td>\n      <td>NaN</td>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2017-04-26</td>\n      <td>True</td>\n      <td>3</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>air_00a91d42b08b08d9_2017-04-27</td>\n      <td>NaN</td>\n      <td>air_00a91d42b08b08d9</td>\n      <td>2017-04-27</td>\n      <td>True</td>\n      <td>4</td>\n    </tr>\n  </tbody>\n</table>\n</div>"},"metadata":{}}]},{"cell_type":"markdown","source":"所有数据信息汇总","metadata":{}},{"cell_type":"code","source":"data = pd.concat((air_visit, submission.drop('id', axis='columns')))\ndata.head()","metadata":{"trusted":true},"execution_count":9,"outputs":[{"execution_count":9,"output_type":"execute_result","data":{"text/plain":"           air_store_id is_test  test_number  visit_date  visitors was_nil\n0  air_00a91d42b08b08d9     NaN          NaN  2016-07-01      35.0   False\n1  air_00a91d42b08b08d9     NaN          NaN  2016-07-02       9.0   False\n2  air_00a91d42b08b08d9     NaN          NaN  2016-07-03       0.0   False\n3  air_00a91d42b08b08d9     NaN          NaN  2016-07-04      20.0   False\n4  air_00a91d42b08b08d9     NaN          NaN  2016-07-05      25.0   False","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>air_store_id</th>\n      <th>is_test</th>\n      <th>test_number</th>\n      <th>visit_date</th>\n      <th>visitors</th>\n      <th>was_nil</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>NaN</td>\n      <td>NaN</td>\n      <td>2016-07-01</td>\n      <td>35.0</td>\n      <td>False</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>NaN</td>\n      <td>NaN</td>\n      <td>2016-07-02</td>\n      <td>9.0</td>\n      <td>False</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>NaN</td>\n      <td>NaN</td>\n      <td>2016-07-03</td>\n      <td>0.0</td>\n      <td>False</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>NaN</td>\n      <td>NaN</td>\n      <td>2016-07-04</td>\n      <td>20.0</td>\n      <td>False</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>NaN</td>\n      <td>NaN</td>\n      <td>2016-07-05</td>\n      <td>25.0</td>\n      <td>False</td>\n    </tr>\n  </tbody>\n</table>\n</div>"},"metadata":{}}]},{"cell_type":"code","source":"data['is_test'].fillna(False, inplace=True)\ndata = pd.merge(left=data, right=date_info, on='visit_date', how='left')\ndata = pd.merge(left=data, right=air_store_info, on='air_store_id', how='left')\ndata['visitors'] = data['visitors'].astype(float)\n\ndata.head()","metadata":{"trusted":true},"execution_count":10,"outputs":[{"execution_count":10,"output_type":"execute_result","data":{"text/plain":"           air_store_id  is_test  test_number  visit_date  visitors was_nil  \\\n0  air_00a91d42b08b08d9    False          NaN  2016-07-01      35.0   False   \n1  air_00a91d42b08b08d9    False          NaN  2016-07-02       9.0   False   \n2  air_00a91d42b08b08d9    False          NaN  2016-07-03       0.0   False   \n3  air_00a91d42b08b08d9    False          NaN  2016-07-04      20.0   False   \n4  air_00a91d42b08b08d9    False          NaN  2016-07-05      25.0   False   \n\n  day_of_week  is_holiday  prev_day_is_holiday  next_day_is_holiday  \\\n0      Friday           0                  0.0                  0.0   \n1    Saturday           0                  0.0                  0.0   \n2      Sunday           0                  0.0                  0.0   \n3      Monday           0                  0.0                  0.0   \n4     Tuesday           0                  0.0                  0.0   \n\n   air_genre_name                    air_area_name   latitude   longitude  \n0  Italian/French  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595  \n1  Italian/French  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595  \n2  Italian/French  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595  \n3  Italian/French  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595  \n4  Italian/French  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595  ","text/html":"<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>air_store_id</th>\n      <th>is_test</th>\n      <th>test_number</th>\n      <th>visit_date</th>\n      <th>visitors</th>\n      <th>was_nil</th>\n      <th>day_of_week</th>\n      <th>is_holiday</th>\n      <th>prev_day_is_holiday</th>\n      <th>next_day_is_holiday</th>\n      <th>air_genre_name</th>\n      <th>air_area_name</th>\n      <th>latitude</th>\n      <th>longitude</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>False</td>\n      <td>NaN</td>\n      <td>2016-07-01</td>\n      <td>35.0</td>\n      <td>False</td>\n      <td>Friday</td>\n      <td>0</td>\n      <td>0.0</td>\n      <td>0.0</td>\n      <td>Italian/French</td>\n      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n      <td>35.694003</td>\n      <td>139.753595</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>False</td>\n      <td>NaN</td>\n      <td>2016-07-02</td>\n      <td>9.0</td>\n      <td>False</td>\n      <td>Saturday</td>\n      <td>0</td>\n      <td>0.0</td>\n      <td>0.0</td>\n      <td>Italian/French</td>\n      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n      <td>35.694003</td>\n      <td>139.753595</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>False</td>\n      <td>NaN</td>\n      <td>2016-07-03</td>\n      <td>0.0</td>\n      <td>False</td>\n      <td>Sunday</td>\n      <td>0</td>\n      <td>0.0</td>\n      <td>0.0</td>\n      <td>Italian/French</td>\n      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n      <td>35.694003</td>\n      <td>139.753595</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>False</td>\n      <td>NaN</td>\n      <td>2016-07-04</td>\n      <td>20.0</td>\n      <td>False</td>\n      <td>Monday</td>\n      <td>0</td>\n      <td>0.0</td>\n      <td>0.0</td>\n      <td>Italian/French</td>\n      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n      <td>35.694003</td>\n      <td>139.753595</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>air_00a91d42b08b08d9</td>\n      <td>False</td>\n      <td>NaN</td>\n      <td>2016-07-05</td>\n      <td>25.0</td>\n      <td>False</td>\n      <td>Tuesday</td>\n      <td>0</td>\n      <td>0.0</td>\n      <td>0.0</td>\n      <td>Italian/French</td>\n      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n      <td>35.694003</td>\n      <td>139.753595</td>\n    </tr>\n  </tbody>\n</table>\n</div>"},"metadata":{}}]},{"cell_type":"markdown","source":"拿到天气数据","metadata":{}},{"cell_type":"code","source":"import glob\n\nweather_dfs = []\n\nfor path in glob.glob('./datalab/62857/*.csv'):\n    weather_df = pd.read_csv(path)\n    weather_df['station_id'] = path.split('\\\\')[-1].rstrip('.csv')\n    weather_dfs.append(weather_df)\n\nweather = pd.concat(weather_dfs, axis='rows')\nweather.rename(columns={'calendar_date': 'visit_date'}, inplace=True)\n\nweather.head()","metadata":{"trusted":true},"execution_count":12,"outputs":[{"traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mFileNotFoundError\u001b[0m                         Traceback (most recent call last)","\u001b[0;32m<ipython-input-12-20e1fa01ac4d>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m      4\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      5\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mpath\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mglob\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mglob\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'./datalab/62857/*.csv'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 6\u001b[0;31m     \u001b[0mweather_df\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpd\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread_csv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpath\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      7\u001b[0m     \u001b[0mweather_df\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'station_id'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpath\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msplit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'\\\\'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrstrip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'.csv'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      8\u001b[0m     \u001b[0mweather_dfs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mweather_df\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/opt/conda/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36mparser_f\u001b[0;34m(filepath_or_buffer, sep, delimiter, header, names, index_col, usecols, squeeze, prefix, mangle_dupe_cols, dtype, engine, converters, true_values, false_values, skipinitialspace, skiprows, nrows, na_values, keep_default_na, na_filter, verbose, skip_blank_lines, parse_dates, infer_datetime_format, keep_date_col, date_parser, dayfirst, iterator, chunksize, compression, thousands, decimal, lineterminator, quotechar, quoting, escapechar, comment, encoding, dialect, tupleize_cols, error_bad_lines, warn_bad_lines, skipfooter, skip_footer, doublequote, delim_whitespace, as_recarray, compact_ints, use_unsigned, low_memory, buffer_lines, memory_map, float_precision)\u001b[0m\n\u001b[1;32m    707\u001b[0m                     skip_blank_lines=skip_blank_lines)\n\u001b[1;32m    708\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 709\u001b[0;31m         \u001b[0;32mreturn\u001b[0m \u001b[0m_read\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfilepath_or_buffer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    710\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    711\u001b[0m     \u001b[0mparser_f\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__name__\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/opt/conda/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36m_read\u001b[0;34m(filepath_or_buffer, kwds)\u001b[0m\n\u001b[1;32m    447\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    448\u001b[0m     \u001b[0;31m# Create the parser.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 449\u001b[0;31m     \u001b[0mparser\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mTextFileReader\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfilepath_or_buffer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    450\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    451\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mchunksize\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0miterator\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/opt/conda/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, f, engine, **kwds)\u001b[0m\n\u001b[1;32m    816\u001b[0m             \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moptions\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'has_index_names'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mkwds\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'has_index_names'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    817\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 818\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_make_engine\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mengine\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    819\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    820\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mclose\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/opt/conda/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36m_make_engine\u001b[0;34m(self, engine)\u001b[0m\n\u001b[1;32m   1047\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0m_make_engine\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mengine\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'c'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1048\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0mengine\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'c'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1049\u001b[0;31m             \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_engine\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mCParserWrapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moptions\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1050\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1051\u001b[0m             \u001b[0;32mif\u001b[0m \u001b[0mengine\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'python'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/opt/conda/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, src, **kwds)\u001b[0m\n\u001b[1;32m   1693\u001b[0m         \u001b[0mkwds\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'allow_leading_cols'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mindex_col\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mFalse\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1694\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1695\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_reader\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mparsers\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTextReader\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msrc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1696\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1697\u001b[0m         \u001b[0;31m# XXX\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.TextReader.__cinit__\u001b[0;34m()\u001b[0m\n","\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.TextReader._setup_parser_source\u001b[0;34m()\u001b[0m\n","\u001b[0;31mFileNotFoundError\u001b[0m: File b'./datalab/62857/aichi__toyohashi-kana__toyohashi.csv' does not exist"],"ename":"FileNotFoundError","evalue":"File b'./datalab/62857/aichi__toyohashi-kana__toyohashi.csv' does not exist","output_type":"error"}]},{"cell_type":"markdown","source":"用各个小地方数据求出平均气温","metadata":{}},{"cell_type":"code","source":"means = weather.groupby('visit_date')[['avg_temperature', 'precipitation']].mean().reset_index()\nmeans.rename(columns={'avg_temperature': 'global_avg_temperature', 'precipitation': 'global_precipitation'}, inplace=True)\nweather = pd.merge(left=weather, right=means, on='visit_date', how='left')\nweather['avg_temperature'].fillna(weather['global_avg_temperature'], inplace=True)\nweather['precipitation'].fillna(weather['global_precipitation'], inplace=True)\n\nweather[['visit_date', 'avg_temperature', 'precipitation']].head()","metadata":{},"execution_count":15,"outputs":[{"execution_count":15,"output_type":"execute_result","data":{"text/html":["<div>\n","<style>\n","    .dataframe thead tr:only-child th {\n","        text-align: right;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: left;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>visit_date</th>\n","      <th>avg_temperature</th>\n","      <th>precipitation</th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>0</th>\n","      <td>2016-01-01</td>\n","      <td>6.0</td>\n","      <td>0.0</td>\n","    </tr>\n","    <tr>\n","      <th>1</th>\n","      <td>2016-01-02</td>\n","      <td>4.7</td>\n","      <td>0.0</td>\n","    </tr>\n","    <tr>\n","      <th>2</th>\n","      <td>2016-01-03</td>\n","      <td>7.0</td>\n","      <td>0.0</td>\n","    </tr>\n","    <tr>\n","      <th>3</th>\n","      <td>2016-01-04</td>\n","      <td>8.8</td>\n","      <td>0.0</td>\n","    </tr>\n","    <tr>\n","      <th>4</th>\n","      <td>2016-01-05</td>\n","      <td>8.9</td>\n","      <td>0.0</td>\n","    </tr>\n","  </tbody>\n","</table>\n","</div>"],"text/plain":["   visit_date  avg_temperature  precipitation\n","0  2016-01-01              6.0            0.0\n","1  2016-01-02              4.7            0.0\n","2  2016-01-03              7.0            0.0\n","3  2016-01-04              8.8            0.0\n","4  2016-01-05              8.9            0.0"]},"metadata":{}}]},{"cell_type":"markdown","source":"信息数据","metadata":{}},{"cell_type":"code","source":"data['visit_date'] = pd.to_datetime(data['visit_date'])\ndata.index = data['visit_date']\ndata.sort_values(['air_store_id', 'visit_date'], inplace=True)\n\ndata.head()","metadata":{},"execution_count":16,"outputs":[{"execution_count":16,"output_type":"execute_result","data":{"text/html":["<div>\n","<style>\n","    .dataframe thead tr:only-child th {\n","        text-align: right;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: left;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>air_store_id</th>\n","      <th>is_test</th>\n","      <th>test_number</th>\n","      <th>visit_date</th>\n","      <th>visitors</th>\n","      <th>was_nil</th>\n","      <th>day_of_week</th>\n","      <th>is_holiday</th>\n","      <th>prev_day_is_holiday</th>\n","      <th>next_day_is_holiday</th>\n","      <th>air_genre_name</th>\n","      <th>air_area_name</th>\n","      <th>latitude</th>\n","      <th>longitude</th>\n","    </tr>\n","    <tr>\n","      <th>visit_date</th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>2016-07-01</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-01</td>\n","      <td>35.0</td>\n","      <td>False</td>\n","      <td>Friday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-02</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-02</td>\n","      <td>9.0</td>\n","      <td>False</td>\n","      <td>Saturday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-03</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-03</td>\n","      <td>0.0</td>\n","      <td>True</td>\n","      <td>Sunday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-04</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-04</td>\n","      <td>20.0</td>\n","      <td>False</td>\n","      <td>Monday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-05</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-05</td>\n","      <td>25.0</td>\n","      <td>False</td>\n","      <td>Tuesday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","    </tr>\n","  </tbody>\n","</table>\n","</div>"],"text/plain":["                    air_store_id  is_test  test_number visit_date  visitors  \\\n","visit_date                                                                    \n","2016-07-01  air_00a91d42b08b08d9    False          NaN 2016-07-01      35.0   \n","2016-07-02  air_00a91d42b08b08d9    False          NaN 2016-07-02       9.0   \n","2016-07-03  air_00a91d42b08b08d9    False          NaN 2016-07-03       0.0   \n","2016-07-04  air_00a91d42b08b08d9    False          NaN 2016-07-04      20.0   \n","2016-07-05  air_00a91d42b08b08d9    False          NaN 2016-07-05      25.0   \n","\n","           was_nil day_of_week  is_holiday  prev_day_is_holiday  \\\n","visit_date                                                        \n","2016-07-01   False      Friday           0                  0.0   \n","2016-07-02   False    Saturday           0                  0.0   \n","2016-07-03    True      Sunday           0                  0.0   \n","2016-07-04   False      Monday           0                  0.0   \n","2016-07-05   False     Tuesday           0                  0.0   \n","\n","            next_day_is_holiday  air_genre_name  \\\n","visit_date                                        \n","2016-07-01                  0.0  Italian/French   \n","2016-07-02                  0.0  Italian/French   \n","2016-07-03                  0.0  Italian/French   \n","2016-07-04                  0.0  Italian/French   \n","2016-07-05                  0.0  Italian/French   \n","\n","                              air_area_name   latitude   longitude  \n","visit_date                                                          \n","2016-07-01  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595  \n","2016-07-02  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595  \n","2016-07-03  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595  \n","2016-07-04  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595  \n","2016-07-05  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595  "]},"metadata":{}}]},{"cell_type":"markdown","source":"异常点问题，数据中存在部分异常点，以正太分布为出发点，认为95%的是正常的，所以选择了1.96这个值。对异常点来规范，让特别大的点等于正常中最大的。","metadata":{}},{"cell_type":"code","source":"def find_outliers(series):\n    return (series - series.mean()) > 1.96 * series.std()\n\n\ndef cap_values(series):\n    outliers = find_outliers(series)\n    max_val = series[~outliers].max()\n    series[outliers] = max_val\n    return series\n\n\nstores = data.groupby('air_store_id')\ndata['is_outlier'] = stores.apply(lambda g: find_outliers(g['visitors'])).values\ndata['visitors_capped'] = stores.apply(lambda g: cap_values(g['visitors'])).values\ndata['visitors_capped_log1p'] = np.log1p(data['visitors_capped'])\n\ndata.head()","metadata":{},"execution_count":17,"outputs":[{"execution_count":17,"output_type":"execute_result","data":{"text/html":["<div>\n","<style>\n","    .dataframe thead tr:only-child th {\n","        text-align: right;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: left;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>air_store_id</th>\n","      <th>is_test</th>\n","      <th>test_number</th>\n","      <th>visit_date</th>\n","      <th>visitors</th>\n","      <th>was_nil</th>\n","      <th>day_of_week</th>\n","      <th>is_holiday</th>\n","      <th>prev_day_is_holiday</th>\n","      <th>next_day_is_holiday</th>\n","      <th>air_genre_name</th>\n","      <th>air_area_name</th>\n","      <th>latitude</th>\n","      <th>longitude</th>\n","      <th>is_outlier</th>\n","      <th>visitors_capped</th>\n","      <th>visitors_capped_log1p</th>\n","    </tr>\n","    <tr>\n","      <th>visit_date</th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>2016-07-01</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-01</td>\n","      <td>35.0</td>\n","      <td>False</td>\n","      <td>Friday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>35.0</td>\n","      <td>3.583519</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-02</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-02</td>\n","      <td>9.0</td>\n","      <td>False</td>\n","      <td>Saturday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>9.0</td>\n","      <td>2.302585</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-03</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-03</td>\n","      <td>0.0</td>\n","      <td>True</td>\n","      <td>Sunday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>0.0</td>\n","      <td>0.000000</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-04</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-04</td>\n","      <td>20.0</td>\n","      <td>False</td>\n","      <td>Monday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>20.0</td>\n","      <td>3.044522</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-05</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-05</td>\n","      <td>25.0</td>\n","      <td>False</td>\n","      <td>Tuesday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>25.0</td>\n","      <td>3.258097</td>\n","    </tr>\n","  </tbody>\n","</table>\n","</div>"],"text/plain":["                    air_store_id  is_test  test_number visit_date  visitors  \\\n","visit_date                                                                    \n","2016-07-01  air_00a91d42b08b08d9    False          NaN 2016-07-01      35.0   \n","2016-07-02  air_00a91d42b08b08d9    False          NaN 2016-07-02       9.0   \n","2016-07-03  air_00a91d42b08b08d9    False          NaN 2016-07-03       0.0   \n","2016-07-04  air_00a91d42b08b08d9    False          NaN 2016-07-04      20.0   \n","2016-07-05  air_00a91d42b08b08d9    False          NaN 2016-07-05      25.0   \n","\n","           was_nil day_of_week  is_holiday  prev_day_is_holiday  \\\n","visit_date                                                        \n","2016-07-01   False      Friday           0                  0.0   \n","2016-07-02   False    Saturday           0                  0.0   \n","2016-07-03    True      Sunday           0                  0.0   \n","2016-07-04   False      Monday           0                  0.0   \n","2016-07-05   False     Tuesday           0                  0.0   \n","\n","            next_day_is_holiday  air_genre_name  \\\n","visit_date                                        \n","2016-07-01                  0.0  Italian/French   \n","2016-07-02                  0.0  Italian/French   \n","2016-07-03                  0.0  Italian/French   \n","2016-07-04                  0.0  Italian/French   \n","2016-07-05                  0.0  Italian/French   \n","\n","                              air_area_name   latitude   longitude  \\\n","visit_date                                                           \n","2016-07-01  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595   \n","2016-07-02  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595   \n","2016-07-03  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595   \n","2016-07-04  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595   \n","2016-07-05  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595   \n","\n","            is_outlier  visitors_capped  visitors_capped_log1p  \n","visit_date                                                      \n","2016-07-01       False             35.0               3.583519  \n","2016-07-02       False              9.0               2.302585  \n","2016-07-03       False              0.0               0.000000  \n","2016-07-04       False             20.0               3.044522  \n","2016-07-05       False             25.0               3.258097  "]},"metadata":{}}]},{"cell_type":"markdown","source":"日期特征","metadata":{}},{"cell_type":"code","source":"data['is_weekend'] = data['day_of_week'].isin(['Saturday', 'Sunday']).astype(int)\ndata['day_of_month'] = data['visit_date'].dt.day\ndata.head()","metadata":{},"execution_count":18,"outputs":[{"execution_count":18,"output_type":"execute_result","data":{"text/html":["<div>\n","<style>\n","    .dataframe thead tr:only-child th {\n","        text-align: right;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: left;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>air_store_id</th>\n","      <th>is_test</th>\n","      <th>test_number</th>\n","      <th>visit_date</th>\n","      <th>visitors</th>\n","      <th>was_nil</th>\n","      <th>day_of_week</th>\n","      <th>is_holiday</th>\n","      <th>prev_day_is_holiday</th>\n","      <th>next_day_is_holiday</th>\n","      <th>air_genre_name</th>\n","      <th>air_area_name</th>\n","      <th>latitude</th>\n","      <th>longitude</th>\n","      <th>is_outlier</th>\n","      <th>visitors_capped</th>\n","      <th>visitors_capped_log1p</th>\n","      <th>is_weekend</th>\n","      <th>day_of_month</th>\n","    </tr>\n","    <tr>\n","      <th>visit_date</th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>2016-07-01</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-01</td>\n","      <td>35.0</td>\n","      <td>False</td>\n","      <td>Friday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>35.0</td>\n","      <td>3.583519</td>\n","      <td>0</td>\n","      <td>1</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-02</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-02</td>\n","      <td>9.0</td>\n","      <td>False</td>\n","      <td>Saturday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>9.0</td>\n","      <td>2.302585</td>\n","      <td>1</td>\n","      <td>2</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-03</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-03</td>\n","      <td>0.0</td>\n","      <td>True</td>\n","      <td>Sunday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>0.0</td>\n","      <td>0.000000</td>\n","      <td>1</td>\n","      <td>3</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-04</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-04</td>\n","      <td>20.0</td>\n","      <td>False</td>\n","      <td>Monday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>20.0</td>\n","      <td>3.044522</td>\n","      <td>0</td>\n","      <td>4</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-05</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-05</td>\n","      <td>25.0</td>\n","      <td>False</td>\n","      <td>Tuesday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Italian/French</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>35.694003</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>25.0</td>\n","      <td>3.258097</td>\n","      <td>0</td>\n","      <td>5</td>\n","    </tr>\n","  </tbody>\n","</table>\n","</div>"],"text/plain":["                    air_store_id  is_test  test_number visit_date  visitors  \\\n","visit_date                                                                    \n","2016-07-01  air_00a91d42b08b08d9    False          NaN 2016-07-01      35.0   \n","2016-07-02  air_00a91d42b08b08d9    False          NaN 2016-07-02       9.0   \n","2016-07-03  air_00a91d42b08b08d9    False          NaN 2016-07-03       0.0   \n","2016-07-04  air_00a91d42b08b08d9    False          NaN 2016-07-04      20.0   \n","2016-07-05  air_00a91d42b08b08d9    False          NaN 2016-07-05      25.0   \n","\n","           was_nil day_of_week  is_holiday  prev_day_is_holiday  \\\n","visit_date                                                        \n","2016-07-01   False      Friday           0                  0.0   \n","2016-07-02   False    Saturday           0                  0.0   \n","2016-07-03    True      Sunday           0                  0.0   \n","2016-07-04   False      Monday           0                  0.0   \n","2016-07-05   False     Tuesday           0                  0.0   \n","\n","            next_day_is_holiday  air_genre_name  \\\n","visit_date                                        \n","2016-07-01                  0.0  Italian/French   \n","2016-07-02                  0.0  Italian/French   \n","2016-07-03                  0.0  Italian/French   \n","2016-07-04                  0.0  Italian/French   \n","2016-07-05                  0.0  Italian/French   \n","\n","                              air_area_name   latitude   longitude  \\\n","visit_date                                                           \n","2016-07-01  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595   \n","2016-07-02  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595   \n","2016-07-03  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595   \n","2016-07-04  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595   \n","2016-07-05  Tōkyō-to Chiyoda-ku Kudanminami  35.694003  139.753595   \n","\n","            is_outlier  visitors_capped  visitors_capped_log1p  is_weekend  \\\n","visit_date                                                                   \n","2016-07-01       False             35.0               3.583519           0   \n","2016-07-02       False              9.0               2.302585           1   \n","2016-07-03       False              0.0               0.000000           1   \n","2016-07-04       False             20.0               3.044522           0   \n","2016-07-05       False             25.0               3.258097           0   \n","\n","            day_of_month  \n","visit_date                \n","2016-07-01             1  \n","2016-07-02             2  \n","2016-07-03             3  \n","2016-07-04             4  \n","2016-07-05             5  "]},"metadata":{}}]},{"cell_type":"markdown","source":"指数加权移动平均(Exponential Weighted Moving Average)，反应时间序列变换趋势，需要我们给定alpha值，这里我们来优化求一个最合适的。\n","metadata":{}},{"cell_type":"code","source":"from scipy import optimize\n\n\ndef calc_shifted_ewm(series, alpha, adjust=True):\n    return series.shift().ewm(alpha=alpha, adjust=adjust).mean()\n\n\ndef find_best_signal(series, adjust=False, eps=10e-5):\n    \n    def f(alpha):\n        shifted_ewm = calc_shifted_ewm(series=series, alpha=min(max(alpha, 0), 1), adjust=adjust)\n        corr = np.mean(np.power(series - shifted_ewm, 2))\n        return corr\n     \n    res = optimize.differential_evolution(func=f, bounds=[(0 + eps, 1 - eps)])\n    \n    return calc_shifted_ewm(series=series, alpha=res['x'][0], adjust=adjust)\n\n\nroll = data.groupby(['air_store_id', 'day_of_week']).apply(lambda g: find_best_signal(g['visitors_capped']))\ndata['optimized_ewm_by_air_store_id_&_day_of_week'] = roll.sort_index(level=['air_store_id', 'visit_date']).values\n\nroll = data.groupby(['air_store_id', 'is_weekend']).apply(lambda g: find_best_signal(g['visitors_capped']))\ndata['optimized_ewm_by_air_store_id_&_is_weekend'] = roll.sort_index(level=['air_store_id', 'visit_date']).values\n\nroll = data.groupby(['air_store_id', 'day_of_week']).apply(lambda g: find_best_signal(g['visitors_capped_log1p']))\ndata['optimized_ewm_log1p_by_air_store_id_&_day_of_week'] = roll.sort_index(level=['air_store_id', 'visit_date']).values\n\nroll = data.groupby(['air_store_id', 'is_weekend']).apply(lambda g: find_best_signal(g['visitors_capped_log1p']))\ndata['optimized_ewm_log1p_by_air_store_id_&_is_weekend'] = roll.sort_index(level=['air_store_id', 'visit_date']).values\n\ndata.head()","metadata":{},"execution_count":21,"outputs":[{"execution_count":21,"output_type":"execute_result","data":{"text/html":["<div>\n","<style>\n","    .dataframe thead tr:only-child th {\n","        text-align: right;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: left;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>air_store_id</th>\n","      <th>is_test</th>\n","      <th>test_number</th>\n","      <th>visit_date</th>\n","      <th>visitors</th>\n","      <th>was_nil</th>\n","      <th>day_of_week</th>\n","      <th>is_holiday</th>\n","      <th>prev_day_is_holiday</th>\n","      <th>next_day_is_holiday</th>\n","      <th>...</th>\n","      <th>longitude</th>\n","      <th>is_outlier</th>\n","      <th>visitors_capped</th>\n","      <th>visitors_capped_log1p</th>\n","      <th>is_weekend</th>\n","      <th>day_of_month</th>\n","      <th>optimized_ewm_by_air_store_id_&amp;_day_of_week</th>\n","      <th>optimized_ewm_by_air_store_id_&amp;_is_weekend</th>\n","      <th>optimized_ewm_log1p_by_air_store_id_&amp;_day_of_week</th>\n","      <th>optimized_ewm_log1p_by_air_store_id_&amp;_is_weekend</th>\n","    </tr>\n","    <tr>\n","      <th>visit_date</th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>2016-07-01</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-01</td>\n","      <td>35.0</td>\n","      <td>False</td>\n","      <td>Friday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>...</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>35.0</td>\n","      <td>3.583519</td>\n","      <td>0</td>\n","      <td>1</td>\n","      <td>NaN</td>\n","      <td>NaN</td>\n","      <td>NaN</td>\n","      <td>NaN</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-02</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-02</td>\n","      <td>9.0</td>\n","      <td>False</td>\n","      <td>Saturday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>...</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>9.0</td>\n","      <td>2.302585</td>\n","      <td>1</td>\n","      <td>2</td>\n","      <td>NaN</td>\n","      <td>NaN</td>\n","      <td>NaN</td>\n","      <td>NaN</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-03</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-03</td>\n","      <td>0.0</td>\n","      <td>True</td>\n","      <td>Sunday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>...</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>0.0</td>\n","      <td>0.000000</td>\n","      <td>1</td>\n","      <td>3</td>\n","      <td>NaN</td>\n","      <td>9.000000</td>\n","      <td>NaN</td>\n","      <td>2.302585</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-04</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-04</td>\n","      <td>20.0</td>\n","      <td>False</td>\n","      <td>Monday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>...</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>20.0</td>\n","      <td>3.044522</td>\n","      <td>0</td>\n","      <td>4</td>\n","      <td>NaN</td>\n","      <td>35.000000</td>\n","      <td>NaN</td>\n","      <td>3.583519</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-05</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-05</td>\n","      <td>25.0</td>\n","      <td>False</td>\n","      <td>Tuesday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>...</td>\n","      <td>139.753595</td>\n","      <td>False</td>\n","      <td>25.0</td>\n","      <td>3.258097</td>\n","      <td>0</td>\n","      <td>5</td>\n","      <td>NaN</td>\n","      <td>33.429621</td>\n","      <td>NaN</td>\n","      <td>3.428513</td>\n","    </tr>\n","  </tbody>\n","</table>\n","<p>5 rows × 23 columns</p>\n","</div>"],"text/plain":["                    air_store_id  is_test  test_number visit_date  visitors  \\\n","visit_date                                                                    \n","2016-07-01  air_00a91d42b08b08d9    False          NaN 2016-07-01      35.0   \n","2016-07-02  air_00a91d42b08b08d9    False          NaN 2016-07-02       9.0   \n","2016-07-03  air_00a91d42b08b08d9    False          NaN 2016-07-03       0.0   \n","2016-07-04  air_00a91d42b08b08d9    False          NaN 2016-07-04      20.0   \n","2016-07-05  air_00a91d42b08b08d9    False          NaN 2016-07-05      25.0   \n","\n","           was_nil day_of_week  is_holiday  prev_day_is_holiday  \\\n","visit_date                                                        \n","2016-07-01   False      Friday           0                  0.0   \n","2016-07-02   False    Saturday           0                  0.0   \n","2016-07-03    True      Sunday           0                  0.0   \n","2016-07-04   False      Monday           0                  0.0   \n","2016-07-05   False     Tuesday           0                  0.0   \n","\n","            next_day_is_holiday  \\\n","visit_date                        \n","2016-07-01                  0.0   \n","2016-07-02                  0.0   \n","2016-07-03                  0.0   \n","2016-07-04                  0.0   \n","2016-07-05                  0.0   \n","\n","                                  ...                          longitude  \\\n","visit_date                        ...                                      \n","2016-07-01                        ...                         139.753595   \n","2016-07-02                        ...                         139.753595   \n","2016-07-03                        ...                         139.753595   \n","2016-07-04                        ...                         139.753595   \n","2016-07-05                        ...                         139.753595   \n","\n","           is_outlier  visitors_capped  visitors_capped_log1p  is_weekend  \\\n","visit_date                                                                  \n","2016-07-01      False             35.0               3.583519           0   \n","2016-07-02      False              9.0               2.302585           1   \n","2016-07-03      False              0.0               0.000000           1   \n","2016-07-04      False             20.0               3.044522           0   \n","2016-07-05      False             25.0               3.258097           0   \n","\n","            day_of_month  optimized_ewm_by_air_store_id_&_day_of_week  \\\n","visit_date                                                              \n","2016-07-01             1                                          NaN   \n","2016-07-02             2                                          NaN   \n","2016-07-03             3                                          NaN   \n","2016-07-04             4                                          NaN   \n","2016-07-05             5                                          NaN   \n","\n","            optimized_ewm_by_air_store_id_&_is_weekend  \\\n","visit_date                                               \n","2016-07-01                                         NaN   \n","2016-07-02                                         NaN   \n","2016-07-03                                    9.000000   \n","2016-07-04                                   35.000000   \n","2016-07-05                                   33.429621   \n","\n","            optimized_ewm_log1p_by_air_store_id_&_day_of_week  \\\n","visit_date                                                      \n","2016-07-01                                                NaN   \n","2016-07-02                                                NaN   \n","2016-07-03                                                NaN   \n","2016-07-04                                                NaN   \n","2016-07-05                                                NaN   \n","\n","            optimized_ewm_log1p_by_air_store_id_&_is_weekend  \n","visit_date                                                    \n","2016-07-01                                               NaN  \n","2016-07-02                                               NaN  \n","2016-07-03                                          2.302585  \n","2016-07-04                                          3.583519  \n","2016-07-05                                          3.428513  \n","\n","[5 rows x 23 columns]"]},"metadata":{}}]},{"cell_type":"markdown","source":"尽可能多的提取时间序列信息","metadata":{}},{"cell_type":"code","source":"def extract_precedent_statistics(df, on, group_by):\n    \n    df.sort_values(group_by + ['visit_date'], inplace=True)\n    \n    groups = df.groupby(group_by, sort=False)\n    \n    stats = {\n        'mean': [],\n        'median': [],\n        'std': [],\n        'count': [],\n        'max': [],\n        'min': []\n    }\n    \n    exp_alphas = [0.1, 0.25, 0.3, 0.5, 0.75]\n    stats.update({'exp_{}_mean'.format(alpha): [] for alpha in exp_alphas})\n    \n    for _, group in groups:\n        \n        shift = group[on].shift()\n        roll = shift.rolling(window=len(group), min_periods=1)\n        \n        stats['mean'].extend(roll.mean())\n        stats['median'].extend(roll.median())\n        stats['std'].extend(roll.std())\n        stats['count'].extend(roll.count())\n        stats['max'].extend(roll.max())\n        stats['min'].extend(roll.min())\n        \n        for alpha in exp_alphas:\n            exp = shift.ewm(alpha=alpha, adjust=False)\n            stats['exp_{}_mean'.format(alpha)].extend(exp.mean())\n    \n    suffix = '_&_'.join(group_by)\n    \n    for stat_name, values in stats.items():\n        df['{}_{}_by_{}'.format(on, stat_name, suffix)] = values\n\n\nextract_precedent_statistics(\n    df=data,\n    on='visitors_capped',\n    group_by=['air_store_id', 'day_of_week']\n)\n\nextract_precedent_statistics(\n    df=data,\n    on='visitors_capped',\n    group_by=['air_store_id', 'is_weekend']\n)\n\nextract_precedent_statistics(\n    df=data,\n    on='visitors_capped',\n    group_by=['air_store_id']\n)\n\nextract_precedent_statistics(\n    df=data,\n    on='visitors_capped_log1p',\n    group_by=['air_store_id', 'day_of_week']\n)\n\nextract_precedent_statistics(\n    df=data,\n    on='visitors_capped_log1p',\n    group_by=['air_store_id', 'is_weekend']\n)\n\nextract_precedent_statistics(\n    df=data,\n    on='visitors_capped_log1p',\n    group_by=['air_store_id']\n)\n\ndata.sort_values(['air_store_id', 'visit_date']).head()","metadata":{},"execution_count":22,"outputs":[{"execution_count":22,"output_type":"execute_result","data":{"text/html":["<div>\n","<style>\n","    .dataframe thead tr:only-child th {\n","        text-align: right;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: left;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>air_store_id</th>\n","      <th>is_test</th>\n","      <th>test_number</th>\n","      <th>visit_date</th>\n","      <th>visitors</th>\n","      <th>was_nil</th>\n","      <th>day_of_week</th>\n","      <th>is_holiday</th>\n","      <th>prev_day_is_holiday</th>\n","      <th>next_day_is_holiday</th>\n","      <th>...</th>\n","      <th>visitors_capped_log1p_median_by_air_store_id</th>\n","      <th>visitors_capped_log1p_std_by_air_store_id</th>\n","      <th>visitors_capped_log1p_count_by_air_store_id</th>\n","      <th>visitors_capped_log1p_max_by_air_store_id</th>\n","      <th>visitors_capped_log1p_min_by_air_store_id</th>\n","      <th>visitors_capped_log1p_exp_0.1_mean_by_air_store_id</th>\n","      <th>visitors_capped_log1p_exp_0.25_mean_by_air_store_id</th>\n","      <th>visitors_capped_log1p_exp_0.3_mean_by_air_store_id</th>\n","      <th>visitors_capped_log1p_exp_0.5_mean_by_air_store_id</th>\n","      <th>visitors_capped_log1p_exp_0.75_mean_by_air_store_id</th>\n","    </tr>\n","    <tr>\n","      <th>visit_date</th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>2016-07-01</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-01</td>\n","      <td>35.0</td>\n","      <td>False</td>\n","      <td>Friday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>...</td>\n","      <td>NaN</td>\n","      <td>NaN</td>\n","      <td>0.0</td>\n","      <td>NaN</td>\n","      <td>NaN</td>\n","      <td>NaN</td>\n","      <td>NaN</td>\n","      <td>NaN</td>\n","      <td>NaN</td>\n","      <td>NaN</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-02</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-02</td>\n","      <td>9.0</td>\n","      <td>False</td>\n","      <td>Saturday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>...</td>\n","      <td>3.583519</td>\n","      <td>NaN</td>\n","      <td>1.0</td>\n","      <td>3.583519</td>\n","      <td>3.583519</td>\n","      <td>3.583519</td>\n","      <td>3.583519</td>\n","      <td>3.583519</td>\n","      <td>3.583519</td>\n","      <td>3.583519</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-03</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-03</td>\n","      <td>0.0</td>\n","      <td>True</td>\n","      <td>Sunday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>...</td>\n","      <td>2.943052</td>\n","      <td>0.905757</td>\n","      <td>2.0</td>\n","      <td>3.583519</td>\n","      <td>2.302585</td>\n","      <td>3.455426</td>\n","      <td>3.263285</td>\n","      <td>3.199239</td>\n","      <td>2.943052</td>\n","      <td>2.622819</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-04</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-04</td>\n","      <td>20.0</td>\n","      <td>False</td>\n","      <td>Monday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>...</td>\n","      <td>2.302585</td>\n","      <td>1.815870</td>\n","      <td>3.0</td>\n","      <td>3.583519</td>\n","      <td>0.000000</td>\n","      <td>3.109883</td>\n","      <td>2.447464</td>\n","      <td>2.239467</td>\n","      <td>1.471526</td>\n","      <td>0.655705</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-05</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-05</td>\n","      <td>25.0</td>\n","      <td>False</td>\n","      <td>Tuesday</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>...</td>\n","      <td>2.673554</td>\n","      <td>1.578354</td>\n","      <td>4.0</td>\n","      <td>3.583519</td>\n","      <td>0.000000</td>\n","      <td>3.103347</td>\n","      <td>2.596729</td>\n","      <td>2.480984</td>\n","      <td>2.258024</td>\n","      <td>2.447318</td>\n","    </tr>\n","  </tbody>\n","</table>\n","<p>5 rows × 89 columns</p>\n","</div>"],"text/plain":["                    air_store_id  is_test  test_number visit_date  visitors  \\\n","visit_date                                                                    \n","2016-07-01  air_00a91d42b08b08d9    False          NaN 2016-07-01      35.0   \n","2016-07-02  air_00a91d42b08b08d9    False          NaN 2016-07-02       9.0   \n","2016-07-03  air_00a91d42b08b08d9    False          NaN 2016-07-03       0.0   \n","2016-07-04  air_00a91d42b08b08d9    False          NaN 2016-07-04      20.0   \n","2016-07-05  air_00a91d42b08b08d9    False          NaN 2016-07-05      25.0   \n","\n","           was_nil day_of_week  is_holiday  prev_day_is_holiday  \\\n","visit_date                                                        \n","2016-07-01   False      Friday           0                  0.0   \n","2016-07-02   False    Saturday           0                  0.0   \n","2016-07-03    True      Sunday           0                  0.0   \n","2016-07-04   False      Monday           0                  0.0   \n","2016-07-05   False     Tuesday           0                  0.0   \n","\n","            next_day_is_holiday  \\\n","visit_date                        \n","2016-07-01                  0.0   \n","2016-07-02                  0.0   \n","2016-07-03                  0.0   \n","2016-07-04                  0.0   \n","2016-07-05                  0.0   \n","\n","                                   ...                           \\\n","visit_date                         ...                            \n","2016-07-01                         ...                            \n","2016-07-02                         ...                            \n","2016-07-03                         ...                            \n","2016-07-04                         ...                            \n","2016-07-05                         ...                            \n","\n","           visitors_capped_log1p_median_by_air_store_id  \\\n","visit_date                                                \n","2016-07-01                                          NaN   \n","2016-07-02                                     3.583519   \n","2016-07-03                                     2.943052   \n","2016-07-04                                     2.302585   \n","2016-07-05                                     2.673554   \n","\n","           visitors_capped_log1p_std_by_air_store_id  \\\n","visit_date                                             \n","2016-07-01                                       NaN   \n","2016-07-02                                       NaN   \n","2016-07-03                                  0.905757   \n","2016-07-04                                  1.815870   \n","2016-07-05                                  1.578354   \n","\n","            visitors_capped_log1p_count_by_air_store_id  \\\n","visit_date                                                \n","2016-07-01                                          0.0   \n","2016-07-02                                          1.0   \n","2016-07-03                                          2.0   \n","2016-07-04                                          3.0   \n","2016-07-05                                          4.0   \n","\n","            visitors_capped_log1p_max_by_air_store_id  \\\n","visit_date                                              \n","2016-07-01                                        NaN   \n","2016-07-02                                   3.583519   \n","2016-07-03                                   3.583519   \n","2016-07-04                                   3.583519   \n","2016-07-05                                   3.583519   \n","\n","            visitors_capped_log1p_min_by_air_store_id  \\\n","visit_date                                              \n","2016-07-01                                        NaN   \n","2016-07-02                                   3.583519   \n","2016-07-03                                   2.302585   \n","2016-07-04                                   0.000000   \n","2016-07-05                                   0.000000   \n","\n","            visitors_capped_log1p_exp_0.1_mean_by_air_store_id  \\\n","visit_date                                                       \n","2016-07-01                                                NaN    \n","2016-07-02                                           3.583519    \n","2016-07-03                                           3.455426    \n","2016-07-04                                           3.109883    \n","2016-07-05                                           3.103347    \n","\n","            visitors_capped_log1p_exp_0.25_mean_by_air_store_id  \\\n","visit_date                                                        \n","2016-07-01                                                NaN     \n","2016-07-02                                           3.583519     \n","2016-07-03                                           3.263285     \n","2016-07-04                                           2.447464     \n","2016-07-05                                           2.596729     \n","\n","            visitors_capped_log1p_exp_0.3_mean_by_air_store_id  \\\n","visit_date                                                       \n","2016-07-01                                                NaN    \n","2016-07-02                                           3.583519    \n","2016-07-03                                           3.199239    \n","2016-07-04                                           2.239467    \n","2016-07-05                                           2.480984    \n","\n","            visitors_capped_log1p_exp_0.5_mean_by_air_store_id  \\\n","visit_date                                                       \n","2016-07-01                                                NaN    \n","2016-07-02                                           3.583519    \n","2016-07-03                                           2.943052    \n","2016-07-04                                           1.471526    \n","2016-07-05                                           2.258024    \n","\n","            visitors_capped_log1p_exp_0.75_mean_by_air_store_id  \n","visit_date                                                       \n","2016-07-01                                                NaN    \n","2016-07-02                                           3.583519    \n","2016-07-03                                           2.622819    \n","2016-07-04                                           0.655705    \n","2016-07-05                                           2.447318    \n","\n","[5 rows x 89 columns]"]},"metadata":{}}]},{"cell_type":"code","source":"data = pd.get_dummies(data, columns=['day_of_week', 'air_genre_name'])\ndata.head()","metadata":{},"execution_count":23,"outputs":[{"execution_count":23,"output_type":"execute_result","data":{"text/html":["<div>\n","<style>\n","    .dataframe thead tr:only-child th {\n","        text-align: right;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: left;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>air_store_id</th>\n","      <th>is_test</th>\n","      <th>test_number</th>\n","      <th>visit_date</th>\n","      <th>visitors</th>\n","      <th>was_nil</th>\n","      <th>is_holiday</th>\n","      <th>prev_day_is_holiday</th>\n","      <th>next_day_is_holiday</th>\n","      <th>air_area_name</th>\n","      <th>...</th>\n","      <th>air_genre_name_Dining bar</th>\n","      <th>air_genre_name_International cuisine</th>\n","      <th>air_genre_name_Italian/French</th>\n","      <th>air_genre_name_Izakaya</th>\n","      <th>air_genre_name_Japanese food</th>\n","      <th>air_genre_name_Karaoke/Party</th>\n","      <th>air_genre_name_Okonomiyaki/Monja/Teppanyaki</th>\n","      <th>air_genre_name_Other</th>\n","      <th>air_genre_name_Western food</th>\n","      <th>air_genre_name_Yakiniku/Korean food</th>\n","    </tr>\n","    <tr>\n","      <th>visit_date</th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>2016-07-01</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-01</td>\n","      <td>35.0</td>\n","      <td>False</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>...</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>1</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-02</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-02</td>\n","      <td>9.0</td>\n","      <td>False</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>...</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>1</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-03</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-03</td>\n","      <td>0.0</td>\n","      <td>True</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>...</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>1</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-04</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-04</td>\n","      <td>20.0</td>\n","      <td>False</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>...</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>1</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-05</th>\n","      <td>air_00a91d42b08b08d9</td>\n","      <td>False</td>\n","      <td>NaN</td>\n","      <td>2016-07-05</td>\n","      <td>25.0</td>\n","      <td>False</td>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>Tōkyō-to Chiyoda-ku Kudanminami</td>\n","      <td>...</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>1</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","    </tr>\n","  </tbody>\n","</table>\n","<p>5 rows × 108 columns</p>\n","</div>"],"text/plain":["                    air_store_id  is_test  test_number visit_date  visitors  \\\n","visit_date                                                                    \n","2016-07-01  air_00a91d42b08b08d9    False          NaN 2016-07-01      35.0   \n","2016-07-02  air_00a91d42b08b08d9    False          NaN 2016-07-02       9.0   \n","2016-07-03  air_00a91d42b08b08d9    False          NaN 2016-07-03       0.0   \n","2016-07-04  air_00a91d42b08b08d9    False          NaN 2016-07-04      20.0   \n","2016-07-05  air_00a91d42b08b08d9    False          NaN 2016-07-05      25.0   \n","\n","           was_nil  is_holiday  prev_day_is_holiday  next_day_is_holiday  \\\n","visit_date                                                                 \n","2016-07-01   False           0                  0.0                  0.0   \n","2016-07-02   False           0                  0.0                  0.0   \n","2016-07-03    True           0                  0.0                  0.0   \n","2016-07-04   False           0                  0.0                  0.0   \n","2016-07-05   False           0                  0.0                  0.0   \n","\n","                              air_area_name  \\\n","visit_date                                    \n","2016-07-01  Tōkyō-to Chiyoda-ku Kudanminami   \n","2016-07-02  Tōkyō-to Chiyoda-ku Kudanminami   \n","2016-07-03  Tōkyō-to Chiyoda-ku Kudanminami   \n","2016-07-04  Tōkyō-to Chiyoda-ku Kudanminami   \n","2016-07-05  Tōkyō-to Chiyoda-ku Kudanminami   \n","\n","                           ...                   air_genre_name_Dining bar  \\\n","visit_date                 ...                                               \n","2016-07-01                 ...                                           0   \n","2016-07-02                 ...                                           0   \n","2016-07-03                 ...                                           0   \n","2016-07-04                 ...                                           0   \n","2016-07-05                 ...                                           0   \n","\n","            air_genre_name_International cuisine  \\\n","visit_date                                         \n","2016-07-01                                     0   \n","2016-07-02                                     0   \n","2016-07-03                                     0   \n","2016-07-04                                     0   \n","2016-07-05                                     0   \n","\n","            air_genre_name_Italian/French  air_genre_name_Izakaya  \\\n","visit_date                                                          \n","2016-07-01                              1                       0   \n","2016-07-02                              1                       0   \n","2016-07-03                              1                       0   \n","2016-07-04                              1                       0   \n","2016-07-05                              1                       0   \n","\n","            air_genre_name_Japanese food  air_genre_name_Karaoke/Party  \\\n","visit_date                                                               \n","2016-07-01                             0                             0   \n","2016-07-02                             0                             0   \n","2016-07-03                             0                             0   \n","2016-07-04                             0                             0   \n","2016-07-05                             0                             0   \n","\n","            air_genre_name_Okonomiyaki/Monja/Teppanyaki  air_genre_name_Other  \\\n","visit_date                                                                      \n","2016-07-01                                            0                     0   \n","2016-07-02                                            0                     0   \n","2016-07-03                                            0                     0   \n","2016-07-04                                            0                     0   \n","2016-07-05                                            0                     0   \n","\n","            air_genre_name_Western food  air_genre_name_Yakiniku/Korean food  \n","visit_date                                                                    \n","2016-07-01                            0                                    0  \n","2016-07-02                            0                                    0  \n","2016-07-03                            0                                    0  \n","2016-07-04                            0                                    0  \n","2016-07-05                            0                                    0  \n","\n","[5 rows x 108 columns]"]},"metadata":{}}]},{"cell_type":"markdown","source":"数据集划分","metadata":{}},{"cell_type":"code","source":"data['visitors_log1p'] = np.log1p(data['visitors'])\ntrain = data[(data['is_test'] == False) & (data['is_outlier'] == False) & (data['was_nil'] == False)]\ntest = data[data['is_test']].sort_values('test_number')\n\nto_drop = ['air_store_id', 'is_test', 'test_number', 'visit_date', 'was_nil',\n           'is_outlier', 'visitors_capped', 'visitors',\n           'air_area_name', 'latitude', 'longitude', 'visitors_capped_log1p']\ntrain = train.drop(to_drop, axis='columns')\ntrain = train.dropna()\ntest = test.drop(to_drop, axis='columns')\n\nX_train = train.drop('visitors_log1p', axis='columns')\nX_test = test.drop('visitors_log1p', axis='columns')\ny_train = train['visitors_log1p']\n","metadata":{"collapsed":true},"execution_count":26,"outputs":[]},{"cell_type":"code","source":"X_train.head()","metadata":{},"execution_count":27,"outputs":[{"execution_count":27,"output_type":"execute_result","data":{"text/html":["<div>\n","<style>\n","    .dataframe thead tr:only-child th {\n","        text-align: right;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: left;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>is_holiday</th>\n","      <th>prev_day_is_holiday</th>\n","      <th>next_day_is_holiday</th>\n","      <th>is_weekend</th>\n","      <th>day_of_month</th>\n","      <th>optimized_ewm_by_air_store_id_&amp;_day_of_week</th>\n","      <th>optimized_ewm_by_air_store_id_&amp;_is_weekend</th>\n","      <th>optimized_ewm_log1p_by_air_store_id_&amp;_day_of_week</th>\n","      <th>optimized_ewm_log1p_by_air_store_id_&amp;_is_weekend</th>\n","      <th>visitors_capped_mean_by_air_store_id_&amp;_day_of_week</th>\n","      <th>...</th>\n","      <th>air_genre_name_Dining bar</th>\n","      <th>air_genre_name_International cuisine</th>\n","      <th>air_genre_name_Italian/French</th>\n","      <th>air_genre_name_Izakaya</th>\n","      <th>air_genre_name_Japanese food</th>\n","      <th>air_genre_name_Karaoke/Party</th>\n","      <th>air_genre_name_Okonomiyaki/Monja/Teppanyaki</th>\n","      <th>air_genre_name_Other</th>\n","      <th>air_genre_name_Western food</th>\n","      <th>air_genre_name_Yakiniku/Korean food</th>\n","    </tr>\n","    <tr>\n","      <th>visit_date</th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","      <th></th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>2016-07-15</th>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>0</td>\n","      <td>15</td>\n","      <td>35.000700</td>\n","      <td>31.642520</td>\n","      <td>3.588106</td>\n","      <td>3.425707</td>\n","      <td>38.5</td>\n","      <td>...</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>1</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-16</th>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>1</td>\n","      <td>16</td>\n","      <td>9.061831</td>\n","      <td>8.618812</td>\n","      <td>2.302603</td>\n","      <td>2.003579</td>\n","      <td>10.0</td>\n","      <td>...</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>1</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-19</th>\n","      <td>0</td>\n","      <td>1.0</td>\n","      <td>0.0</td>\n","      <td>0</td>\n","      <td>19</td>\n","      <td>24.841272</td>\n","      <td>27.988385</td>\n","      <td>3.252832</td>\n","      <td>2.428565</td>\n","      <td>24.5</td>\n","      <td>...</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>1</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-20</th>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>0</td>\n","      <td>20</td>\n","      <td>29.198575</td>\n","      <td>27.675525</td>\n","      <td>3.412813</td>\n","      <td>2.667124</td>\n","      <td>32.5</td>\n","      <td>...</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>1</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","    </tr>\n","    <tr>\n","      <th>2016-07-21</th>\n","      <td>0</td>\n","      <td>0.0</td>\n","      <td>0.0</td>\n","      <td>0</td>\n","      <td>21</td>\n","      <td>32.710972</td>\n","      <td>26.767268</td>\n","      <td>3.537397</td>\n","      <td>2.761626</td>\n","      <td>31.0</td>\n","      <td>...</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>1</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","      <td>0</td>\n","    </tr>\n","  </tbody>\n","</table>\n","<p>5 rows × 96 columns</p>\n","</div>"],"text/plain":["            is_holiday  prev_day_is_holiday  next_day_is_holiday  is_weekend  \\\n","visit_date                                                                     \n","2016-07-15           0                  0.0                  0.0           0   \n","2016-07-16           0                  0.0                  0.0           1   \n","2016-07-19           0                  1.0                  0.0           0   \n","2016-07-20           0                  0.0                  0.0           0   \n","2016-07-21           0                  0.0                  0.0           0   \n","\n","            day_of_month  optimized_ewm_by_air_store_id_&_day_of_week  \\\n","visit_date                                                              \n","2016-07-15            15                                    35.000700   \n","2016-07-16            16                                     9.061831   \n","2016-07-19            19                                    24.841272   \n","2016-07-20            20                                    29.198575   \n","2016-07-21            21                                    32.710972   \n","\n","            optimized_ewm_by_air_store_id_&_is_weekend  \\\n","visit_date                                               \n","2016-07-15                                   31.642520   \n","2016-07-16                                    8.618812   \n","2016-07-19                                   27.988385   \n","2016-07-20                                   27.675525   \n","2016-07-21                                   26.767268   \n","\n","            optimized_ewm_log1p_by_air_store_id_&_day_of_week  \\\n","visit_date                                                      \n","2016-07-15                                           3.588106   \n","2016-07-16                                           2.302603   \n","2016-07-19                                           3.252832   \n","2016-07-20                                           3.412813   \n","2016-07-21                                           3.537397   \n","\n","            optimized_ewm_log1p_by_air_store_id_&_is_weekend  \\\n","visit_date                                                     \n","2016-07-15                                          3.425707   \n","2016-07-16                                          2.003579   \n","2016-07-19                                          2.428565   \n","2016-07-20                                          2.667124   \n","2016-07-21                                          2.761626   \n","\n","            visitors_capped_mean_by_air_store_id_&_day_of_week  \\\n","visit_date                                                       \n","2016-07-15                                               38.5    \n","2016-07-16                                               10.0    \n","2016-07-19                                               24.5    \n","2016-07-20                                               32.5    \n","2016-07-21                                               31.0    \n","\n","                           ...                   air_genre_name_Dining bar  \\\n","visit_date                 ...                                               \n","2016-07-15                 ...                                           0   \n","2016-07-16                 ...                                           0   \n","2016-07-19                 ...                                           0   \n","2016-07-20                 ...                                           0   \n","2016-07-21                 ...                                           0   \n","\n","            air_genre_name_International cuisine  \\\n","visit_date                                         \n","2016-07-15                                     0   \n","2016-07-16                                     0   \n","2016-07-19                                     0   \n","2016-07-20                                     0   \n","2016-07-21                                     0   \n","\n","            air_genre_name_Italian/French  air_genre_name_Izakaya  \\\n","visit_date                                                          \n","2016-07-15                              1                       0   \n","2016-07-16                              1                       0   \n","2016-07-19                              1                       0   \n","2016-07-20                              1                       0   \n","2016-07-21                              1                       0   \n","\n","            air_genre_name_Japanese food  air_genre_name_Karaoke/Party  \\\n","visit_date                                                               \n","2016-07-15                             0                             0   \n","2016-07-16                             0                             0   \n","2016-07-19                             0                             0   \n","2016-07-20                             0                             0   \n","2016-07-21                             0                             0   \n","\n","            air_genre_name_Okonomiyaki/Monja/Teppanyaki  air_genre_name_Other  \\\n","visit_date                                                                      \n","2016-07-15                                            0                     0   \n","2016-07-16                                            0                     0   \n","2016-07-19                                            0                     0   \n","2016-07-20                                            0                     0   \n","2016-07-21                                            0                     0   \n","\n","            air_genre_name_Western food  air_genre_name_Yakiniku/Korean food  \n","visit_date                                                                    \n","2016-07-15                            0                                    0  \n","2016-07-16                            0                                    0  \n","2016-07-19                            0                                    0  \n","2016-07-20                            0                                    0  \n","2016-07-21                            0                                    0  \n","\n","[5 rows x 96 columns]"]},"metadata":{}}]},{"cell_type":"code","source":"y_train.head()","metadata":{},"execution_count":28,"outputs":[{"execution_count":28,"output_type":"execute_result","data":{"text/plain":["visit_date\n","2016-07-15    3.367296\n","2016-07-16    1.791759\n","2016-07-19    3.258097\n","2016-07-20    2.995732\n","2016-07-21    3.871201\n","Name: visitors_log1p, dtype: float64"]},"metadata":{}}]},{"cell_type":"markdown","source":"看一看是不是哪还有问题","metadata":{}},{"cell_type":"code","source":"assert X_train.isnull().sum().sum() == 0\nassert y_train.isnull().sum() == 0\nassert len(X_train) == len(y_train)\nassert X_test.isnull().sum().sum() == 0\nassert len(X_test) == 32019","metadata":{"collapsed":true},"execution_count":29,"outputs":[]},{"cell_type":"markdown","source":"lightgbm建模","metadata":{}},{"cell_type":"code","source":"import lightgbm as lgbm\nfrom sklearn import metrics\nfrom sklearn import model_selection\n\n\nnp.random.seed(42)\n\nmodel = lgbm.LGBMRegressor(\n    objective='regression',\n    max_depth=5,\n    num_leaves=25,\n    learning_rate=0.007,\n    n_estimators=1000,\n    min_child_samples=80,\n    subsample=0.8,\n    colsample_bytree=1,\n    reg_alpha=0,\n    reg_lambda=0,\n    random_state=np.random.randint(10e6)\n)\n\nn_splits = 6\ncv = model_selection.KFold(n_splits=n_splits, shuffle=True, random_state=42)\n\nval_scores = [0] * n_splits\n\nsub = submission['id'].to_frame()\nsub['visitors'] = 0\n\nfeature_importances = pd.DataFrame(index=X_train.columns)\n\nfor i, (fit_idx, val_idx) in enumerate(cv.split(X_train, y_train)):\n    \n    X_fit = X_train.iloc[fit_idx]\n    y_fit = y_train.iloc[fit_idx]\n    X_val = X_train.iloc[val_idx]\n    y_val = y_train.iloc[val_idx]\n    \n    model.fit(\n        X_fit,\n        y_fit,\n        eval_set=[(X_fit, y_fit), (X_val, y_val)],\n        eval_names=('fit', 'val'),\n        eval_metric='l2',\n        early_stopping_rounds=200,\n        feature_name=X_fit.columns.tolist(),\n        verbose=False\n    )\n    \n    val_scores[i] = np.sqrt(model.best_score_['val']['l2'])\n    sub['visitors'] += model.predict(X_test, num_iteration=model.best_iteration_)\n    feature_importances[i] = model.feature_importances_\n    \n    print('Fold {} RMSLE: {:.5f}'.format(i+1, val_scores[i]))\n    \nsub['visitors'] /= n_splits\nsub['visitors'] = np.expm1(sub['visitors'])\n\nval_mean = np.mean(val_scores)\nval_std = np.std(val_scores)\n\nprint('Local RMSLE: {:.5f} (±{:.5f})'.format(val_mean, val_std))","metadata":{},"execution_count":30,"outputs":[{"name":"stdout","output_type":"stream","text":"Fold 1 RMSLE: 0.48936\n\nFold 2 RMSLE: 0.49091\n\nFold 3 RMSLE: 0.48654\n\nFold 4 RMSLE: 0.48831\n\nFold 5 RMSLE: 0.48788\n\nFold 6 RMSLE: 0.48706\n\nLocal RMSLE: 0.48834 (±0.00146)\n"}]},{"cell_type":"markdown","source":"输出结果","metadata":{}},{"cell_type":"code","source":"sub.to_csv('result.csv', index=False)","metadata":{"collapsed":true},"execution_count":31,"outputs":[]},{"cell_type":"code","source":"import pandas as pd\ndf = pd.read_csv('result.csv')\ndf.head()","metadata":{},"execution_count":1,"outputs":[{"execution_count":1,"output_type":"execute_result","data":{"text/html":["<div>\n","<style>\n","    .dataframe thead tr:only-child th {\n","        text-align: right;\n","    }\n","\n","    .dataframe thead th {\n","        text-align: left;\n","    }\n","\n","    .dataframe tbody tr th {\n","        vertical-align: top;\n","    }\n","</style>\n","<table border=\"1\" class=\"dataframe\">\n","  <thead>\n","    <tr style=\"text-align: right;\">\n","      <th></th>\n","      <th>id</th>\n","      <th>visitors</th>\n","    </tr>\n","  </thead>\n","  <tbody>\n","    <tr>\n","      <th>0</th>\n","      <td>air_00a91d42b08b08d9_2017-04-23</td>\n","      <td>4.340348</td>\n","    </tr>\n","    <tr>\n","      <th>1</th>\n","      <td>air_00a91d42b08b08d9_2017-04-24</td>\n","      <td>22.739363</td>\n","    </tr>\n","    <tr>\n","      <th>2</th>\n","      <td>air_00a91d42b08b08d9_2017-04-25</td>\n","      <td>29.535532</td>\n","    </tr>\n","    <tr>\n","      <th>3</th>\n","      <td>air_00a91d42b08b08d9_2017-04-26</td>\n","      <td>29.319551</td>\n","    </tr>\n","    <tr>\n","      <th>4</th>\n","      <td>air_00a91d42b08b08d9_2017-04-27</td>\n","      <td>31.838669</td>\n","    </tr>\n","  </tbody>\n","</table>\n","</div>"],"text/plain":["                                id   visitors\n","0  air_00a91d42b08b08d9_2017-04-23   4.340348\n","1  air_00a91d42b08b08d9_2017-04-24  22.739363\n","2  air_00a91d42b08b08d9_2017-04-25  29.535532\n","3  air_00a91d42b08b08d9_2017-04-26  29.319551\n","4  air_00a91d42b08b08d9_2017-04-27  31.838669"]},"metadata":{}}]},{"cell_type":"code","source":"","metadata":{"collapsed":true},"execution_count":null,"outputs":[]}],"metadata":{"kernelspec":{"display_name":"Python 3","language":"python","name":"python3"},"language_info":{"name":"python","version":"3.6.8","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"}},"nbformat":4,"nbformat_minor":2}