# Endpoint that returns one month of historical weather data; the JSON
# response carries an HTML <table> fragment in its "data" field.
url="https://tianqi.2345.com/Pc/GetHistory"

# Headers copied from a real browser session: the site checks for a plausible
# User-Agent/referer, and the cookie pins the session to a location.
headers={
    "User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36 Edg/114.0.1823.86",
    "referer":"https://tianqi.2345.com/wea_history/54511.htm",
# NOTE(review): the lastAreaName value below looks like mojibake (UTF-8 text
# decoded as Latin-1). It is sent verbatim as captured from the browser, so it
# is deliberately left unchanged — confirm against a fresh session if requests fail.
"cookie":"positionCityID=60275; positionCityPinyin=xinglong; Hm_lvt_a3f2879f6b3620a363bec646b7a8bcdd=1739541021; HMACCOUNT=5C23096BD3AC5D21; lastTownId=-1; lastTownTime=1739541004; lastAreaName=å¤©æ´¥; lastCountyId=54511; lastCountyPinyin=beijing; lastProvinceId=12; lastCityId=54511; lastCountyTime=1739541377; Hm_lpvt_a3f2879f6b3620a363bec646b7a8bcdd=1739541380"
}
from io import StringIO

import requests
import pandas as pd
def craw_table(year, month, area_id=54511):
    """Fetch one month of historical weather data and return it as a DataFrame.

    Args:
        year: Four-digit year to request.
        month: Month number, 1-12.
        area_id: Station/area id used by the 2345 weather API
            (defaults to 54511, the Beijing station used elsewhere in this script).

    Returns:
        pandas.DataFrame parsed from the first HTML table in the response.

    Raises:
        requests.HTTPError: if the server returns a non-2xx status.
        requests.Timeout: if the request exceeds the timeout.
    """
    params = {
        "areaInfo[areaId]": area_id,
        "areaInfo[areaType]": 2,
        "date[year]": year,
        "date[month]": month,
    }
    # timeout prevents the crawl loop from hanging forever on a stalled connection
    resp = requests.get(url, headers=headers, params=params, timeout=30)
    resp.raise_for_status()  # fail fast instead of raising a confusing JSON/KeyError later
    data = resp.json()["data"]  # HTML <table> fragment as a string
    # pandas >= 2.1 deprecates passing literal HTML to read_html; wrap in StringIO
    df = pd.read_html(StringIO(data))[0]
    return df
if __name__ == "__main__":  # guard so importing this module does not trigger the crawl
    # Crawl every month from Jan 2011 through Dec 2023 (13 calendar years).
    # NOTE(review): the output filename says "10 years" but the range covers 13;
    # filename kept as-is for compatibility with downstream consumers.
    df_list = []
    for year in range(2011, 2024):
        for month in range(1, 13):
            print("爬取", year, month)
            df = craw_table(year, month)
            df_list.append(df)
    # Writing .xlsx requires the openpyxl engine to be installed.
    pd.concat(df_list).to_excel("北京10年天气数据.xlsx", index=False)