# coding=utf-8
from io import StringIO
from pathlib import Path

import pandas as pd
import requests

# Endpoint that returns one month of historical weather for one city;
# the JSON payload's "data" field contains an HTML table.
url = "https://tianqi.2345.com/Pc/GetHistory"
# City name -> 2345.com area id (sent as areaInfo[areaId] in the request).
codes = {
    "合川": 60433,
    "渝北": 70966,
    "巴南": 70964,
    "潼南": 60292
}  # City codes: 60433 Hechuan, 70966 Yubei, 70964 Banan, 60292 Tongnan

def craw_table(year, month, city_code):
    """Fetch one month of historical weather data for one city.

    :param year: four-digit year, e.g. 2023
    :param month: month number, 1-12
    :param city_code: 2345.com area id (see the ``codes`` mapping)
    :return: ``pandas.DataFrame`` parsed from the HTML table embedded in
        the JSON response's ``data`` field
    :raises requests.HTTPError: if the server returns a non-2xx status
    :raises requests.Timeout: if the server does not respond within 30s
    """
    headers = {
        "Referer": "https://tianqi.2345.com/wea_history/70964.htm",
        "Cookie": "positionCityID=70964; positionCityPinyin=yuzhong",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36 Edg/123.0.0.0"
    }
    params = {
        "areaInfo[areaId]": city_code,
        "areaInfo[areaType]": 2,
        "date[year]": year,
        "date[month]": month
    }

    # timeout keeps the crawler from hanging forever on a stalled connection
    resp = requests.get(url, headers=headers, params=params, timeout=30)
    # fail fast on HTTP errors instead of trying to json-parse an error page
    resp.raise_for_status()
    # NOTE(review): forcing GBK looks suspicious — this API typically serves
    # UTF-8 JSON; confirm the response charset before relying on this line.
    resp.encoding = "gbk"

    data = resp.json()["data"]
    html_data = StringIO(data)
    df = pd.read_html(html_data)[0]
    return df

# Crawl 8 years (2016-2023) of monthly weather data for each of the four cities.
for city_name, city_code in codes.items():
    df_list = []
    for year in range(2016, 2024):
        for month in range(1, 13):
            df = craw_table(year, month, city_code)
            df_list.append(df)

    # Ensure the output directory exists before writing — previously this
    # raised FileNotFoundError when ./data was missing, losing all the
    # just-crawled data for the city.
    out_dir = Path("./data")
    out_dir.mkdir(parents=True, exist_ok=True)
    # Concatenate all months and save to a CSV named after the city;
    # ignore_index avoids carrying duplicate per-month row indices.
    pd.concat(df_list, ignore_index=True).to_csv(out_dir / f"{city_name}.csv", index=False)
