import requests
from bs4 import BeautifulSoup

# Target URL: 7-day forecast page for CMA station 57494.
WEATHER_URL = "https://weather.cma.cn/web/weather/57494.html"
OUTPUT_FILE = "weather_data.txt"


def fetch_page(url):
    """Download *url* and return its decoded HTML text.

    Raises requests.HTTPError on a non-2xx response instead of silently
    parsing an error page, and bounds the request with a timeout so the
    script cannot hang forever on a dead connection.
    """
    response = requests.get(url, timeout=10)
    response.raise_for_status()
    response.encoding = 'utf-8'  # ensure Chinese characters decode correctly
    return response.text


def parse_forecast(html):
    """Parse the 7-day forecast section into a list of per-day dicts.

    Each dict has string values under the keys: date, weather, wind,
    high_temp, low_temp, night_weather.
    """
    soup = BeautifulSoup(html, 'html.parser')
    days_section = soup.find(id="dayList")
    if days_section is None:
        # Page layout changed or the request was blocked; nothing to parse.
        return []

    forecast = []
    for day in days_section.find_all("div", class_="pull-left day"):
        # Hoist the repeated lookup: the original called find_all on the
        # same node four separate times per day.
        items = day.find_all("div", class_="day-item")
        # Index layout assumed from the CMA page structure:
        # 0 = date/weekday, 2 = daytime weather, 4 = wind, 7 = night weather.
        forecast.append({
            'date': items[0].get_text(strip=True),
            'weather': items[2].get_text(strip=True),
            'wind': items[4].get_text(strip=True),
            'high_temp': day.find("div", class_="high").get_text(strip=True),
            'low_temp': day.find("div", class_="low").get_text(strip=True),
            'night_weather': items[7].get_text(strip=True),
        })
    return forecast


def format_forecast(forecast_data):
    """Render the forecast list into the exact text written to the file."""
    lines = []
    for day in forecast_data:
        lines.append(f"Date: {day['date']}\n")
        lines.append(f"Weather: {day['weather']}\n")
        lines.append(f"Wind: {day['wind']}\n")
        lines.append(f"High Temperature: {day['high_temp']}\n")
        lines.append(f"Low Temperature: {day['low_temp']}\n")
        lines.append(f"Night Weather: {day['night_weather']}\n")
        lines.append("\n")
    return "".join(lines)


def main():
    """Fetch, parse and save the forecast, mirroring the original script."""
    html = fetch_page(WEATHER_URL)
    forecast_data = parse_forecast(html)
    # Save the weather data to a local TXT file.
    with open(OUTPUT_FILE, "w", encoding="utf-8") as file:
        file.write(format_forecast(forecast_data))
    print("数据已保存到 'weather_data.txt' 文件。")


if __name__ == "__main__":
    main()

