import requests as req
import json
#获取天气网站数据
headers = {
    "Accept": "*/*",
    "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
    "Connection": "keep-alive",
    "Referer": "https://www.weather.com.cn/",
    "Sec-Fetch-Dest": "script",
    "Sec-Fetch-Mode": "no-cors",
    "Sec-Fetch-Site": "same-site",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36 Edg/138.0.0.0",
    "sec-ch-ua": "\"Not)A;Brand\";v=\"8\", \"Chromium\";v=\"138\", \"Microsoft Edge\";v=\"138\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"Windows\""
}
cookies = {
    "Hm_lvt_080dabacb001ad3dc8b9b9049b36d43b": "1754636912",
    "HMACCOUNT": "8E1DC484A663F0C2",
    "f_city": "%E6%B7%B1%E5%9C%B3%7C101280601%7C",
    "Hm_lpvt_080dabacb001ad3dc8b9b9049b36d43b": "1754638126"
}
url = "https://d1.weather.com.cn/weather_index/101280601.html"
params = {
    "_": "1754638126307"
}
# Fetch the weather payload. The endpoint returns a JavaScript file of the
# form "var a=<json>;var b=<json>;...", not plain JSON.
# timeout keeps the script from hanging forever on a stalled connection.
response = req.get(url, headers=headers, cookies=cookies, params=params, timeout=10)
response.raise_for_status()  # fail fast on HTTP errors instead of parsing an error page
res_data = response.content.decode("utf-8")
# Split the "var name=<json>" chain on "=" and take segment 5, which is
# expected to be the forecast JSON object.
# NOTE(review): this is fragile — any "=" character inside a JSON string
# value shifts the segment index; confirm against a captured response.
data = res_data.split("=")
weather_dict = json.loads(data[5])

# Build and print a simplified per-day forecast report.
# NOTE(review): 'f' appears to hold the list of per-day forecast entries; the
# short keys below ('fj', 'fc', ...) are the site's compact field names —
# the label meanings are assumptions, confirm against a captured response.
weather_lists = weather_dict['f']
four_days_weather = []
for weather in weather_lists:
    # Use a distinct name here: the original reused `weather_dict` as the
    # loop-built dict, clobbering the parsed payload of the same name.
    day_forecast = {
        "week": weather['fj'],    # presumably the weekday label
        "tmax": weather['fc'],    # presumably the daytime high temperature
        "tmin": weather['fd'],    # presumably the nighttime low temperature
        "feng": weather['fe'],    # presumably the wind direction
        "daxiao": weather['fg'],  # presumably the wind strength
    }
    four_days_weather.append(day_forecast)
    four_days_weather.append("\n")  # blank-line separator between days in the printout

for i in four_days_weather:
    print(i)
