import requests
from bs4 import BeautifulSoup
import csv

def getdata1(url):
    """Fetch *url* and return the response body decoded as UTF-8 text.

    A browser User-Agent is sent because weather.com.cn rejects the
    default requests UA.

    Args:
        url: page URL to download.

    Returns:
        The page HTML as a str.

    Raises:
        requests.HTTPError: if the server answers with a 4xx/5xx status.
        requests.RequestException: on connection errors or timeout.
    """
    head = {'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36'}
    # timeout keeps the script from hanging forever on a stalled connection
    res = requests.get(url, headers=head, timeout=10)
    # fail loudly on HTTP errors instead of silently parsing an error page
    res.raise_for_status()
    # force UTF-8: requests may guess ISO-8859-1 from the headers,
    # which would garble the Chinese text
    res.encoding = 'utf-8'
    return res.text


def exdata1(data):
    """Extract city / high / low temperature records from the forecast HTML.

    Args:
        data: HTML text of a weather.com.cn ``textFC`` regional page.

    Returns:
        A list of dicts with keys '城市', '最高温度', '最低温度'.
    """
    weather_list = []
    soup = BeautifulSoup(data, 'lxml')
    # only the first 5 tables hold the forecast data we want
    for table in soup.find_all('table')[:5]:
        rows = table.find_all('tr')[2:]  # skip the two header rows
        for index, row in enumerate(rows):
            # query the cells once per row instead of re-running find_all
            cells = row.find_all('td')
            # in the first data row of each table the province name occupies
            # the first cell, so the city name sits in the second cell
            city = cells[1] if index == 0 else cells[0]
            weather_list.append({
                '城市': city.text,
                '最高温度': cells[-5].text,
                '最低温度': cells[-2].text,
            })
    return weather_list
def dowdata(data, path='weatherhb.csv'):
    """Write the scraped weather records to a CSV file.

    Args:
        data: iterable of dicts with keys '城市', '最高温度', '最低温度'.
        path: output CSV filename; defaults to the original hard-coded
            'weatherhb.csv' so existing callers are unaffected.
    """
    head = ('城市', '最高温度', '最低温度')
    # utf-8-sig writes a BOM so Excel auto-detects UTF-8;
    # newline='' is required by the csv module to avoid blank rows on Windows
    with open(path, 'w', encoding='utf-8-sig', newline='') as f:
        writer = csv.DictWriter(f, fieldnames=head)
        writer.writeheader()
        writer.writerows(data)

def main():
    """Download the North-China forecast page, parse it and save the CSV."""
    url = 'https://www.weather.com.cn/textFC/hb.shtml'
    html = getdata1(url)
    records = exdata1(html)
    dowdata(records)

# Guard the entry point so importing this module does not trigger the scrape.
if __name__ == '__main__':
    main()