import requests
from bs4 import BeautifulSoup
import time

# Module-level accumulator: get_weather() appends one dict per city
# ({'city': ..., 'max': ..., 'min': ...}); weather() reads it to print results.
WEATHER = []

def get_weather(url):
    """Fetch one regional forecast page and append a record per city to WEATHER.

    Each record is a dict with keys:
      'city' -- province name concatenated with the city name,
      'max'  -- daytime high temperature (string, as shown on the page),
      'min'  -- nighttime low temperature (string, as shown on the page).

    Raises requests.HTTPError if the server returns an error status.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36',
        'Host': 'www.weather.com.cn',
        'Upgrade-Insecure-Requests': '1',
    }
    req = requests.get(url, headers=headers)
    # Fail loudly on HTTP errors instead of silently parsing an error page.
    req.raise_for_status()

    soup = BeautifulSoup(req.content, 'lxml')
    # The page nests one conMidtab2 table per province inside conMidtab.
    conMidtab = soup.find('div', class_='conMidtab')
    conMidtab2_list = conMidtab.find_all('div', class_='conMidtab2')

    for table in conMidtab2_list:
        # Skip the two header rows of each province table.
        tr_list = table.find_all('tr')[2:]
        province = ''  # reset for each province table
        for index, tr in enumerate(tr_list):
            td_list = tr.find_all('td')
            if index == 0:
                # The first data row also carries the province name in
                # column 0, shifting the city/temperature columns right by one.
                province = td_list[0].text.replace('\n', '')
                city = td_list[1].text.replace('\n', '')
                high = td_list[4].text.replace('\n', '')  # renamed: don't shadow builtin max()
                low = td_list[7].text.replace('\n', '')   # renamed: don't shadow builtin min()
            else:
                city = td_list[0].text.replace('\n', '')
                high = td_list[3].text.replace('\n', '')
                low = td_list[6].text.replace('\n', '')
            WEATHER.append({
                'city': province + city,
                'max': high,
                'min': low,
            })
def weather():
    """Scrape every regional forecast page, then print each collected record once.

    Fills the module-level WEATHER list via get_weather() and prints all
    records after collection finishes.
    """
    urls = [
        'http://www.weather.com.cn/textFC/hb.shtml',
        'http://www.weather.com.cn/textFC/db.shtml',
        'http://www.weather.com.cn/textFC/hd.shtml',
        'http://www.weather.com.cn/textFC/hz.shtml',
        'http://www.weather.com.cn/textFC/hn.shtml',
        'http://www.weather.com.cn/textFC/xb.shtml',
        'http://www.weather.com.cn/textFC/xn.shtml',
    ]
    for url in urls:
        get_weather(url)
        time.sleep(2)  # be polite to the server between requests
    # Print once after all pages are fetched; printing inside the loop above
    # re-printed every previously collected record on each iteration.
    # The loop variable is named 'record' so it does not shadow this function.
    for record in WEATHER:
        print(record)

# Guard the entry point so importing this module does not trigger scraping.
if __name__ == '__main__':
    weather()