import json

import requests
import bs4
import cityinfo


def get_web(url):
    """Fetch *url* and return the raw response body as bytes.

    Returning undecoded bytes lets BeautifulSoup sniff the page's real
    encoding.  The previous ``res.text.encode('ISO-8859-1')`` round-trip
    only reproduced the original bytes when requests had fallen back to
    its ISO-8859-1 default, and raised ``UnicodeEncodeError`` whenever
    the server actually declared a charset; ``res.content`` is the raw
    body in every case.

    Raises
    ------
    requests.RequestException
        On network failure, timeout, or an HTTP error status (the
        caller's retry loop in ``get_weather`` handles these).
    """
    header = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36"}
    # Timeout so a stalled connection fails fast instead of hanging the
    # retry loop; raise_for_status turns 4xx/5xx into retryable errors.
    res = requests.get(url, headers=header, timeout=10)
    res.raise_for_status()

    return res.content


def parse_content(content):
    """Parse a weather.com.cn 7-day forecast page.

    Parameters
    ----------
    content : bytes
        Raw HTML of the forecast page.

    Returns
    -------
    tuple[list, list, list, list]
        ``(days, weather, temperatures, winds)``.  The first temperature
        entry is a single string (the site omits the high temperature
        for the current day after dark); the remaining entries are
        ``[high, low]`` pairs.
    """
    soup = bs4.BeautifulSoup(content, 'lxml')

    # Per-day weather description (<p class="wea">).
    list_weather = [p.text for p in soup.find_all('p', class_='wea')]

    # Dates: only the first seven <h1> tags are forecast days, so slice
    # instead of counting through every heading on the page.
    list_day = [h.text.strip() for h in soup.find_all('h1')[:7]]

    # Temperatures (<p class="tem">): the first entry may lack the
    # high-temperature <span>, so it is kept as the single <i> value;
    # subsequent entries are [high, low] pairs.
    list_tem = []
    for idx, p in enumerate(soup.find_all('p', class_='tem')):
        if idx == 0:
            list_tem.append(p.i.text)
        else:
            list_tem.append([p.span.text, p.i.text])

    # Per-day wind strength (<p class="win">).
    list_wind = [p.i.text.strip() for p in soup.find_all('p', class_='win')]

    return list_day, list_weather, list_tem, list_wind


def get_content(url):
    """Fetch *url* and return the 7-day forecast as a list of dicts.

    The first day's dict carries a single "气温" (temperature) key,
    because ``parse_content`` stores only one value for the current day;
    the remaining days carry separate "最高气温"/"最低气温" keys.

    Raises
    ------
    IndexError
        If the page yields fewer than 7 entries in any parsed list.
    """
    content = get_web(url)
    day, weather, tem, wind = parse_content(content)
    res = []
    # The original `item` counter was a one-shot flag re-implementing
    # `i == 0`; test the index directly.
    for i in range(7):
        if i == 0:
            res.append({"时间": day[i], "天气": weather[i],
                        "气温": tem[i], "风力": wind[i]})
        else:
            res.append({"时间": day[i], "天气": weather[i],
                        "最高气温": tem[i][0], "最低气温": tem[i][1],
                        "风力": wind[i]})
    return res


def get_weather(province, city, max_retries=None):
    """Scrape the 7-day forecast for *city* in *province*.

    Looks up the city's area id in ``cityinfo.cityInfo`` and retries the
    scrape on failure.  ``max_retries=None`` preserves the original
    retry-forever behaviour; pass an int to cap the attempts.

    Returns
    -------
    tuple
        ``(forecast, city_id)`` where *forecast* is the list built by
        ``get_content``.

    Raises
    ------
    KeyError
        If the province/city pair is not in ``cityinfo.cityInfo``.
    RuntimeError
        If *max_retries* is given and every attempt fails.
    """
    city_id = cityinfo.cityInfo[province][city]["AREAID"]
    # The URL never changes between attempts; build it once.
    url = f"http://www.weather.com.cn/weather/{city_id}.shtml"
    attempts = 0
    while max_retries is None or attempts < max_retries:
        try:
            print("正在爬取数据...........................")
            res = get_content(url)
            print("爬取完毕！！")
            return res, city_id
        except Exception as e:
            # Best-effort retry: report the error and try again.  (The
            # original `else: break` after `continue` was unreachable.)
            print(e)
            attempts += 1
    raise RuntimeError(f"爬取失败：{max_retries} 次尝试均未成功")


