# coding=utf-8

import requests
from bs4 import BeautifulSoup
from pyecharts import options as opts

from pyecharts.charts import Bar

"""
echarts 参考地址
http://pyecharts.herokuapp.com/
"""

# Browser-like User-Agent so weather.com.cn serves the normal page instead of
# rejecting the default python-requests UA.
# NOTE(review): the name is presumably a typo of HEADERS — kept as-is because
# parse_page references it by this name.
HREADERS = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36"
}


def parse_page(url):
    """Fetch one regional text-forecast page of weather.com.cn and parse it.

    Only the first day's forecast block (`div.conMidtab`) is parsed; using
    find_all here would also pick up the hidden blocks for the following days.

    :param url: a textFC region page, e.g. http://www.weather.com.cn/textFC/hb.shtml
    :return: list of dicts, one per area, with keys
             'city', 'area', '白天天气现象' (day weather), '夜间天气现象'
             (night weather), '风向风力' (wind), '最低温度' (min temp, int).
    """
    response = requests.get(url, headers=HREADERS)
    html = response.content.decode("utf-8")
    # html5lib tolerates the malformed markup on these pages better than lxml.
    soup = BeautifulSoup(html, 'html5lib')
    conMidtab = soup.find('div', class_="conMidtab")
    tables = conMidtab.find_all('table')
    all_area_weather = []

    def first_text(cells, num):
        # First non-blank string inside table cell `num`.
        return list(cells[num].stripped_strings)[0]

    for tab in tables:
        # Skip the two header rows of each province table.
        trs = tab.find_all('tr')[2:]
        city = first_text(trs[0].find_all('td'), 0)
        for index, tr in enumerate(trs):
            tds = tr.find_all('td')
            # The first data row carries an extra leading <td> with the
            # province/city name (rowspan), shifting every column right by
            # one.  The original code offset only the 'area' column and read
            # the fixed indices 2/5/6 — correct solely for the first row;
            # apply the same offset to every positional read instead.
            offset = 1 if index == 0 else 0
            all_area_weather.append({
                'city': city,
                'area': first_text(tds, offset),
                # NOTE(review): column meanings inferred from the first-row
                # layout (day weather, night weather, night wind) — confirm
                # against the live page.  The original assigned both columns
                # to the same key '天气现象', silently losing the first value.
                '白天天气现象': first_text(tds, offset + 1),
                '夜间天气现象': first_text(tds, offset + 4),
                '风向风力': first_text(tds, offset + 5),
                # Min temperature is the second-to-last column on every row.
                '最低温度': int(first_text(tds, -2)),
            })
    return all_area_weather


if __name__ == '__main__':
    # Regional pages of weather.com.cn's text forecast; uncomment more of
    # them to cover the whole country.
    urls = [
        'http://www.weather.com.cn/textFC/hb.shtml',
        'http://www.weather.com.cn/textFC/db.shtml',
        # "http://www.weather.com.cn/textFC/hd.shtml",
        # "http://www.weather.com.cn/textFC/hz.shtml",
        # "http://www.weather.com.cn/textFC/xb.shtml",
        # "http://www.weather.com.cn/textFC/xn.shtml",
        # "http://www.weather.com.cn/textFC/gat.shtml",
    ]

    all_data = []
    for url_ in urls:
        all_data.extend(parse_page(url_))

    # list.sort() sorts in place and returns None — the original assigned
    # that None to an unused variable, which was misleading.
    # NOTE(review): -min_temp sorts descending, so the "top 10" are the
    # areas with the HIGHEST minimum temperature, despite the chart title
    # saying 最低 (lowest) — confirm the intended direction.
    all_data.sort(key=lambda data: -data['最低温度'])
    top_10 = all_data[:10]
    for d in top_10:
        print(d)

    cities = [d["area"] for d in top_10]
    min_tmp = [d["最低温度"] for d in top_10]

    # Render with pyecharts (see module note for the reference site):
    # chart = (
    #     Bar()
    #         .add_xaxis(cities)
    #         .add_yaxis("最低温度", min_tmp)
    #         .set_global_opts(title_opts=opts.TitleOpts(title="中国最低天气排行榜"))
    # )
    # chart.render("min_temperature.html")
