# encoding: utf-8
"""
Created on 2017/12/7

@author: fanxing
"""

import json
import time
from urllib.parse import urlparse

import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import requests
from bs4 import BeautifulSoup

baseUrl = 'http://www.weather.com.cn/textFC/hb.shtml'
TEMPTATURE_LIST = []  # accumulated {"city": ..., "min": ...} records (night minimum temps; original comment wrongly said "max")


def get_html(url):
    """
    Fetch a page via HTTP GET and return its raw body.

    :param url: absolute URL to request
    :return: response body as bytes, or None on a non-200 status
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36',
        # Header values must be strings, not ints.
        'Upgrade-Insecure-Requests': '1',
        'Referer': 'http://www.weather.com.cn/textFC/hb.shtml',
        'Host': 'www.weather.com.cn'
    }
    # Bug fix: the second positional argument of requests.get() is `params`,
    # so the headers were previously being sent as query-string parameters
    # instead of HTTP headers. Pass them via the `headers` keyword.
    response = requests.get(url, headers=headers)
    if response.status_code == 200:
        return response.content
    else:
        print(response)
        return None


def get_host(url):
    """
    Extract the "scheme://host" prefix of a URL, used to turn the relative
    hrefs on the region tab bar into absolute request URLs.

    :param url: an absolute URL such as 'http://example.com/a/b.shtml'
    :return: scheme plus network location, e.g. 'http://example.com'
    """
    parts = urlparse(url)
    # Bug fix: the old `url.split(parse.path)[0]` raised ValueError for URLs
    # with an empty path (str.split('') is illegal) and could mis-split when
    # the path text also occurred inside the host. Rebuild from components.
    return "{}://{}".format(parts.scheme, parts.netloc)


def get_urls(html, url):
    """
    Collect the per-region weather page URLs from the region tab bar,
    excluding the page currently being processed.

    :param html: raw HTML of a region page
    :param url: the URL this HTML was fetched from (filtered from the result)
    :return: list of absolute URLs for the other regions
    """
    base = get_host(url)

    tab_bar = BeautifulSoup(html, 'lxml').find(
        name="ul", attrs={"class": "lq_contentboxTab2"})
    absolute = (base + anchor.attrs["href"] for anchor in tab_bar.find_all("a"))
    return [link for link in absolute if link != url]


def get_temperatures(html):
    """
    Parse today's per-city minimum temperatures from a region page and
    append {"city": ..., "min": ...} records to TEMPTATURE_LIST.

    :param html: raw HTML of a region page, or None (skipped with a message)
    :return: None
    """
    if html is None:
        print("网页内容为空!!!")
        return
    soup = BeautifulSoup(html, 'lxml')
    province = None  # carried from each province's first row to later rows
    # The first conMidtab div holds today's data (siblings are hidden future days).
    conMidtab = soup.find("div", attrs={"class": "conMidtab"})
    # One inner div per province / municipality.
    conMidtab2_list = conMidtab.find_all("div")
    for conMidtab2 in conMidtab2_list:
        # Skip the two table header rows; the rest are one row per city.
        tr_list = conMidtab2.find_all("tr")[2:]
        for index, tr in enumerate(tr_list):
            td_list = tr.find_all("td")
            if index == 0:  # first row carries the province cell (rowspan), shifting columns by one
                province = td_list[0].text.replace("\n", "")
                city = province + td_list[1].text.replace("\n", "")
                min_temp = td_list[7].text.replace("\n", "")
            else:
                city = province + td_list[0].text.replace("\n", "")
                min_temp = td_list[6].text.replace("\n", "")

            # Bug fix: the append used to live inside the else branch only,
            # silently dropping the first city of every province.
            TEMPTATURE_LIST.append({"city": city, "min": min_temp})

    print("一次分析结束")


def get_gat_temperatures(url):
    """
    Scrape Hong Kong / Macau / Taiwan temperatures into TEMPTATURE_LIST.

    The HK/Macau/Taiwan page needs in-page JS to render the full table, so
    it cannot be parsed like the other region pages; instead every <tr> is
    scanned for the known city names, in the order they appear in gat_list.

    :param url: URL of the HK/Macau/Taiwan region page
    :return: None (records are appended to TEMPTATURE_LIST)
    """
    html = get_html(url)

    soup = BeautifulSoup(html, 'html.parser')

    tr_list = soup.find_all('tr')

    # Cities are assumed to appear in exactly this order on the page.
    gat_list = ['香港', '澳门', '台北', '高雄', '台中']

    index = 0  # cursor: position in gat_list of the next city we expect
    for tr in tr_list:
        if (index < 2 and tr.text.find(gat_list[index]) > -1) or (index == 2 and tr.text.find(gat_list[index]) > -1):
            # Hong Kong, Macau, or Taipei: row starts a province block, so
            # column 0 is the province name and column 7 the night low
            # (same layout as a province's first row in get_temperatures).
            td_list = tr.find_all("td")
            province = td_list[0].text.replace("\n", "")
            city = province + td_list[1].text.replace("\n", "")
            min_temp = td_list[7].text.replace("\n", "")
            index += 1
            TEMPTATURE_LIST.append({"city": city, "min": min_temp})

        elif 2 < index < len(gat_list) and tr.text.find(gat_list[index]) > -1:
            # Remaining Taiwan cities: no province cell, columns shift left by one.
            # NOTE(review): relies on `province` having been set by the Taipei
            # branch above — UnboundLocalError if the page order ever changes.
            td_list = tr.find_all("td")
            city = province + td_list[0].text.replace("\n", "")
            min_temp = td_list[6].text.replace("\n", "")
            index += 1
            TEMPTATURE_LIST.append({"city": city, "min": min_temp})

    print("港澳台分析结束")


def spide_temperature():
    """
    Crawl today's minimum temperatures for every region and dump the
    accumulated city/temperature records to temprature.json.

    :return: None
    """
    html = get_html(baseUrl)
    if html is None:
        print('获取失败')
    else:
        get_temperatures(html)

        urls = get_urls(html, baseUrl)

        # The HK/Macau/Taiwan page (last url) needs special parsing.
        get_gat_temperatures(urls[-1])

        # Bug fix: the old slice urls[:-2] also dropped the second-to-last
        # region (西南); only the final gat url is handled separately above.
        for url in urls[:-1]:
            time.sleep(2)  # throttle: be polite to the server

            content = get_html(url)
            get_temperatures(content)

        with open("temprature.json", "w", encoding="utf-8") as fp:
            json.dump(TEMPTATURE_LIST, fp)


def show_temperature():
    """
    Load the scraped temperatures from temprature.json and plot a bar chart
    of 20 randomly sampled cities (duplicates possible) with CJK labels.

    :return: None
    """
    # Bug fix: json.load() lost its `encoding` parameter in Python 3.9
    # (TypeError); declare the encoding when opening the file instead.
    with open("temprature.json", "r", encoding="utf-8") as fp:
        temperature_list = json.load(fp)

    city_list = []  # city labels for the x axis
    temp_list = []  # sampled temperatures (the scraped night lows)
    for _ in range(20):
        record = temperature_list[np.random.randint(0, len(temperature_list))]
        city_list.append(record["city"])
        temp_list.append(int(record["min"]))

    ind = np.arange(len(temp_list))
    print(ind)
    print(city_list)
    print(temp_list)

    # Use a CJK-capable font so Chinese labels render instead of boxes.
    # Raw string: '\W', '\F' are invalid escape sequences in a normal literal.
    zhfont1 = matplotlib.font_manager.FontProperties(fname=r'C:\Windows\Fonts\simsun.ttc')

    fig, ax = plt.subplots()
    plt.bar(ind, temp_list)
    plt.xticks(ind, city_list, fontproperties=zhfont1, rotation=60)
    plt.ylabel(u'温度', fontproperties=zhfont1)
    plt.title(u'今日随机20个城市的温度', fontproperties=zhfont1)

    # show the figure, but do not block
    plt.show()


def main():
    """Entry point: scrape today's temperatures, then visualize a sample."""
    spide_temperature()
    show_temperature()


"""
中国天气网：http://www.weather.com.cn/textFC/hb.shtml

数据抓取说明：
    按大区分类（华北、东北、华东、华中、华南、西北、西南、港澳台）：todo(获取按区分类的天气url)
        <ul class="lq_contentboxTab2">
            <li style="background:transparent url(/m2/i/forecast/textFC/c_tabm_bar.png);"><span><a href="/textFC/hb.shtml">华北</a></span></li>
            <li><span><a href="/textFC/db.shtml">东北</a></span></li>
            <li><span><a href="/textFC/hd.shtml">华东</a></span></li>
            <li><span><a href="/textFC/hz.shtml">华中</a></span></li>
            <li><span><a href="/textFC/hn.shtml">华南</a></span></li>
            <li><span><a href="/textFC/xb.shtml">西北</a></span></li>
            <li><span><a href="/textFC/xn.shtml">西南</a></span></li>
            <li><span><a href="/textFC/gat.shtml">港澳台</a></span></li>
        </ul>
    --》往后七天天气（包括当天）: todo(获取天气对应的日期)
        <ul class="day_tabs">
            <li class="selected">今天周四(12月7日)</li>
            <li>周五(12月8日)</li>
            <li>周六(12月9日)</li>
            <li>周日(12月10日)</li>
            <li>周一(12月11日)</li>
            <li>周二(12月12日)</li>
            <li>周三(12月13日)</li>
        </ul>

        --》对应的七天的天气在一个div中：todo(获取相应日期对应的某一天的天气内容)
            <div class="hanml">
                <div class="conMidtab">...</div>
                <div class="conMidtab" style="display:none;">...</div>
                ...
            </div>
            --》按省/直辖市归类城市，每个省/直辖市位于div（class=conMidtab2）中：todo(获取某个省/直辖市的某一天的天气内容)
                <div class="conMidtab">
                    <div class="conMidtab2">...</div>
                </div>

                --》对应的天气数据位于table中：todo(从中抓取省下的城市的最高最低气温)
                    省
                        <td width="74" rowspan="17" class="rowsPan">
                            <a href="/textFC/beijing.shtml" target="_blank">北京</a>
                        </td>
                    城市
                        <td width="83" height="23">
                            <a href="http://www.weather.com.cn/weather/101010100.shtml" target="_blank">北京</a>
                        </td>
                    白天最高气温
                        如果是第一行，位于第5个td中
                        否则，位于第4个td中
                    夜间最低气温
                        如果是第一行，位于第8个td中
                        否则，位于第7个td中
"""
if __name__ == "__main__":
    main()
