import logging
import requests
from common import common_config
from graph import MaxMinAverageTemperature, MonthlyAveragePressure, MonthlyAverageTemperature, RainAndRainDay, \
    MonthlyAverageRainfall, SnowfallSnowDays, MonthlyAverageSnowfall, MaxAverageWindSpeedWindGust, Pressure, \
    CloudHumidity, UvIndex, MonthlyAverageUV, SunHoursSunDays, MonthlyAverageSunHoursDays, Visibility
from graph.GraphHtmlParse import HtmlParser
from common import logger_config

# Module-level side effects: wire up console + file logging once at import
# time, and cache the scrape target's base URL from shared configuration.
logger_config.init_console_log()
logger_config.init_file_log()
base_url = common_config.base_url


class Graph:
    """Fetches one weather-averages page and parses all of its graph sections.

    Each section is parsed by a dedicated ``graph`` sub-package; the result of
    a scrape is a list of ``(title, parsed_data)`` tuples, one per section.
    """

    def __init__(self, get_proxy_func=None, timeout=10) -> None:
        """
        :param get_proxy_func: optional zero-argument callable returning a
            ``requests``-style proxies mapping; used to retry once through a
            proxy when the direct request does not return HTTP 200.
        :param timeout: per-request timeout in seconds.
        """
        self.get_proxy_func = get_proxy_func
        self.timeout = timeout

    def find(self, url):
        """Fetch ``base_url + url`` and parse every known graph section.

        :param url: path component appended to ``base_url``; an empty string
            short-circuits to ``(url, [])`` with no network access.
        :return: ``(url, results)`` where *results* is a list of
            ``(title, parsed_data)`` tuples in page order.
        """
        if not url:
            return url, []
        get = requests.get(url=base_url + url, timeout=self.timeout)
        if get.status_code != 200 and self.get_proxy_func is not None:
            proxies = self.get_proxy_func()
            logging.info(f"调用代理{proxies}")
            # BUG FIX: the requests keyword is ``proxies`` — the original
            # ``proxys=...`` raised TypeError, so the proxy retry never ran.
            get = requests.get(url=base_url + url, proxies=proxies, timeout=self.timeout)

        html = get.text
        # (package, is_monthly) pairs, kept in the order the sections appear
        # on the page so callers can index results positionally.
        sections = [
            (MaxMinAverageTemperature, False),
            (MonthlyAverageTemperature, True),
            (RainAndRainDay, False),
            (MonthlyAverageRainfall, True),
            (SnowfallSnowDays, False),
            (MonthlyAverageSnowfall, True),
            (MaxAverageWindSpeedWindGust, False),
            (Pressure, False),
            (MonthlyAveragePressure, True),
            (CloudHumidity, False),
            (UvIndex, False),
            (MonthlyAverageUV, True),
            (SunHoursSunDays, False),
            (MonthlyAverageSunHoursDays, True),
            (Visibility, False),
        ]
        result = [
            self.parse_html_monthly_by_package(html, package)
            if monthly
            else self.parse_html_by_package(html, package)
            for package, monthly in sections
        ]
        logging.info(f"{url}爬取成功")
        return url, result

    def parse_html_monthly_by_package(self, html, package):
        """Parse a monthly-style section; returns ``(title, parsed_data)``."""
        obj = self.new_obj(package)
        data_html = HtmlParser().parse_monthly_html(html, obj.title())
        return obj.title(), obj.parse(data_html)

    def parse_html_by_package(self, html, package):
        """Parse a regular (non-monthly) section; returns ``(title, parsed_data)``."""
        obj = self.new_obj(package)
        data_html = HtmlParser().parse_html(html, obj.title())
        return obj.title(), obj.parse(data_html)

    def new_obj(self, package):
        """Instantiate the parser class ``Graph`` declared in *package*."""
        return package.Graph()


if __name__ == '__main__':
    # Manual smoke test: scrape one station page and print the last row of
    # the first parsed section's data frame.
    crawler = Graph()
    page_url, sections = crawler.find("/gaoshan-weather-averages/fujian/cn.aspx")
    first_title, first_frame = sections[0]
    print(first_frame.iloc[-1:])
