import logging
import re

import requests

from common import common_config
from history import HistoryGraph
from history.HistoryHtmlParse import HtmlParser
from common import logger_config

# Module-level side effects: wire up console and file logging once at import
# time, then resolve the scrape target's base URL from the shared config.
logger_config.init_console_log()
logger_config.init_file_log()
base_url = common_config.base_url


class History:
    """Scraper for a historical-weather site built on ASP.NET WebForms.

    Every postback must echo the ``__VIEWSTATE`` token from the previous
    response, so callers chain the token returned by :meth:`find` (or seed
    the chain via :meth:`get_first_viewstate`).
    """

    def __init__(self, get_proxy_func=None, timeout=10) -> None:
        """
        :param get_proxy_func: optional zero-argument callable returning a
            ``proxies`` mapping for :mod:`requests`; when set, a request
            that does not come back HTTP 200 is retried once through it.
        :param timeout: per-request timeout in seconds.
        """
        self.get_proxy_func = get_proxy_func
        self.timeout = timeout

    def find(self, url, date_str, viewstate):
        """Fetch and parse one day of history for a station page.

        :param url: site-relative page path; an empty string short-circuits
            without any network traffic.
        :param date_str: requested date string, e.g. ``"2022-02-06"``.
        :param viewstate: ASP.NET ``__VIEWSTATE`` token from the previous
            response.
        :return: ``(url, next_viewstate, parsed_data)``; ``parsed_data`` is
            ``None`` when ``url`` is empty.
        """
        if not url:  # idiomatic emptiness test (was ``len(url) == 0``)
            return url, viewstate, None
        # Form fields mirror the site's WebForms postback; the ``rbl*``
        # radio buttons pick display units, the rest is boilerplate the
        # server expects on every submit.
        data = {
            "__VIEWSTATE": viewstate,
            "__VIEWSTATEGENERATOR": "F960AAB1",
            "ctl00$rblTemp": "1",
            "ctl00$rblWindSpeed": "2",
            "ctl00$rblPrecip": "1",
            "ctl00$rblPressure": "1",
            "ctl00$rblVis": "1",
            "search": "",
            "ctl00$MainContentHolder$txtPastDate": date_str,
            "ctl00$MainContentHolder$butShowPastWeather": "Get Weather",
            "ctl00$hdsample": "",
            "ctl00$areaid": "46555",
            "ctl00$ubu": "1",
        }
        post_url = base_url + url
        logging.info(f"请求{post_url}")
        post = requests.post(url=post_url, data=data, timeout=self.timeout)
        if post.status_code != 200 and self.get_proxy_func is not None:
            # One best-effort retry through a proxy on any non-200 answer.
            proxies = self.get_proxy_func()
            logging.info(f"调用代理{proxies}")
            post = requests.post(url=post_url, data=data, proxies=proxies,
                                 timeout=self.timeout)
        html = post.text
        next_viewstate = self.parse_viewstate(html)
        data_html = HtmlParser().parse_html(html)
        parse = HistoryGraph.Graph().parse(data_html, date_str)
        logging.info(f"{url}[{date_str}]爬取成功")
        return url, next_viewstate, parse

    def get_first_viewstate(self):
        """Bootstrap the token chain: request the landing page and return
        its ``__VIEWSTATE`` (empty string when none is found)."""
        bootstrap = base_url + "/beijing-weather-history/beijing/cn.aspx"
        post = requests.post(bootstrap, timeout=self.timeout)
        if post.status_code != 200 and self.get_proxy_func is not None:
            proxies = self.get_proxy_func()
            # Bug fix: this retry used to pass ``data=data`` where ``data``
            # was never defined in this scope, raising NameError whenever
            # the proxy fallback fired.
            post = requests.post(bootstrap, proxies=proxies,
                                 timeout=self.timeout)
        return self.parse_viewstate(post.text)

    def parse_viewstate(self, html):
        """Extract the ``__VIEWSTATE`` hidden-input value from ``html``.

        :return: the token string, or ``''`` when no token is present.
        """
        # Capture the ``value`` attribute directly with a raw-string regex.
        # The old pattern used ``\=`` in a non-raw string (regex escape
        # warning) and sliced the match at a hard-coded offset of 25
        # characters, which silently breaks if attribute order or spacing
        # ever changes.
        match = re.search(r'name="__VIEWSTATE"[^>]*?value="([^"]*)"', html)
        return match.group(1) if match else ''


if __name__ == '__main__':
    # Demo run: crawl two consecutive days for one station, chaining the
    # VIEWSTATE token from the first response into the second request,
    # then merge the two parsed results.
    crawler = History()
    seed_state = "R9k48a/V+LJ6wcXWzmdtwk67yDzcWhNsGC0p21FOZkuyKuaTKuBTbwtwOhZU6OsYstZEzPlghR4NKagJPW1W2paSgpJYpiBNgvghmlxQkGowgMaX"
    page = "/gaoshan-weather-history/fujian/cn.aspx"
    page, state, day_one = crawler.find(page, "2022-02-06", seed_state)
    print("-----20220206-----")
    print(state)
    print(day_one)
    page, state, day_two = crawler.find("/gaoshan-weather-history/fujian/cn.aspx", "2022-02-07", state)
    print("-----20220207-----")
    print(state)
    print(day_two)
    print("-----合并数据-----")
    merged = day_one.append(day_two)
    print(day_one)
    print(merged)
