import logging

import requests
from bs4 import BeautifulSoup

import common
from common import ProxyIp
from common import LoggerTool
import chardet
import time
import random

logger = LoggerTool.get_logger(__name__)


# A given URL is only fetched and parsed once per instance.
class BeautySoupTool:
    def __init__(self, url, encoding='utf-8', timeout=20):
        """Fetch *url* and parse the response with BeautifulSoup.

        Parameters:
            url: page to request.
            encoding: kept for backward compatibility; the effective encoding
                is detected with chardet, so this argument is not used.
            timeout: requests timeout in seconds (default 20).

        After construction the caller can inspect:
            error: True when the request or parse failed.
            status_code: HTTP status code, or None when the request raised.
            title: cleaned page title on success; status code or None on failure.
            e: failure detail (status code or exception) when error is True,
               otherwise None.
        """
        # Pre-initialize every attribute callers inspect so all instances
        # have a consistent shape regardless of which branch runs (the
        # original code left `e` undefined on the success path).
        self.error = False
        self.e = None
        self.status_code = None
        self.title = None
        try:
            response = requests.get(url, headers=common.noval_header, timeout=timeout)
            self.status_code = response.status_code
            if self.status_code == 200:
                # Encoding requests guessed from the response headers (kept
                # for debugging/inspection only).
                self.auto_encoding = response.encoding

                # chardet's guess from the raw bytes is more reliable for
                # pages that mislabel their charset in the headers.
                self.chardet_encoding = chardet.detect(response.content)['encoding']
                response.encoding = self.chardet_encoding

                self.beautySoup = BeautifulSoup(response.text, 'lxml')
                self.title = common.replace_sub_common(self.beautySoup.title.string, '-', True).strip()
            else:
                self.title = self.status_code
                self.error = True
                self.e = self.status_code
                logger.error("BeautySoupTool 请求失败，{},status_code：{}".format(common.get_datetime('%Y/%m/%d %H:%M'), self.status_code))
        except Exception as e:
            # Network errors, parse errors, or a missing <title> all land
            # here; get_beautysoup() retries based on the `error` flag.
            self.e = e
            self.error = True
            self.status_code = None
            self.title = None
            logger.error("BeautySoupTool 请求失败，{},{}".format(common.get_datetime('%Y/%m/%d %H:%M'), e))


def get_beautysoup(url, max_retries=10):
    """Fetch *url* via BeautySoupTool, retrying with random backoff on failure.

    Parameters:
        url: page to request.
        max_retries: maximum number of retries after the first attempt
            (default 10, matching the previous hard-coded limit).

    Returns:
        The last BeautySoupTool instance. Callers should still check its
        `.error` flag — it may be a failed attempt if every retry failed.
    """
    soup = BeautySoupTool(url)
    retry_num = 1
    while soup.error and retry_num <= max_retries:
        # Sleep a random interval whose bounds grow linearly with the
        # attempt number, to back off from a struggling server.
        sleep_time = random.randint(5 * retry_num, 10 * retry_num)
        # Use the module logger (was the root `logging` logger — inconsistent
        # with the rest of this module).
        logger.error('获取soup失败！尝试重试 第{}次; sleep_time:{},status_code：{}，e:{}'.format(retry_num, sleep_time, soup.status_code, soup.e))
        time.sleep(sleep_time)
        retry_num += 1
        soup = BeautySoupTool(url)
    return soup
# if __name__ == '__main__':
# proxy = ProxyIp.ProxyIp()
# random_ip = proxy.get_random_proxy_ip()
# soup_tool = BeautySoupTool('https://f.w24.rocks/viewthread.php?tid=373943&extra=page%3D1%26amp%3Borderby%3Ddateline%26amp%3Bfilter%3Ddigest')
# print(soup_tool.get_title())
# soup_tool2 = BeautySoupTool('https://f.w24.rocks/viewthread.php?tid=374392&extra=page%3D1%26amp%3Borderby%3Ddateline%26amp%3Bfilter%3Ddigest', random_ip)
# print(soup_tool2.beautySoup.title.string)
# soup_tool1 = BeautySoupTool('https://f.w24.rocks/viewthread.php?tid=373943&extra=page%3D1%26amp%3Borderby%3Ddateline%26amp%3Bfilter%3Ddigest', random_ip)
# print(soup_tool1.beautySoup.title.string)
