import requests
from retrying import retry
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
from lxml import etree
import random
import time

# Reference: a real Chrome desktop User-Agent looks like:
# Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.77 Safari/537.36
class FakChromeUA:
    """Generate randomized Chrome desktop User-Agent strings."""

    # Kept for backward compatibility: one version sampled at import time.
    first_num = random.randint(55, 62)
    third_num = random.randint(0, 3200)
    fourth_num = random.randint(0, 140)
    # Candidate platform tokens (Windows, Linux, macOS).
    os_type = [
        '(Windows NT 6.1; WOW64)', '(Windows NT 10.0; WOW64)', '(X11; Linux x86_64)',
        '(Macintosh; Intel Mac OS X 10_12_6)'
    ]
    # BUG FIX: Chrome version components are dot-separated
    # ("Chrome/62.0.3200.140"), not comma-separated.
    chrome_virsion = f'Chrome/{first_num}.0.{third_num}.{fourth_num}'

    @classmethod
    def get_ua(cls):
        """Return a full UA string with a freshly sampled Chrome version.

        A new version is drawn on every call so repeated requests do not
        all share the single version sampled at import time.
        """
        version = (f'Chrome/{random.randint(55, 62)}.0.'
                   f'{random.randint(0, 3200)}.{random.randint(0, 140)}')
        return (f'Mozilla/5.0 {random.choice(cls.os_type)} '
                f'AppleWebKit/537.36 (KHTML, like Gecko) {version} Safari/537.36')

class Spider(FakChromeUA):
    """Minimal throttled HTTP fetcher with a randomized User-Agent."""

    def __init__(self):
        # Persistent header dict, reused (and mutated) across fetches.
        self.headers = {}

    def fetch(self, url, param=None, headers=None):
        """GET *url* with a random User-Agent after a short random delay.

        Args:
            url: target URL.
            param: optional query parameters passed to ``requests.get``.
            headers: optional extra headers merged into the persistent
                header dict before the request.

        Returns:
            The ``requests.Response`` on HTTP 200, the string '连接错误'
            on a request error, and None for any other status code.
        """
        # BUG FIX: the `headers` argument was previously accepted but ignored.
        if headers:
            self.headers.update(headers)
        self.headers['User-Agent'] = self.get_ua()
        self.wai_time()
        try:
            response = requests.get(url, params=param, headers=self.headers)
        except requests.RequestException:
            # Preserve the original sentinel value for existing callers.
            return '连接错误'
        if response.status_code == 200:
            return response
        return None

    def wai_time(self):
        """Sleep 100-300 ms to throttle the request rate."""
        time.sleep(random.randint(100, 300) / 1000)

# if __name__ == '__main__':
#     s = Spider()
#     print(s.fetch('http://www.baidu.com'))