import requests
import random
import time
import execjs
import re
from lxml import etree


class Headers(object):
    """HTTP GET helper that rotates browser User-Agent strings and falls
    back to a pool of free proxies (scraped from kuaidaili.com) when
    direct requests keep failing."""

    def __init__(self):
        # Desktop-browser User-Agent strings; one is picked at random per
        # request to reduce the chance of being blocked by the target site.
        self.user_agent_list = [
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1',
            'Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6',
            'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6',
            'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1',
            'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5',
            'Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3',
            'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3',
            'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3',
            'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3',
            'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3',
            'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3',
            'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24',
            'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24'
        ]
        # "ip:port" strings scraped from the free-proxy listing page.
        self.ip_list = []
        self.proxy_pool()

    def proxy_pool(self):
        """Scrape kuaidaili.com's free-proxy page and fill ``self.ip_list``.

        The site may answer with HTTP 521 plus a JavaScript challenge that
        computes an anti-bot cookie; in that case the challenge script is
        executed via execjs and the page is re-fetched with the cookie.
        """
        # FIX: give the scraper a timeout so a dead site cannot hang __init__ forever.
        first_req = requests.get('http://www.kuaidaili.com/free/', timeout=10)
        if first_req.status_code == 521:
            # Extract the challenge function and neutralise its eval() so it
            # returns the cookie-setting JS source instead of executing it.
            js_func = ''.join(re.findall(r'(function .*?)</script>', first_req.text)).replace('eval("qo=eval;qo(po);");', 'return po')
            js_arg = ''.join(re.findall(r'setTimeout\(\"\D+\((\d+)\)\"', first_req.text))
            ctx = execjs.compile(js_func)
            # FIX: parse the function name instead of assuming it is exactly
            # two characters long (the old js_func[9:11] slice); keep the
            # slice as a last-resort fallback.
            name_match = re.search(r'function\s+(\w+)', js_func)
            js_func_name = name_match.group(1) if name_match else js_func[9:11]
            cookie_str = ctx.call(js_func_name, js_arg)
            cookie = self.parse_cookie(cookie_str)
            req = requests.get('http://www.kuaidaili.com/free/', cookies=cookie, timeout=10)
        else:
            req = first_req
        html = etree.HTML(req.content)
        ip = html.xpath('//div[@id="list"]/table/tbody/tr/td[1]/text()')
        port = html.xpath('//div[@id="list"]/table/tbody/tr/td[2]/text()')
        for i, p in zip(ip, port):
            self.ip_list.append(i + ':' + p)

    def parse_cookie(self, string):
        """Convert a ``document.cookie='name=value; ...'`` JS snippet into a
        one-entry dict usable as requests' ``cookies`` argument.

        Only the first ``;``-separated pair (the cookie itself, before
        attributes such as ``path``/``expires``) is kept.
        """
        string = string.replace("document.cookie='", "")
        clearance = string.split(';')[0]
        # FIX: partition instead of split('=')[1] so cookie values that
        # themselves contain '=' (common in base64 tokens) are not truncated.
        name, _, value = clearance.partition('=')
        return {name: value}

    def get(self, url, timeout, num_retries=6, proxy=None):
        """GET ``url`` with a random User-Agent, retrying on failure.

        Direct requests are retried ``num_retries`` times; after that the
        proxy pool is used, again with up to 6 attempts.  Returns the
        ``requests.Response`` on success, or ``None`` when every attempt
        (direct and proxied) has failed.
        """
        ua = random.choice(self.user_agent_list)
        # FIX: the original sent the misspelled header 'User-Anget', so the
        # UA rotation never actually reached the server.
        headers = {'User-Agent': ua, 'Referer': url}
        if proxy is None:
            try:
                return requests.get(url=url, headers=headers, timeout=timeout)
            # FIX: also catch Timeout — the retry message says "请求超时"
            # (request timed out) but only ConnectionError was handled.
            except (requests.exceptions.ConnectionError, requests.exceptions.Timeout):
                if num_retries > 0:
                    time.sleep(5)
                    print(url+' 请求超时，正在重试！')
                    return self.get(url, timeout, num_retries-1)
                else:
                    time.sleep(5)
                    print('启用代理')
                    return self.get(url, timeout, 6, True)
        else:
            try:
                return requests.get(url=url, timeout=timeout, headers=headers, proxies=self.use_proxy())
            except (requests.exceptions.ConnectionError, requests.exceptions.Timeout):
                if num_retries > 0:
                    time.sleep(5)
                    print(url+' 通过代理获取错误，正在更换代理！')
                    return self.get(url, timeout, num_retries-1, True)
                else:
                    time.sleep(5)
                    print(url+' 无法获取')
                    return None

    def use_proxy(self):
        """Return a requests ``proxies`` dict built from a random pool entry.

        FIX: returns ``None`` when the pool is empty (e.g. scraping failed),
        which makes requests fall back to a direct connection instead of
        crashing with IndexError from random.choice.
        """
        if not self.ip_list:
            return None
        ip = 'http://' + random.choice(self.ip_list)
        return {'http': ip}

