# -*- coding:utf-8 -*-

from fake_useragent import UserAgent
from lxml import etree
import time
import base64
import requests
import json
import random
import datetime

from requests.auth import HTTPProxyAuth


requests.packages.urllib3.disable_warnings()  # 忽略HTTPS安全警告

"""
使用代理 18241255868,gkx55868
芝麻代理 http://www.zhimaruanjian.com/
"""

class ProxyApi():
    """Maintain a pool of proxy addresses fetched from the Zhima provider and
    issue HTTP requests through a randomly selected proxy, with bounded retries.
    """

    MAX_RETRY = 3  # upper bound on attempts per logical request

    def __init__(self):
        # Pre-fetch an initial pool of proxy dicts (each with 'ip' and 'port').
        self.ip_port_list = self.get_ip()

    def get_ip(self):
        """
        Fetch a fresh list of proxy addresses from the provider API.

        :return: list of dicts — assumed to carry 'ip' and 'port' keys based on
                 usage in get_request_proxy; verify against the provider docs.
        """
        # Provider endpoint returning proxies as JSON: {"data": [...]}.
        # (The original had a second, dead URL assignment that shadowed the
        # first; the unused one has been removed.)
        url = "http://http.tiqu.alicdns.com/getip3?num=20&type=2&pro=&city=0&yys=0&port=1&pack=100763&ts=0&ys=0&cs=0&lb=1&sb=0&pb=4&mr=1&regions=&gm=4"
        print(f"-------------获取代理IP-------------")
        html = requests.get(url)
        data_json = json.loads(html.text)
        return data_json['data']

    def get_request_proxy(self, method, url, headers_s, data, retry=1):
        """
        Send an HTTP request through a random proxy from the pool.

        :param method: "GET" or "POST" (any other value returns None)
        :param url: target URL; HTTPS targets are requested with verify=False
        :param headers_s: caller headers; a random User-Agent is merged in
        :param data: POST body (for POST) or query params (for GET)
        :param retry: current attempt number — internal, starts at 1
        :return: requests.Response on success, None for an unsupported method
                 or after MAX_RETRY failed attempts
        """
        # Bound the recursion: the original tracked `retry` and even defined
        # max_retry = 3, but never enforced it, so persistent failures
        # recursed without limit.
        if retry > self.MAX_RETRY:
            print(f"-------------重试超过最大次数,放弃请求-------------")
            return None

        # Replenish the pool when failed proxies have emptied it.
        if not self.ip_port_list:
            self.ip_port_list = self.get_ip()
            print(f"-------------重新获取代理IP-------------")

        ip_port = random.choice(self.ip_port_list)
        proxyHost = ip_port['ip']
        proxyPort = ip_port['port']
        print(f"======使用代理地址:{proxyHost}:{proxyPort}======")

        ua = UserAgent(use_cache_server=False)  # disable the remote cache server
        # Merge caller headers with a random User-Agent (the random UA wins
        # on a key clash, matching the original dict(headers_s, **headers_p)).
        headers = dict(headers_s, **{"User-Agent": ua.random})
        proxyMeta = "http://%(host)s:%(port)s" % {
            "host": proxyHost,
            "port": proxyPort,
        }
        proxies = {
            "http": proxyMeta,
            "https": proxyMeta,
        }
        try:
            if method == "POST":
                if url.startswith("https"):
                    res = requests.post(url, headers=headers, data=data, proxies=proxies, verify=False, timeout=10)
                else:
                    res = requests.post(url, headers=headers, data=data, proxies=proxies, timeout=10)
            elif method == "GET":
                if url.startswith("https"):
                    res = requests.get(url, headers=headers, params=data, proxies=proxies, verify=False, timeout=10)
                else:
                    res = requests.get(url, headers=headers, params=data, proxies=proxies, timeout=10)
            else:
                # Unsupported HTTP method.
                return None
        except Exception as ex:
            print(f"-------------【错误】,重试第【{retry}】次-------------")
            print(ex)
            # Drop the failing proxy from the pool and retry with another.
            # (The original called the undefined name HttpUtils.do_request_proxy
            # here, which raised NameError instead of retrying; it also passed
            # the already-merged headers, stacking a second UA merge.)
            self.ip_port_list.remove(ip_port)
            return self.get_request_proxy(method, url, headers_s, data, retry + 1)

        if res.status_code != 200:
            print(f"-------------返回状态码:{res.status_code},重试第【{retry}】次-------------")
            return self.get_request_proxy(method, url, headers_s, data, retry + 1)
        return res


if __name__ == '__main__':
    proxyapi = ProxyApi()

    url = "https://ip.cn/"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36',

    }

    # Smoke-test the proxy pool: fetch the IP-echo page 10 times and print
    # the reported origin IP plus the elapsed time for each request.
    for i in range(10):
        start = datetime.datetime.now()
        html = proxyapi.get_request_proxy("GET", url, headers, "")
        end = datetime.datetime.now()
        # get_request_proxy can return None (unsupported method or exhausted
        # retries); the original dereferenced html.text unconditionally and
        # crashed with AttributeError in that case.
        if html is None:
            print("====请求失败,跳过本次====")
            continue
        root = etree.HTML(html.text)
        print("".join(root.xpath('//div[@id="result"]//text()')))
        print(f"====耗时：{str(end - start)}====")