#!/usr/bin/python
# -*-coding:utf-8-*-
import time
from _socket import timeout
from http.client import RemoteDisconnected, BadStatusLine
from urllib.error import HTTPError, URLError
from urllib.request import urlopen
from urllib.request import Request as request
from urllib import parse
from bs4 import BeautifulSoup as bs
import pymysql.cursors
import os.path
import random

user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36 Edg/87.0.664.66"


class proxy_server_info:
    """A single proxy-server record scraped from the free-proxy listing.

    Attributes mirror the listing's columns: ip, port, anonymous_degrees,
    type, location, response_speed, final_verification_time.  All default
    to None so an empty record can be filled in field by field.
    """

    def __init__(self, ip=None, port=None, anonymous_degrees=None, type=None, location=None, response_speed=None,
                 final_verification_time=None):
        # Every constructor argument becomes a same-named instance attribute.
        vars(self).update(
            ip=ip,
            port=port,
            anonymous_degrees=anonymous_degrees,
            type=type,
            location=location,
            response_speed=response_speed,
            final_verification_time=final_verification_time,
        )


def use_proxy(proxy_addr, url):
    """
    Fetch a page through an HTTP proxy server.

    :param proxy_addr: proxy address in "ip:port" form
    :param url: URL of the page to fetch
    :return: response body decoded as utf-8
    :raises urllib.error.URLError: on connection/protocol failures
    """
    from urllib.request import ProxyHandler, build_opener

    opener = build_opener(ProxyHandler({'http': proxy_addr}))
    # Use the opener directly instead of install_opener(): installing an
    # opener mutates process-global state and would silently route every
    # later urlopen() call in the program through this proxy.
    response = opener.open(url, timeout=5.0)
    try:
        return response.read().decode('utf-8')
    finally:
        # Close the HTTP response so the connection is not leaked.
        response.close()


def get_random_proxy_server(proxy_servers):
    """
    Pick one proxy at random, refilling the pool first when it runs low.

    :param proxy_servers: current list of proxy_server_info objects
    :return: a randomly chosen proxy from the (possibly refreshed) pool,
             or None when no proxy could be obtained
    """
    # Refill when the pool is low, retrying up to 3 times.  The original
    # code called get_proxys() a second time in the else branch, whose
    # result was immediately overwritten on the next iteration — up to
    # six scrapes instead of three for no benefit.
    if len(proxy_servers) < 5:
        for _ in range(3):
            proxy_servers = get_proxys()
            if proxy_servers:
                break
    if proxy_servers:
        # random.choice is the idiomatic way to pick a random element.
        return random.choice(proxy_servers)
    return None


def use_random_proxy(proxy_servers, url):
    """
    Fetch *url* through a randomly chosen proxy from *proxy_servers*.

    :param proxy_servers: list of proxy_server_info objects (refilled when low)
    :param url: URL of the page to fetch
    :return: response body decoded as utf-8
    :raises RuntimeError: when no working proxy could be obtained
    """
    proxy_server = get_random_proxy_server(proxy_servers)
    if proxy_server is None:
        # Previously this fell through to an opaque AttributeError on
        # proxy_server.ip; fail with an explicit message instead.
        raise RuntimeError("no proxy server available")
    proxy_addr = "%s:%s" % (proxy_server.ip, proxy_server.port)
    print(url)
    return use_proxy(proxy_addr, url)


def test_proxy(psi, url):
    """
    Check whether the proxy described by *psi* can fetch and parse *url*.

    :param psi: proxy_server_info with at least ip and port set
    :param url: URL used as the connectivity probe
    :return: True when the page was fetched and parsed, False otherwise
    """
    proxy_addr = "%s:%s" % (psi.ip, psi.port)
    try:
        bs(use_proxy(proxy_addr, url), "html.parser")
    # A single tuple handler replaces six identical `return False` blocks;
    # the old `except URLError as url` also shadowed the url parameter.
    except (HTTPError, URLError, RemoteDisconnected, ConnectionAbortedError,
            BadStatusLine, timeout):
        return False
    return True


def get_proxys():
    """
    Scrape six pages of the kuaidaili free-proxy listing and return the
    entries that pass a connectivity test.

    :return: list of verified proxy_server_info objects (may be empty)
    """
    proxy_servers = []
    # URL used as the connectivity probe for every candidate proxy;
    # hoisted out of the row loop — it never changes.
    test_url = "https://blog.csdn.net/tianjingle_blog/article/details/111503564"
    start_page = 5
    for page_no in range(start_page, start_page + 6):
        url = "https://www.kuaidaili.com/free/inha/%s/" % page_no
        try:
            req = request(url)
            req.add_header("Host", "www.kuaidaili.com")
            req.add_header("User-Agent", user_agent)
            # Bug fix: the original called urlopen(url=url), silently
            # discarding the prepared Request and its headers — pass the
            # Request object so Host/User-Agent are actually sent.
            response = urlopen(req, timeout=5.0)
            page = response.read().decode("utf-8")
            soup = bs(page, "html.parser")
            # One <tr> per proxy; each <td> is tagged with a data-title.
            for child in soup.select("div#list table tbody tr"):
                psi = proxy_server_info()
                psi.ip = child.select_one("td[data-title='IP']").text
                psi.port = child.select_one("td[data-title='PORT']").text
                psi.anonymous_degrees = child.select_one("td[data-title='匿名度']").text
                psi.type = child.select_one("td[data-title='类型']").text
                psi.location = child.select_one("td[data-title='位置']").text
                psi.response_speed = child.select_one("td[data-title='响应速度']").text
                psi.final_verification_time = child.select_one("td[data-title='最后验证时间']").text
                # Retry the connectivity test up to 3 times.  The loop
                # variable no longer shadows the page counter, which
                # previously corrupted the page id printed on errors.
                for _ in range(3):
                    if test_proxy(psi, test_url):
                        print(psi.ip, psi.port, psi.anonymous_degrees, psi.type, psi.location, psi.response_speed,
                              psi.final_verification_time)
                        proxy_servers.append(psi)
                        break
        # HTTPError is a subclass of URLError, so it must come first.
        except HTTPError as http:
            print("ID：%s %s 网页获取失败：%s" % (page_no, url, http.msg))
        except URLError as url_error:
            print("ID：%s %s 网页获取失败：%s" % (page_no, url, url_error))
    return proxy_servers


if __name__ == "__main__":
    # Script entry point: scrape the listing and verify proxies when
    # the file is run directly (return value is discarded here).
    get_proxys()
