#!/usr/bin/python
# -*-coding:utf-8-*-
import time
from _socket import timeout
from http.client import RemoteDisconnected, BadStatusLine
from urllib.error import HTTPError, URLError
from urllib.request import urlopen
from urllib.request import Request
from urllib import parse
from bs4 import BeautifulSoup as bs
import pymysql.cursors
import os.path


class proxy_server_info:
    """Plain record describing one proxy server scraped from the listing page.

    Every attribute defaults to None so an empty record can be created first
    and filled in field by field.
    """

    def __init__(self, ip=None, port=None, anonymous_degrees=None, type=None,
                 location=None, response_speed=None,
                 final_verification_time=None):
        # Network endpoint of the proxy.
        self.ip, self.port = ip, port
        # Descriptive metadata copied from the table columns.
        self.anonymous_degrees = anonymous_degrees
        self.type = type
        self.location = location
        self.response_speed = response_speed
        self.final_verification_time = final_verification_time


def use_proxy(proxy_addr, url):
    """Fetch *url* through an HTTP proxy and return the decoded page body.

    :param proxy_addr: proxy endpoint as a "host:port" string
    :param url: page to fetch through the proxy
    :return: response body decoded as UTF-8
    :raises: urllib / http.client / socket errors on failure or 2s timeout
    """
    import urllib.request
    proxy = urllib.request.ProxyHandler({'http': proxy_addr})
    opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)
    # Use the opener directly instead of install_opener(): installing it
    # globally would silently route every later plain urlopen() call in this
    # process (including the listing-page fetches in __main__) through the
    # last tested proxy.  The with-block also guarantees the connection is
    # closed even if read()/decode() raises.
    with opener.open(url, timeout=2) as response:
        return response.read().decode('utf-8')


def test_proxy(psi, url):
    """Probe connectivity of the proxy described by *psi* against *url*.

    :param psi: proxy_server_info with at least .ip and .port populated
    :param url: page used as the connectivity probe
    :return: True when the page was fetched (elapsed time is printed),
             False on any connection failure or timeout
    """
    try:
        start_time = time.time()
        proxy_addr = "%s:%s" % (psi.ip, psi.port)
        page = use_proxy(proxy_addr, url)
        # Sanity-parse the body; the parsed tree itself is not needed.
        bs(page, "html.parser")
        # hh:mm:ss breakdown of the elapsed wall time.
        minutes, seconds = divmod(int(time.time() - start_time), 60)
        hours, minutes = divmod(minutes, 60)
        print("Complete time cost {:>02d}:{:>02d}:{:>02d}".format(hours, minutes, seconds))
        return True
    except (HTTPError, URLError, RemoteDisconnected, ConnectionAbortedError,
            BadStatusLine, timeout):
        # The original had one identical handler per exception type, two of
        # which bound the exception as `url`, shadowing the parameter.  Any
        # of these failures simply means the proxy is unusable.
        return False


if __name__ == "__main__":
    index = 1
    # Walk the requested range of listing pages (currently a single page).
    for page_no in range(index, index + 1):
        url = "http://www.ip3366.net/free/?stype=3&page=%s" % page_no
        print(url)
        try:
            # Fetch and parse the listing page of free proxies.
            response = urlopen(url)
            page = response.read()
            soup = bs(page, "html.parser")
            for child in soup.select("div#list table tbody tr"):
                # Select the row's cells once instead of seven times, and
                # build a fresh record per row — the original mutated one
                # shared proxy_server_info instance for every row.
                cells = child.select("td")
                psi = proxy_server_info(
                    ip=cells[0].text,
                    port=cells[1].text,
                    anonymous_degrees=cells[2].text,
                    type=cells[3].text,
                    location=cells[4].text,
                    response_speed=cells[5].text,
                    final_verification_time=cells[6].text,
                )
                # Probe each proxy up to three times; stop at first failure.
                # NOTE: renamed the loop variable — the original reused `x`,
                # shadowing the page index used in the error messages below.
                for _attempt in range(3):
                    if test_proxy(psi, "https://blog.csdn.net/tianjingle_blog/article/details/111503564"):
                        print(psi.ip, psi.port, psi.anonymous_degrees, psi.type,
                              psi.location, psi.response_speed,
                              psi.final_verification_time)
                    else:
                        print("%s:%s 不能连接！" % (psi.ip, psi.port))
                        break
        except HTTPError as http:
            print("ID：%s %s 网页获取失败：%s" % (page_no, url, http.msg))
        except URLError as err:
            # The original bound the exception as `url`, clobbering the URL
            # it was trying to report.
            print("ID：%s %s 网页获取失败：%s" % (page_no, url, err))
