#!/usr/bin/env python
# -*- coding:utf-8 -*-
# author:BINGO
# datetime:2018/10/31 上午 08:29
# software: PyCharm
import time
import urllib
import urllib.error
import urllib.request
from multiprocessing import Pool

from lxml import etree

from code2.FindStreetDetail import GetUserAgent


def getProxies():
    """Scrape the first 10 pages of xicidaili high-anonymity proxies.

    Builds the raw (unvalidated) proxy pool by parsing the IP address and
    port columns of every "odd" table row on each listing page.

    Returns:
        list[str]: raw proxy strings in "ip:port" form.
    """
    init_proxies = []
    # Scrape listing pages 1 through 10.
    for page in range(1, 11):
        print("####")
        print("####爬取第" + str(page) + "页####")
        print("####")
        print("IP地址\t\t\t端口\t存活时间\t\t验证时间")
        url = "http://www.xicidaili.com/nn/" + str(page)
        user_agent = GetUserAgent()
        opener = urllib.request.build_opener()
        opener.addheaders = [("User-Agent", user_agent)]
        try:
            data = opener.open(url, timeout=5).read()
        except Exception as er:
            print("爬取的时候发生错误，具体如下：")
            print(er)
            # BUG FIX: previously execution fell through with `data`
            # undefined (NameError on the first failure, or stale data
            # from the previous page afterwards). Skip this page instead.
            continue
        selector = etree.HTML(data)
        ip_addrs = selector.xpath('//tr[@class="odd"]/td[2]/text()')  # IP address
        ports = selector.xpath('//tr[@class="odd"]/td[3]/text()')  # port
        # (columns td[9]/td[10] — alive time / verified time — were fetched
        # before but never used; dropped.)
        for addr, port in zip(ip_addrs, ports):
            ip = addr + ":" + port
            init_proxies.append(ip)
            # Echo each scraped proxy in copy-pasteable form.
            print("'http://" + ip + "',")
    return init_proxies


def testProxy(curr_ip):
    """Validate a single proxy by fetching Baidu through it.

    Args:
        curr_ip: proxy string in "ip:port" form.

    Returns:
        list[str]: [curr_ip] if the proxy returned a non-empty response,
        otherwise [] (errors are printed, never raised).
    """
    tmp_proxies = []
    tarURL = "http://www.baidu.com/"
    user_agent = GetUserAgent()
    proxy_support = urllib.request.ProxyHandler({"http": curr_ip})
    opener = urllib.request.build_opener(proxy_support)
    opener.addheaders = [("User-Agent", user_agent)]
    try:
        # FIX: use this opener directly instead of install_opener(),
        # which mutated process-global urllib state on every call.
        res = opener.open(tarURL, timeout=5).read()
        if len(res) != 0:
            tmp_proxies.append(curr_ip)
    except urllib.error.URLError as er2:
        if hasattr(er2, "code"):
            print("验证代理IP（" + curr_ip + "）时发生错误（错误代码）：" + str(er2.code))
        if hasattr(er2, "reason"):
            print("验证代理IP（" + curr_ip + "）时发生错误（错误原因）：" + str(er2.reason))
    except Exception as er:
        print("验证代理IP（" + curr_ip + "）时发生如下错误）：")
        print(er)
    time.sleep(2)  # throttle so worker processes don't hammer the target
    return tmp_proxies


def mulTestProxies(init_proxies):
    """Validate proxy IPs in parallel across a pool of worker processes.

    Args:
        init_proxies: iterable of raw "ip:port" proxy strings.

    Returns:
        list[list[str]]: one sub-list per input proxy — [ip] when the
        proxy validated, [] when it did not.
    """
    worker_pool = Pool(processes=7)
    verified = worker_pool.map(testProxy, init_proxies)
    worker_pool.close()
    # Block until every worker in the pool has finished its share.
    worker_pool.join()
    return verified


if __name__ == '__main__':
    # --- (1) Build the raw proxy pool.
    init_proxies = getProxies()
    # --- (2) Validate in parallel; each pool.map element is [] or [ip].
    tmp_proxies = mulTestProxies(init_proxies)
    # --- (3) Flatten the per-proxy result lists into plain strings.
    proxy_addrs = [ip for sub in tmp_proxies for ip in sub]
    # BUG FIX: the file used to be reopened with mode 'w' inside the loop
    # (truncating on every iteration, keeping only the last result) and
    # f.write() was handed a *list*, which raises TypeError. Open once
    # and write one proxy string per line.
    with open('ip.txt', 'w') as f:
        for proxy in proxy_addrs:
            print('===> %s' % proxy)
            f.write(proxy + '\n')
