# -*- coding: UTF-8 -*-

'''
Python 3.x
无忧代理IP Created on 2018年05月11日
描述：本DEMO演示了使用无忧代理IP请求网页的过程，代码使用了多线程
逻辑：每隔5秒从API接口获取IP，对于每一个IP开启一个线程去抓取网页源码
@author: www.data5u.com
'''
import requests;
import time;
import threading;
import urllib3;
# API endpoint returning proxy IPs as plain text, one "ip:port" per line
# (separator selected by the sep=3 query parameter).
apiUrl = "http://api.ip.data5u.com/dynamic/get.html?order=6fa95682189df372794601d88406b164&sep=3"

# Page fetched through each proxy IP.
targetUrl = "https://www.so.com/"

# NOTE(review): this module-level list is never read anywhere — get_proxyip()
# binds its own local `ips`. Kept only for backward compatibility.
ips = []
def get_proxyip():
    """Fetch the proxy IP list from the API and return the last usable entry.

    The API answers with plain text, one ``ip:port`` per line. Every usable
    line is printed; the last non-empty one is returned, stripped of
    surrounding whitespace. Returns an empty string when the response
    contains no usable entry.

    Raises whatever ``requests.get`` raises on network failure.
    """
    res = requests.get(apiUrl).content.decode()
    proxyip_result = ''
    # splitlines() handles both \n and \r\n endings; the original split('\n')
    # left a trailing '\r' on CRLF responses, which later corrupted the
    # "http://<ip:port>" proxy URL built by the caller.
    for line in res.splitlines():
        proxyip = line.strip()
        if proxyip == '':
            continue
        proxyip_result = proxyip
        print(proxyip)
    return proxyip_result
 
def getresponse():
    """Fetch ``targetUrl`` through a proxy obtained from get_proxyip().

    Prints the current thread name, the elapsed time in seconds, the proxy
    used, and the retrieved HTML. Raises whatever ``requests.get`` raises on
    network failure or timeout (15 s).
    """
    # Start timing the whole fetch; time.time() deltas are in seconds.
    start = time.time()
    # Suppress the InsecureRequestWarning triggered by verify=False below.
    urllib3.disable_warnings()
    # Connection: close works around "Max retries exceeded" errors when many
    # short-lived requests exhaust the connection pool.
    headers = {
        'Connection': 'close',
        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'accept-language': 'zh-CN,zh;q=0.9',
        'upgrade-insecure-requests': '1',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36'
    }
    proxyip = get_proxyip()
    # requests matches proxy-dict keys against the (lowercase) URL scheme, so
    # the keys must be 'http'/'https' — the original uppercase 'HTTP'/'HTTPS'
    # keys never matched and the proxy was silently ignored.
    proxies = {
        'http': 'http://' + proxyip,
        'https': 'http://' + proxyip,
    }
    response = requests.get(url=targetUrl, proxies=proxies, verify=False,
                            timeout=15, headers=headers)
    html = response.content.decode()
    end = time.time()
    # .name replaces the deprecated getName(); the elapsed value is seconds,
    # so the message now says 秒 instead of the incorrect 毫秒 (milliseconds).
    print(threading.current_thread().name + "使用代理IP, 耗时 " + str(end - start)
          + "秒 " + proxyip + " 获取到如下HTML内容：\n" + html + "\n*************")


if __name__ == '__main__':
    # Demo entry point: fetch one proxy IP from the API and retrieve the
    # target page through it a single time.
    # NOTE(review): the module docstring describes a 5-second polling loop
    # with one thread per IP; this demo only performs a single fetch, so the
    # unused fetchSecond interval and the redundant targetUrl reassignment
    # (identical to the module-level value) were removed.
    try:
        getresponse()
    except Exception as e:
        # Top-level boundary: report the failure instead of dumping a raw
        # traceback for an expected network/proxy error.
        print("请求失败: " + str(e))