#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Project: spd-sxmcc
"""
@author: lyndon
@time Created on 2019/2/26 11:56
@desc
"""
import urllib2

# Default request headers mimicking a desktop Chrome browser, used by the
# fetch helpers below.  Dict form for APIs that take a mapping.
headers = {
    "Content-type": "text/html; charset=UTF-8",
    "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,pl;q=0.7",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36",
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
    "Referer": "https://ty.5i5j.com/",
    "Cache-Control": "max-age=0",
}

# Same headers as (name, value) pairs, the shape `opener.addheaders` expects.
header_lst = [
    ("Content-type", "text/html; charset=UTF-8"),
    ("Accept-Language", "zh-CN,zh;q=0.9,en;q=0.8,pl;q=0.7"),
    ("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36"),
    ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"),
    ("Referer", "https://ty.5i5j.com/"),
    ("Cache-Control", "max-age=0"),
]

# Minimal header set: just the User-Agent pair (third entry above).
header_lst2 = [header_lst[2]]


def req_by_proxy(url, proxy='spdu:JIqnEW218W9@47.94.227.14:3128', timeout=60):
    """Fetch *url* through an authenticated HTTP/HTTPS proxy and return the body.

    :param url: URL to request.
    :param proxy: proxy spec as ``user:password@host:port`` (defaults to the
        original hard-coded proxy, so existing ``req_by_proxy(url)`` callers
        are unaffected).  NOTE(review): credentials are embedded in source —
        consider moving them to configuration.
    :param timeout: socket timeout in seconds.
    :return: raw response body (bytes/str as returned by ``read()``).
    """
    proxy_handler = urllib2.ProxyHandler({"http": proxy, "https": proxy})
    opener = urllib2.build_opener(proxy_handler)
    # Only the User-Agent header is sent on this path (matches original behavior).
    opener.addheaders = header_lst2
    response = opener.open(url, timeout=timeout)
    try:
        # Ensure the underlying socket is released even if read() fails
        # (original code leaked the response object).
        html = response.read()
    finally:
        response.close()
    print(html)
    return html


def req_by_block(url, proxy='spdu:JIqnEW218W9@47.95.230.230:43218', max_attempts=6, timeout=60):
    """Fetch *url* through a proxy, retrying on failure, and return the body.

    Installs a process-wide opener via ``urllib2.install_opener`` (side effect
    kept from the original: subsequent ``urllib2.urlopen`` calls also use the
    proxy).  The useless ``global Max_Num`` mutation from the original was
    removed — nothing else in the module read it.

    :param url: URL to request.
    :param proxy: proxy spec as ``user:password@host:port`` (defaults to the
        original hard-coded proxy, so ``req_by_block(url)`` callers still work).
        NOTE(review): credentials are embedded in source — move to config.
    :param max_attempts: how many times to try before giving up (was 6).
    :param timeout: per-attempt socket timeout in seconds.
    :return: raw response body on success, ``None`` if every attempt failed.
    """
    req = urllib2.Request(url)
    proxy_handler = urllib2.ProxyHandler({"http": proxy, "https": proxy})
    urllib2.install_opener(urllib2.build_opener(proxy_handler))
    for attempt in range(max_attempts):
        try:
            response = urllib2.urlopen(req, timeout=timeout)
            try:
                # Close the response even if read() raises (original leaked it).
                page = response.read()
            finally:
                response.close()
            print(page)
            return page
        except Exception as e:
            # Retry silently until the last attempt, then report the failure
            # (message text kept from the original).
            if attempt == max_attempts - 1:
                print('URLError: <urlopen error timed out> All times is failed ', e)
    return None


if __name__ == '__main__':
    # Target used for the proxy check.  Other URLs tried previously:
    # 5i5j / fang.com / teradata / baidu — swap in here to re-test.
    target_url = 'http://www.sohu.com'
    # req_by_proxy(target_url)  # alternative path: per-opener proxy, no retry
    req_by_block(target_url)



# if __name__ == "__main__":
#     # Target URL
#     url = 'http://www.wuxiaodong.cn'
#     # Proxy IP
#     proxy = {'http': '47.94.227.14:8000'}
#     # Create the ProxyHandler
#     proxy_support = request.ProxyHandler(proxy)
#     # Create the opener
#     opener = request.build_opener(proxy_support)
#     # Add a User-Agent header
#     opener.addheaders = [('User-Agent',
#                           'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36')]
#     # Install the opener process-wide
#     request.install_opener(opener)
#     # Use the installed opener
#     response = request.urlopen(url, timeout=5)
#     # Read and decode the response
#     html = response.read().decode("utf-8")
#     # Print the result
#     print(html)
