#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Project: spd-sxmcc
"""
@author: lyndon
@time Created on 2018/12/12 23:41
@desc
"""

import urllib2

# Browser-like request headers in dict form, suitable for
# urllib2.Request(url, headers=headers).  Mimics Chrome 70 on Windows 10
# so the target site treats the scraper as a normal browser.
headers = {"Content-type": "text/html; charset=UTF-8",
               'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,pl;q=0.7',
               'User-Agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36",
               "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
               "Referer": "https://ty.5i5j.com/",
               "Cache-Control": "max-age=0"}

# Same header set as `headers` but as a list of (name, value) tuples,
# the shape required by opener.addheaders.
header_lst = [("Content-type", "text/html; charset=UTF-8"),
              ('Accept-Language', 'zh-CN,zh;q=0.9,en;q=0.8,pl;q=0.7'),
              ('User-Agent', "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36"),
              ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"),
              ("Referer", "https://ty.5i5j.com/"),
              ("Cache-Control", "max-age=0")]

# Minimal variant: only the User-Agent, for requests where the full
# header set is unnecessary.
header_lst2 = [('User-Agent', "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36")]

# Default target: community-listing index page of ty.5i5j.com.
url = 'https://ty.5i5j.com/xiaoqu/'


def req_by_proxy():
    """Fetch the listing page through an explicit HTTP/HTTPS proxy.

    Builds a one-off opener with a ProxyHandler (same proxy endpoint for
    both http and https traffic), attaches the minimal User-Agent header
    list, and prints the raw response body.

    Network failures (unreachable proxy, timeout, HTTP error) are caught
    and reported rather than crashing the demo script.
    """
    url = 'https://ty.5i5j.com/xiaoqu/'
    proxy_ip_port = '182.88.215.20:8123'
    # proxy_ip_port is already a str, so pass it directly (no "%s" % needed).
    proxy_handler = urllib2.ProxyHandler({"http": proxy_ip_port, "https": proxy_ip_port})
    opener = urllib2.build_opener(proxy_handler)
    opener.addheaders = header_lst2
    print('xxdd', url, proxy_ip_port)
    try:
        html = opener.open(url, timeout=5).read()
        print(html)
    except Exception as e:
        # A free proxy from a list is frequently dead; report instead of raising.
        print('proxy request failed:', e)


def req_by_block():
    """Fetch the module-level `url` through a proxy, retrying on failure.

    Installs a process-wide proxy opener (install_opener makes it the
    default for every subsequent urllib2.urlopen call), then attempts
    the request up to `max_attempts` times with a 5-second timeout.
    The first successful response body is printed and the loop exits;
    if every attempt fails, the last exception is reported.
    """
    req = urllib2.Request(url)
    proxy_ip_port = '114.225.171.19:53128'
    # proxy_ip_port is already a str; no "%s" % formatting needed.
    proxy_handler = urllib2.ProxyHandler({"http": proxy_ip_port, "https": proxy_ip_port})
    opener = urllib2.build_opener(proxy_handler)
    urllib2.install_opener(opener)
    # Retry count is purely local; the original needlessly declared it as
    # a module global (`global Max_Num`).
    max_attempts = 6
    for attempt in range(max_attempts):
        try:
            page = urllib2.urlopen(req, timeout=5).read()
            print(page)
            break
        except Exception as e:
            # Silently retry until the final attempt, then report failure.
            if attempt == max_attempts - 1:
                print('URLError: <urlopen error timed out> All times is failed ', e)



def req_by_header():
    """Request the listing page with a full browser-like header set.

    Constructs a Request carrying the complete Chrome-style header dict,
    sends it with the default opener, and prints the response body.
    """
    target = "https://ty.5i5j.com/xiaoqu/"
    browser_headers = {
        "Content-type": "text/html; charset=UTF-8",
        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,pl;q=0.7',
        'User-Agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
        "Referer": "https://ty.5i5j.com/",
        "Cache-Control": "max-age=0",
    }

    # Build the Request from the url together with the headers.
    page_request = urllib2.Request(target, headers=browser_headers)
    # Send the request to the server and read the whole body.
    page_response = urllib2.urlopen(page_request)
    print(page_response.read())


def req_by_detail():
    """Demonstrate a non-GET request with urllib2 debug logging enabled.

    HTTP defines several request methods (GET, HEAD, PUT, DELETE, POST,
    OPTIONS); urllib2 only issues GET/POST natively, so to send PUT (or
    DELETE) we override Request.get_method.

    PUT vs POST: both send data to the server, but PUT specifies the
    resource's storage location while with POST the server decides where
    the data lives.  DELETE removes a resource.

    Bug fixed vs. the original: the debug handlers were installed AFTER
    the PUT request had already been sent, the PUT response was then
    discarded, and a second plain GET overwrote it — so the debug log
    never showed the PUT and the network was hit twice.  Now the
    debug-enabled opener is installed first and the PUT is sent once.
    """
    url = 'http://www.baidu.com'

    # Install debug-logging handlers first so the request/response bytes
    # of the actual call are echoed to stdout for troubleshooting.
    httpHandler = urllib2.HTTPHandler(debuglevel=1)
    httpsHandler = urllib2.HTTPSHandler(debuglevel=1)
    opener = urllib2.build_opener(httpHandler, httpsHandler)
    urllib2.install_opener(opener)

    request = urllib2.Request(url)
    request.get_method = lambda: 'PUT'  # or 'DELETE'
    response = urllib2.urlopen(request)

    print('-------------------------------------------')
    print(response)


if __name__ == '__main__':
    # Demo entry point: uncomment exactly one of the request variants to run it.
    # req_by_header()
    # req_by_proxy()
    # req_by_detail()
    req_by_block()

