'''
Use a proxy when an IP address that hits a page too frequently gets
blacklisted by the server, or when we don't want to expose our real
IP address/location.
'''
import urllib.request
from urllib import request
from urllib.request import Request
import random

from urllib3 import HTTPResponse

# Target: a Baidu search for "ip" -- the result page displays the caller's
# IP address, which makes it easy to verify whether the proxy took effect.
url = "https://www.baidu.com/s?wd=ip"

# Request headers copied from a real browser session.
# NOTE(review): the Cookie value contains account/session tokens (BDUSS etc.)
# tied to one logged-in user -- it will expire and should not be committed to
# a shared repository.
headers = {
    'Cookie': 'BIDUPSID=8AC47C91EB24CEF54D60954251023E56; PSTM=1662460286; BAIDUID=1C81DFCBB13256AD62B7F7C55C088BF0:FG=1; BD_UPN=12314753; H_WISE_SIDS_BFESS=60236_60298_60326; BDUSS=l1VjEyUm1CMjkyODFHVjktak9SR35rVVQ3RUhTVmxVMUR2clh4ODhsOHo2WTltRVFBQUFBJCQAAAAAAQAAAAEAAABFqE5lVm9rdGEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADNcaGYzXGhmNG; BDUSS_BFESS=l1VjEyUm1CMjkyODFHVjktak9SR35rVVQ3RUhTVmxVMUR2clh4ODhsOHo2WTltRVFBQUFBJCQAAAAAAQAAAAEAAABFqE5lVm9rdGEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADNcaGYzXGhmNG; BAIDUID_BFESS=1C81DFCBB13256AD62B7F7C55C088BF0:FG=1; ZFY=QA1Cl0N7KouaHe:A8lZEMeUihNzxy5WzBRcUKjovdk5Q:C; B64_BOT=1; H_PS_PSSID=60236_60326_60334_60297_60345; H_WISE_SIDS=60236_60326_60334_60297_60345; BA_HECTOR=01a0848l8ha58l0la121010h2ompl11j6o2v51v; RT="z=1&dm=baidu.com&si=2f056dce-669f-4af1-b8cc-16e2e34928c9&ss=lxejnd6e&sl=0&tt=0&bcn=https%3A%2F%2Ffclog.baidu.com%2Flog%2Fweirwood%3Ftype%3Dperf&ld=8u3&ul=1oh3o&hd=1oh4p"; BDRCVFR[TVogQ1G2T-s]=-kY6jVJCv-DpykGIhR8mv3; delPer=0; BD_CK_SAM=1; PSINO=2; BDRCVFR[oxw6P5LCmIs]=mk3SLVN4HKm; BDRCVFR[feWj1Vr5u3D]=I67x6TjHwwYf0; BDORZ=B490B5EBF6F3CD402E515D22BCDA1598; sugstore=0; H_PS_645EC=eb4a6%2F6XcmuG5%2Fhqb%2FQk5tVqD63OEcJZLAsQ%2BH5%2BmAoeCl%2Btb9HdbbS8ofA; baikeVisitId=99ab371e-d9c9-4846-9535-af8111fcff60; COOKIE_SESSION=103_0_5_7_5_10_1_1_3_5_0_0_87205_0_2565_0_1718357050_0_1718354485%7C9%231304336_28_1717939087%7C9',
    # Desktop Chrome user-agent string, presented so the request looks like a
    # normal browser rather than the default urllib client.
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36 SLBrowser/9.0.3.5211 SLBChan/105"
}

# Build the Request object carrying the custom browser-like headers.
my_request: Request = request.Request(url=url, headers=headers)

# Proxy address (e.g. from a commercial provider such as Kuaidaili).
# Format: dotted-quad IPv4 address + port, e.g. "0.0.0.0:0000".
# BUG FIX: the target URL is https, but the original dict only mapped the
# 'http' scheme, so ProxyHandler never routed this request through the
# proxy. Map 'https' as well so the proxy actually applies.
proxy_addr = '117.42.94.9:18520'
proxies = {
    'http': proxy_addr,
    'https': proxy_addr,
}

# 1. Build the ProxyHandler -- it takes a dict mapping scheme -> proxy.
handler = urllib.request.ProxyHandler(proxies=proxies)
# 2. Build an opener that routes requests through that handler.
opener = urllib.request.build_opener(handler)
# 3. Open the request. Use the response as a context manager so the
#    underlying connection is always closed (the original leaked it).
#    response.read() may only be called once: the stream pointer advances
#    to EOF, so a second call returns b"".
with opener.open(my_request) as response:
    with open("ip.html", "w", encoding="utf-8") as f:
        f.write(response.read().decode("utf-8"))

# Proxy pool: several exit addresses to rotate between, so no single IP
# hits the target often enough to get blacklisted.
proxies_pool = [
    {'http': addr}
    for addr in (
        '118.24.219.151:16817',
        '118.24.219.151:16885',
        '119.24.219.151:16817',
    )
]
# Pick one proxy mapping at random for the next request.
random_proxies = random.choice(proxies_pool)
