# import requests
# from lxml import etree
#
#
# res = requests.get('https://proxyhub.me/zh/cn-http-proxy-list.html')
#
# tree = etree.HTML(res.text)
#
# items = tree.xpath('//table[@class="table table-bordered"]/tbody//tr')
#
# for item in items:
#     ip = item.xpath('./td[1]/text()')[0]
#     port = item.xpath('./td[2]/text()')[0]
#     print(ip, port)
#
#
#     try:
#         res2 = requests.get('https://httpbin.org/get', proxies={
#             'http': f'http://{ip}:{port}',
#             'https': f'http://{ip}:{port}'
#         })
#     except Exception as e:
#         print(f"代理失败", e)
#     else:
#         print(res2.json()['origin'])
#
#     # break



# import requests
# ip = '114.232.206.98'
# port = 23114
# try:
#     res2 = requests.get('https://httpbin.org/get', proxies={
#         'http': f'http://{ip}:{port}',
#         'https': f'http://{ip}:{port}'
#     })
# except Exception as e:
#     print(f"代理失败", e)
# else:
#     print(res2.json()['origin'])



"""
使用requests请求代理服务器
请求http和https网页均适用
"""

# import requests
#
# # 提取代理API接口，获取1个代理IP
# api_url = "https://dps.kdlapi.com/api/getdps/?secret_id=oj4i2tid4cpojoimew8u&signature=ns3a6j4oi14bv1e21uncyiqehhwf3o9z&num=1&sep=1"
#
# # 获取API接口返回的代理IP
# proxy_ip = requests.get(api_url).text
# print(f"获取到代理IP {proxy_ip}")
#
#
# # 用户名密码认证(私密代理/独享代理)
# username = "d4193409972"
# password = "njyzbgtb"
#
#
# proxies = {
#     "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": username, "pwd": password, "proxy": proxy_ip},
#     "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": username, "pwd": password, "proxy": proxy_ip}
# }
# print(f"使用格式  {proxies}")
# # 要访问的目标网页
# target_url = "https://httpbin.org/get"
#
# # 使用代理IP发送请求
# response = requests.get(target_url, proxies=proxies)
#
# # 获取页面内容
# if response.status_code == 200:
#     print(response.json())



#!/usr/bin/env python
# -*- coding: utf-8 -*-
# NOTE(review): a shebang only takes effect on line 1 of a file; here it is inert.

"""
使用aiohttp请求代理服务器
请求http和https网页均适用

"""
import random
import asyncio

import aiohttp
import requests



# Proxy-provider API endpoint; returns JSON.
# NOTE(review): secret_id/signature are credentials committed to source —
# move them to environment variables or a config file and rotate them.
api_url = "https://dps.kdlapi.com/api/getdps/?secret_id=oj4i2tid4cpojoimew8u&signature=ns3a6j4oi14bv1e21uncyiqehhwf3o9z&num=1&format=json&sep=1"  # API endpoint

# proxy_list returned by the API.
# NOTE(review): this network request runs at import time, so the module
# cannot even be imported offline; no error handling if the API call fails.
proxy_list = requests.get(api_url).json().get('data').get('proxy_list')

# Username/password authentication (private/dedicated proxy).
# NOTE(review): plaintext credentials in source — externalize these too.
username = "d4193409972"
password = "njyzbgtb"

# Basic-auth object passed to every aiohttp request below.
proxy_auth = aiohttp.BasicAuth(username, password)

# One proxy chosen only for the log line below; fetch() picks its own
# random proxy per request.
proxies = "http://" + random.choice(proxy_list)
print(f"用户信息 {proxy_auth}  代理信息 {proxies}")
async def fetch(url):
    """Fetch *url* once through a randomly chosen proxy and print the result.

    Relies on the module-level ``proxy_list`` and ``proxy_auth``; each call
    draws a fresh proxy from the list.
    """
    proxy_url = "http://" + random.choice(proxy_list)
    async with aiohttp.ClientSession() as http_session:
        async with http_session.get(url, proxy=proxy_url, proxy_auth=proxy_auth) as resp:
            content = await resp.json()
            print(f"status_code: {resp.status}, content: {content}")


async def run():
    """Issue five concurrent requests to the target page via fetch()."""
    page_url = "https://httpbin.org/get"  # target page to request
    # Fan out 5 concurrent fetches and wait for all of them to finish.
    await asyncio.gather(*(fetch(page_url) for _ in range(5)))


if __name__ == '__main__':
    # Entry point: run the async demo on a fresh event loop.
    asyncio.run(run())