# import requests
# from lxml import etree
#
#
# res = requests.get('https://proxyhub.me/zh/cn-http-proxy-list.html')
#
# tree = etree.HTML(res.text)
#
# items = tree.xpath('//table[@class="table table-bordered"]/tbody//tr')
#
# for item in items:
#     ip = item.xpath('./td[1]/text()')[0]
#     port = item.xpath('./td[2]/text()')[0]
#     print(ip, port)
#
#
#     try:
#         res2 = requests.get('https://httpbin.org/get', proxies={
#             'http': f'http://{ip}:{port}',
#             'https': f'http://{ip}:{port}'
#         })
#     except Exception as e:
#         print(f"代理失败", e)
#     else:
#         print(res2.json()['origin'])
#
#     # break



# import requests
# ip = '114.232.206.98'
# port = 23114
# try:
#     res2 = requests.get('https://httpbin.org/get', proxies={
#         'http': f'http://{ip}:{port}',
#         'https': f'http://{ip}:{port}'
#     })
# except Exception as e:
#     print(f"代理失败", e)
# else:
#     print(res2.json()['origin'])



#!/usr/bin/env Python
# -*- coding: utf-8 -*-
#
# """
# 使用requests请求代理服务器
# 请求http和https网页均适用
# """
#
# import requests
#
# # 提取代理API接口，获取1个代理IP
# api_url = "https://dps.kdlapi.com/api/getdps/?secret_id=or7gy2qb2vom9diupu8r&signature=txoj22nu6yp3ihg6dhcwtvgqtzj6vnud&num=1&sep=1"
#
# # 获取API接口返回的代理IP
# proxy_ip = requests.get(api_url).text
#
# # 用户名密码认证(私密代理/独享代理)
# username = "d2773115006"
# password = "rhpcau89"
# proxies = {
#     "http": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": username, "pwd": password, "proxy": proxy_ip},
#     "https": "http://%(user)s:%(pwd)s@%(proxy)s/" % {"user": username, "pwd": password, "proxy": proxy_ip}
# }
#
# # 白名单方式（需提前设置白名单）
# # proxies = {
# #     "http": "http://%(proxy)s/" % {"proxy": proxy_ip},
# #     "https": "http://%(proxy)s/" % {"proxy": proxy_ip}
# # }
#
# # 要访问的目标网页
# target_url = "https://dev.kdlapi.com/testproxy"
#
# # 使用代理IP发送请求
# response = requests.get(target_url, proxies=proxies)
#
# # 获取页面内容
# if response.status_code == 200:
#     print(response.text)


#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Request pages through a proxy server using aiohttp.

Works for both http and https target pages.
"""
import random
import asyncio
import aiohttp
import requests

# Proxy-extraction API endpoint; returns one proxy ("ip:port") as plain text.
# NOTE(review): secret_id/signature in the URL and the username/password below
# are hard-coded credentials — move them to environment variables or config.
api_url = "https://dps.kdlapi.com/api/getdps/?secret_id=or7gy2qb2vom9diupu8r&signature=txoj22nu6yp3ihg6dhcwtvgqtzj6vnud&num=1&sep=1"

# Inspect what the API actually returns before trying to use it.
print("=== 检查API返回 ===")
# timeout added so a dead/slow endpoint cannot hang the script forever.
response = requests.get(api_url, timeout=10)
print("状态码:", response.status_code)
print("原始响应内容:", repr(response.text))  # repr() exposes raw whitespace/newlines
print("内容长度:", len(response.text))

# Parse the plain-text response: one proxy per line, blank lines dropped.
# str.strip/str.split cannot raise, so no try/except is needed here; the
# original "single proxy" fallback branch was unreachable for the same
# reason (it only ran when the text was empty, in which case it did nothing).
proxy_list = [line.strip()
              for line in response.text.strip().split('\n')
              if line.strip()]
if proxy_list:
    print("按文本格式解析的代理列表:", proxy_list)

# Username/password auth (private/dedicated proxy accounts).
username = "d2773115006"
password = "rhpcau89"
proxy_auth = aiohttp.BasicAuth(username, password)

print(f"最终代理列表: {proxy_list}")
print(f"代理数量: {len(proxy_list)}")


async def fetch(url):
    """Fetch *url* through the first proxy in the module-level ``proxy_list``.

    Prints the HTTP status and JSON body on success, the error on failure,
    and skips the request entirely when no proxy is available.

    :param url: target URL to request through the proxy.
    """
    # Guard clause: nothing to do without at least one usable proxy.
    if not (proxy_list and proxy_list[0]):
        print("无可用代理，跳过请求")
        return

    proxy_url = "http://" + proxy_list[0]  # test with the first proxy only
    print(f"使用代理: {proxy_url}")

    # aiohttp expects a ClientTimeout object; passing a bare int to the
    # per-request ``timeout=`` kwarg is deprecated. Set it on the session.
    timeout = aiohttp.ClientTimeout(total=10)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        try:
            async with session.get(url, proxy=proxy_url,
                                   proxy_auth=proxy_auth) as resp:
                content = await resp.json()
                print(f"status_code: {resp.status}, content: {content}")
        except Exception as e:
            # Best-effort probe: report and continue rather than crash.
            print(f"请求失败: {e}")


async def run():
    """Drive a single test request through ``fetch``."""
    target = "https://httpbin.org/get"
    # One request is enough to verify the proxy works.
    await fetch(target)


if __name__ == '__main__':
    # Script entry point: run the single proxy test on the asyncio event loop.
    asyncio.run(run())