import requests
import time
import random
from fake_useragent import UserAgent
from bs4 import BeautifulSoup  # 引入 BeautifulSoup

# Shared User-Agent generator; each request draws a fresh random UA string
# from ua.random to reduce the chance of being blocked.
ua = UserAgent()

def get_free_proxies():
    """Scrape candidate proxy addresses from a free proxy listing site.

    Returns:
        list[str]: proxies formatted as ``"ip:port"``; an empty list on
        any network or parsing failure (errors are printed, not raised).
    """
    url = "https://free-proxy-list.net/"  # example free-proxy source
    headers = {"User-Agent": ua.random}
    try:
        # Timeout prevents the scraper from hanging indefinitely on a
        # slow or unreachable host.
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, "html.parser")
        # The site's markup has changed over time; if the historical
        # table id is gone, fall back to the first table on the page
        # instead of crashing on a None result.
        table = soup.find("table", {"id": "proxylisttable"}) or soup.find("table")
        if table is None:
            print("解析代理 IP 失败: 未找到代理表格")
            return []
        proxies = []
        for row in table.find_all("tr")[1:]:
            cells = row.find_all("td")
            if len(cells) < 2:
                # Skip header/footer or malformed rows instead of
                # raising IndexError.
                continue
            ip = cells[0].text.strip()
            port = cells[1].text.strip()
            proxies.append(f"{ip}:{port}")
        return proxies
    except requests.exceptions.RequestException as e:
        print(f"获取代理 IP 失败: {e}")
        return []
    except Exception as e:
        # Catch-all boundary for unexpected parsing errors; report and
        # degrade to an empty result rather than crash the caller.
        print(f"解析代理 IP 失败: {e}")
        return []

def verify_proxy(proxy):
    """Check whether a proxy can successfully reach a probe site.

    Args:
        proxy: proxy address, either ``"ip:port"`` or a full URL with a
            scheme (both forms are accepted).

    Returns:
        bool: True if a request routed through the proxy succeeds
        within 5 seconds, False otherwise.
    """
    url = "https://www.baidu.com/"  # example probe target
    headers = {"User-Agent": ua.random}
    # requests/urllib3 expect proxy URLs to carry a scheme; a bare
    # "ip:port" can be rejected or misinterpreted, so prefix one when
    # it is missing (backward compatible with already-schemed input).
    proxy_url = proxy if "://" in proxy else f"http://{proxy}"
    try:
        response = requests.get(
            url,
            headers=headers,
            proxies={"http": proxy_url, "https": proxy_url},
            timeout=5,
        )
        response.raise_for_status()
        return True
    except requests.exceptions.RequestException:
        return False

def get_working_proxies():
    """Verify every scraped proxy and return only the usable ones.

    Prints a one-line status for each candidate as it is checked.
    """
    usable = []
    for candidate in get_free_proxies():
        if verify_proxy(candidate):
            usable.append(candidate)
            print(f"代理 {candidate} 可用")
        else:
            print(f"代理 {candidate} 不可用")
    return usable

# Build the shared pool of verified proxy IPs used by use_proxy() below.
# NOTE(review): this runs a full network scrape + verification pass at
# import time — confirm that is intended before importing this module
# as a library.
working_proxies = get_working_proxies()

def use_proxy(url):
    """Fetch *url* through a randomly chosen proxy from the shared pool.

    Args:
        url: the target URL to request.

    Returns:
        The response body text on success; None when the pool is empty
        or the request fails. A failing proxy is removed from the pool.
    """
    if not working_proxies:
        print("没有可用的代理 IP")
        return None

    chosen = random.choice(working_proxies)
    headers = {"User-Agent": ua.random}
    proxy_map = {"http": chosen, "https": chosen}
    try:
        response = requests.get(url, headers=headers, proxies=proxy_map, timeout=10)
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        print(f"使用代理 {chosen} 请求失败: {e}")
        # Evict the dead proxy so later calls do not retry it.
        working_proxies.remove(chosen)
        return None
    except Exception as e:
        print(f"发生其他错误: {e}")
        return None
    return response.text

# Example usage
if __name__ == "__main__":
    target = "https://www.example.com/"
    body = use_proxy(target)
    if body:
        print(body[:100])  # preview: first 100 characters only