import requests
from bs4 import BeautifulSoup
import time

def get_free_proxies(url="https://www.xicidaili.com/nn/", timeout=10):
    """Scrape a free-proxy listing page and return the proxies that work.

    Fetches *url* (defaults to the xicidaili high-anonymity list), parses the
    ``#ip_list`` table, and keeps only HTTP/HTTPS entries that pass
    :func:`test_proxy`.

    Args:
        url: Proxy-list page to scrape. Must contain a table with id
            ``ip_list`` whose rows have IP in column 1, port in column 2 and
            protocol in column 5 (the xicidaili layout).
        timeout: Seconds to wait for the listing page itself.

    Returns:
        list[str]: Working proxies as ``"protocol://ip:port"`` strings.
        Empty on any network/HTTP failure (best-effort, errors are printed).
    """
    proxies = []

    headers = {
        # A browser-like UA; the site blocks the default python-requests UA.
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
    }

    try:
        response = requests.get(url, headers=headers, timeout=timeout)
        # Without this, an HTTP error page (403/503) would be parsed as if
        # it were the listing and silently yield zero proxies.
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')

        # Skip the header row of the table.
        for row in soup.select('#ip_list tr')[1:]:
            cells = row.find_all('td')
            if len(cells) < 6:
                continue  # malformed/ad row
            ip = cells[1].text.strip()
            port = cells[2].text.strip()
            protocol = cells[5].text.strip().lower()

            if protocol in ('http', 'https'):
                proxy = f"{protocol}://{ip}:{port}"
                # Only keep proxies that actually answer.
                if test_proxy(proxy):
                    proxies.append(proxy)
                    print(f"可用代理: {proxy}")

    except requests.RequestException as e:
        # Best-effort: report and return whatever was collected so far.
        print(f"获取代理失败: {e}")

    return proxies

def test_proxy(proxy, test_url='http://httpbin.org/ip', timeout=5):
    """Check whether a proxy can complete a simple HTTP request.

    Args:
        proxy: Proxy URL such as ``"http://1.2.3.4:8080"``; used for both
            HTTP and HTTPS traffic.
        test_url: Endpoint fetched through the proxy (defaults to httpbin's
            IP echo, which is cheap and side-effect free).
        timeout: Per-request timeout in seconds.

    Returns:
        bool: True if the request returned HTTP 200, False on any
        network error or non-200 status.
    """
    try:
        response = requests.get(
            test_url,
            proxies={'http': proxy, 'https': proxy},
            timeout=timeout,
        )
        return response.status_code == 200
    # Narrowed from a bare ``except:`` which also swallowed
    # KeyboardInterrupt/SystemExit; only network-level failures mean
    # "proxy unusable".
    except requests.RequestException:
        return False

if __name__ == "__main__":
    proxies = get_free_proxies()
    print(f"获取到 {len(proxies)} 个可用代理")