import requests
from bs4 import BeautifulSoup
import time
import random

class ProxyManager:
    """Free-proxy pool manager.

    Scrapes candidate proxies from public free-proxy listing sites,
    checks each one against httpbin for liveness, and caches the working
    set, refreshing it at most once every ``update_interval`` seconds.
    """

    def __init__(self):
        # Cached list of validated proxy URLs, e.g. "http://1.2.3.4:8080".
        self.proxies = []
        # Unix timestamp of the last refresh (0 = never refreshed).
        self.last_update = 0
        self.update_interval = 1800  # refresh at most every 30 minutes

    def get_free_proxies(self):
        """Scrape all sources and return only proxies that pass the liveness test.

        Returns:
            list[str]: proxy URLs ("http://ip:port" / "https://ip:port")
            that responded successfully in ``test_proxy``.
        """
        sources = [
            self.get_xici_proxies,
            self.get_kuaidaili_proxies,
        ]

        all_proxies = []
        for source in sources:
            try:
                all_proxies.extend(source())
                time.sleep(2)  # be polite: avoid hitting the list sites too fast
            except Exception as e:
                # Best-effort aggregation: one broken/blocked source must not
                # abort the whole refresh, so log and move on.
                print(f"获取代理失败: {e}")

        # Keep only proxies that actually work right now.
        valid_proxies = []
        for proxy in all_proxies:
            if self.test_proxy(proxy):
                valid_proxies.append(proxy)
                print(f"可用代理: {proxy}")

        return valid_proxies

    def get_xici_proxies(self):
        """Scrape proxy candidates from xicidaili.com.

        Returns:
            list[str]: "protocol://ip:port" strings for http/https rows.

        Raises:
            requests.RequestException: on network failure or HTTP error status
            (handled per-source by ``get_free_proxies``).
        """
        proxies = []
        url = "https://www.xicidaili.com/nn/"
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
        }

        response = requests.get(url, headers=headers, timeout=10)
        # Fail loudly on 403/503 instead of silently parsing an error page.
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')

        # [1:] skips the table header row.
        for row in soup.select('#ip_list tr')[1:]:
            cells = row.find_all('td')
            if len(cells) >= 6:
                ip = cells[1].text.strip()
                port = cells[2].text.strip()
                protocol = cells[5].text.strip().lower()

                # Only keep rows advertising a protocol we can actually use.
                if protocol in ('http', 'https'):
                    proxies.append(f"{protocol}://{ip}:{port}")

        return proxies

    def get_kuaidaili_proxies(self):
        """Scrape proxy candidates from kuaidaili.com.

        Returns:
            list[str]: "protocol://ip:port" strings for http/https rows.

        Raises:
            requests.RequestException: on network failure or HTTP error status
            (handled per-source by ``get_free_proxies``).
        """
        proxies = []
        url = "https://www.kuaidaili.com/free/inha/1/"
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
        }

        response = requests.get(url, headers=headers, timeout=10)
        # Fail loudly on 403/503 instead of silently parsing an error page.
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')

        # [1:] skips the table header row.
        for row in soup.select('#list table tr')[1:]:
            cells = row.find_all('td')
            if len(cells) >= 4:
                ip = cells[0].text.strip()
                port = cells[1].text.strip()
                protocol = cells[3].text.strip().lower()

                # Validate the protocol column (consistent with get_xici_proxies);
                # previously any junk value produced a malformed proxy URL.
                if protocol in ('http', 'https'):
                    proxies.append(f"{protocol}://{ip}:{port}")

        return proxies

    def test_proxy(self, proxy):
        """Return True if *proxy* can fetch httpbin.org within 5 seconds."""
        try:
            response = requests.get(
                'http://httpbin.org/ip',
                proxies={'http': proxy, 'https': proxy},
                timeout=5,
            )
            return response.status_code == 200
        except requests.RequestException:
            # Dead/slow proxies are expected; any network-level failure just
            # means "unusable". Narrowed from a bare except so that
            # KeyboardInterrupt/SystemExit are no longer swallowed.
            return False

    def get_proxies(self):
        """Return the cached proxy list, refreshing it when stale or empty."""
        current_time = time.time()
        if not self.proxies or (current_time - self.last_update) > self.update_interval:
            print("更新代理IP列表...")
            self.proxies = self.get_free_proxies()
            self.last_update = current_time
            print(f"获取到 {len(self.proxies)} 个可用代理")

        return self.proxies

# Module-level singleton: importers share one proxy pool and its refresh cache.
proxy_manager = ProxyManager()