import requests
import random
import time
import logging
from typing import List, Dict, Optional, Any
from concurrent.futures import ThreadPoolExecutor, as_completed
import threading

logger = logging.getLogger(__name__)

class ProxyManager:
    """Manage a pool of HTTP proxies: loading, health-checking, and rotation.

    Proxies are stored as requests-style mappings::

        {'http': 'http://host:port', 'https': 'http://host:port'}

    The working/failed lists and the round-robin cursor are guarded by
    ``self.lock`` so the manager can be shared between threads.
    """

    def __init__(self, config=None):
        self.config = config              # optional external configuration (opaque here)
        self.proxies = []                 # all loaded proxies
        self.working_proxies = []         # proxies that passed the last health check
        self.failed_proxies = []          # proxies that failed a check or were marked bad
        self.current_index = 0            # round-robin cursor for get_next_proxy()
        self.lock = threading.Lock()      # guards the three lists and the cursor
        self.last_check_time = 0          # epoch seconds of the last full health check
        self.check_interval = 300         # re-check proxies every 5 minutes

    def load_proxies_from_list(self, proxy_list: List[str]) -> None:
        """Load proxies from "host:port" or "scheme://host:port" strings."""
        loaded = []
        for proxy in proxy_list:
            # Default to the http scheme when none is given.
            if '://' not in proxy:
                proxy = f'http://{proxy}'
            loaded.append({
                'http': proxy,
                'https': proxy
            })
        self.proxies = loaded
        logger.info(f"Loaded {len(self.proxies)} proxies from list")

    def load_proxies_from_file(self, filepath: str) -> None:
        """Load proxies from a text file, one per line.

        Blank lines and lines starting with ``#`` are skipped. Errors are
        logged rather than raised (best-effort loading).
        """
        try:
            with open(filepath, 'r', encoding='utf-8') as f:
                proxy_list = [line.strip() for line in f if line.strip() and not line.startswith('#')]
            self.load_proxies_from_list(proxy_list)
        except FileNotFoundError:
            logger.error(f"Proxy file not found: {filepath}")
        except Exception as e:
            logger.error(f"Error loading proxy file: {str(e)}")

    def load_proxies_from_api(self, api_url: str, parser_func=None) -> None:
        """Fetch a proxy list from an HTTP API.

        ``parser_func`` (text -> list[str]) may be supplied for custom
        response formats; the default parser assumes one proxy per line.
        Errors are logged rather than raised.
        """
        try:
            response = requests.get(api_url, timeout=30)
            response.raise_for_status()

            if parser_func:
                proxy_list = parser_func(response.text)
            else:
                # Default parser: one proxy per line.
                proxy_list = [line.strip() for line in response.text.split('\n') if line.strip()]

            self.load_proxies_from_list(proxy_list)

        except Exception as e:
            logger.error(f"Error loading proxies from API: {str(e)}")

    def test_proxy(self, proxy: Dict[str, str], test_url: str = "http://httpbin.org/ip", timeout: int = 10) -> bool:
        """Return True if a GET through *proxy* to *test_url* yields HTTP 200."""
        try:
            response = requests.get(
                test_url,
                proxies=proxy,
                timeout=timeout,
                headers={'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'}
            )

            if response.status_code == 200:
                logger.debug(f"Proxy working: {proxy['http']}")
                return True
            else:
                logger.debug(f"Proxy failed with status {response.status_code}: {proxy['http']}")
                return False

        except Exception as e:
            logger.debug(f"Proxy test failed: {proxy['http']} - {str(e)}")
            return False

    def test_all_proxies(self, max_workers: int = 50, timeout: int = 10) -> None:
        """Concurrently test every loaded proxy and rebuild the result lists.

        Bug fix: previous versions appended to ``working_proxies`` /
        ``failed_proxies`` without clearing them, so every periodic refresh
        accumulated duplicates. Results are now built locally and swapped in
        atomically under the lock, and the rotation cursor is reset.
        """
        if not self.proxies:
            logger.warning("No proxies to test")
            return

        logger.info(f"Testing {len(self.proxies)} proxies with {max_workers} workers...")

        working, failed = [], []
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            future_to_proxy = {
                executor.submit(self.test_proxy, proxy, timeout=timeout): proxy
                for proxy in self.proxies
            }

            for future in as_completed(future_to_proxy):
                proxy = future_to_proxy[future]
                try:
                    if future.result():
                        working.append(proxy)
                    else:
                        failed.append(proxy)
                except Exception as e:
                    logger.error(f"Error testing proxy {proxy['http']}: {str(e)}")
                    failed.append(proxy)

        # Swap in the fresh results atomically so readers never see a
        # half-updated pool; reset the cursor since the list changed.
        with self.lock:
            self.working_proxies = working
            self.failed_proxies = failed
            self.current_index = 0

        logger.info(f"Proxy test completed: {len(working)} working, {len(failed)} failed")
        self.last_check_time = time.time()

    def get_random_proxy(self) -> Optional[Dict[str, str]]:
        """Return a random working proxy, or None if none are available.

        Bug fix: the staleness check now runs even when the working list is
        currently empty, so a fully-failed pool can recover on the next
        scheduled refresh (the original returned None before re-checking).
        """
        if self.proxies and time.time() - self.last_check_time > self.check_interval:
            logger.info("Proxies need refresh, testing...")
            self.test_all_proxies()

        with self.lock:
            if self.working_proxies:
                return random.choice(self.working_proxies)
        return None

    def get_next_proxy(self) -> Optional[Dict[str, str]]:
        """Return the next working proxy in round-robin order, or None.

        Bug fix: the cursor is re-normalized before indexing — the working
        list may have shrunk (mark_proxy_failed / a refresh) since the last
        call, which previously could raise IndexError.
        """
        with self.lock:
            if not self.working_proxies:
                return None
            self.current_index %= len(self.working_proxies)
            proxy = self.working_proxies[self.current_index]
            self.current_index = (self.current_index + 1) % len(self.working_proxies)
            return proxy

    def mark_proxy_failed(self, proxy: Dict[str, str]) -> None:
        """Move *proxy* from the working list to the failed list (no-op if absent)."""
        with self.lock:
            if proxy in self.working_proxies:
                self.working_proxies.remove(proxy)
                self.failed_proxies.append(proxy)
                logger.warning(f"Marked proxy as failed: {proxy['http']}")

    def get_proxy_stats(self) -> Dict[str, int]:
        """Return counts of total / working / failed proxies."""
        with self.lock:
            return {
                'total': len(self.proxies),
                'working': len(self.working_proxies),
                'failed': len(self.failed_proxies)
            }

    def save_working_proxies(self, filepath: str) -> None:
        """Write the working proxies to *filepath*, one per line.

        The default ``http://`` scheme is stripped so the file round-trips
        through load_proxies_from_file(). The list is snapshotted under the
        lock so concurrent mutation cannot corrupt the output.
        """
        try:
            with self.lock:
                snapshot = list(self.working_proxies)
            with open(filepath, 'w', encoding='utf-8') as f:
                for proxy in snapshot:
                    proxy_url = proxy['http'].replace('http://', '')
                    f.write(f"{proxy_url}\n")
            logger.info(f"Saved {len(snapshot)} working proxies to {filepath}")
        except Exception as e:
            logger.error(f"Error saving proxies: {str(e)}")

class ProxyRotator:
    """Higher-level proxy selection built on top of :class:`ProxyManager`.

    Tracks per-proxy success statistics, prefers the proxy with the highest
    observed success rate, and reports persistently failing proxies back to
    the manager. Annotations for project/third-party types are quoted so
    the class definition does not depend on evaluating them.
    """

    def __init__(self, proxy_manager: "ProxyManager", max_retries: int = 3):
        self.proxy_manager = proxy_manager
        self.max_retries = max_retries          # attempts per make_request_with_rotation() call
        self.proxy_usage_count = {}             # proxy url -> {'success': int, 'total': int}
        self.proxy_success_rate = {}            # proxy url -> success ratio in [0, 1]

    def get_best_proxy(self) -> Optional[Dict[str, str]]:
        """Return the working proxy with the highest recorded success rate.

        Unrated proxies default to a 100% success rate so fresh proxies get
        tried. The working list is snapshotted under the manager's lock so a
        concurrent mark_proxy_failed() cannot disturb the scan. Returns None
        when no working proxies are available.
        """
        with self.proxy_manager.lock:
            candidates = list(self.proxy_manager.working_proxies)
        if not candidates:
            return None
        # Ties keep the first candidate, matching the original strict-'>' scan.
        return max(candidates, key=lambda p: self.proxy_success_rate.get(p['http'], 1.0))

    def record_proxy_result(self, proxy: Dict[str, str], success: bool) -> None:
        """Record the outcome of one request made through *proxy*.

        Updates the running success rate; once a proxy has at least 10
        recorded uses and its success rate drops below 30%, it is reported
        to the manager as failed.
        """
        proxy_key = proxy['http']
        stats = self.proxy_usage_count.setdefault(proxy_key, {'success': 0, 'total': 0})

        stats['total'] += 1
        if success:
            stats['success'] += 1

        rate = stats['success'] / stats['total']
        self.proxy_success_rate[proxy_key] = rate

        # Persistently bad proxies are evicted from the working pool.
        if stats['total'] >= 10 and rate < 0.3:
            self.proxy_manager.mark_proxy_failed(proxy)

    def make_request_with_rotation(self, url: str, **kwargs) -> "Optional[requests.Response]":
        """GET *url*, rotating to another proxy on failure.

        Extra ``kwargs`` are forwarded to ``requests.get``; a 30 s timeout is
        applied unless the caller supplies one. Returns the first successful
        response, or None once all attempts are exhausted.

        Bug fix: the 1-3 s backoff is now skipped after the final attempt —
        the original slept even when no retry would follow.
        """
        last_exception = None

        for attempt in range(self.max_retries):
            proxy = self.get_best_proxy()
            if not proxy:
                logger.warning("No working proxies available")
                break

            try:
                kwargs['proxies'] = proxy
                kwargs.setdefault('timeout', 30)

                response = requests.get(url, **kwargs)
                response.raise_for_status()

            except Exception as e:
                last_exception = e
                logger.warning(f"Request failed with proxy {proxy['http']}: {str(e)}")
                self.record_proxy_result(proxy, False)
                # Brief jittered backoff before retrying — but not after the
                # last attempt.
                if attempt < self.max_retries - 1:
                    time.sleep(random.uniform(1, 3))
            else:
                self.record_proxy_result(proxy, True)
                return response

        logger.error(f"All proxy attempts failed for {url}. Last error: {last_exception}")
        return None