"""代理池管理器

管理和维护HTTP/HTTPS代理池，支持自动验证和失效检测
"""
import asyncio
import json
import logging
import random
import time
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, List, Optional, Tuple
from enum import Enum

import aiohttp
import redis.asyncio as aioredis
import aiofiles

logger = logging.getLogger(__name__)


class ProxyType(Enum):
    """Supported proxy protocol schemes.

    The enum value is used verbatim as the URL scheme when building
    proxy URLs (e.g. "http://host:port").
    """
    HTTP = "http"
    HTTPS = "https"
    SOCKS5 = "socks5"


class ProxyPool:
    """Proxy pool manager.

    Persists proxies to Redis (when configured) and to a local JSON file,
    validates them against public IP-echo endpoints, tracks per-proxy
    health, and hands proxies out fastest-first or round-robin.
    """

    def __init__(self, redis_url: str = None, storage_path: str = "./proxies"):
        """Initialize the proxy pool.

        Args:
            redis_url: Redis connection URL (optional; the local JSON
                snapshot is always maintained as a secondary store).
            storage_path: Directory holding the local JSON snapshot.
        """
        self.redis_url = redis_url
        self.storage_path = Path(storage_path)
        # parents=True fixes a crash when intermediate directories are
        # missing: plain mkdir(exist_ok=True) raises FileNotFoundError.
        self.storage_path.mkdir(parents=True, exist_ok=True)

        self.proxies: List[Dict] = []
        self.current_index = 0  # round-robin cursor for get_proxy(prefer_fast=False)
        self.redis_client = None

        # Per-proxy health stats (reserved; not populated anywhere yet).
        self.proxy_stats: Dict[str, Dict] = {}

        # Tuning parameters
        self.max_failures = 5  # consecutive failures before a proxy is marked invalid
        self.check_interval = 300  # health-check interval (seconds)
        self.timeout = 10  # per-request validation timeout (seconds)

        # Public IP-echo endpoints used to verify that a proxy works.
        self.test_urls = [
            "http://httpbin.org/ip",
            "https://api.ipify.org?format=json",
            "http://ip-api.com/json/"
        ]

    async def initialize(self):
        """Connect to Redis (if configured) and load stored proxies."""
        if self.redis_url:
            try:
                self.redis_client = aioredis.from_url(
                    self.redis_url,
                    encoding="utf-8",
                    decode_responses=True
                )
                logger.info("代理池已连接到Redis")
            except Exception as e:
                # Redis is optional: fall back to the local JSON file.
                logger.warning(f"Redis连接失败，使用本地存储: {e}")

        # Load persisted proxies from whichever store is available.
        await self.load_proxies()

    async def load_proxies(self):
        """Load the proxy list, preferring Redis over the local file."""
        # Try Redis first.
        if self.redis_client:
            try:
                key = "proxy_pool:all"
                data = await self.redis_client.get(key)
                if data:
                    self.proxies = json.loads(data)
                    logger.info(f"从Redis加载了 {len(self.proxies)} 个代理")
                    return
            except Exception as e:
                logger.error(f"从Redis加载失败: {e}")

        # Fall back to the local JSON snapshot.
        proxy_file = self.storage_path / "proxies.json"
        if proxy_file.exists():
            try:
                async with aiofiles.open(proxy_file, 'r') as f:
                    content = await f.read()
                    self.proxies = json.loads(content)
                    logger.info(f"从文件加载了 {len(self.proxies)} 个代理")
            except Exception as e:
                logger.error(f"从文件加载失败: {e}")

    async def save_proxies(self):
        """Persist the proxy list to Redis (1h TTL) and the local file."""
        # Save to Redis.
        if self.redis_client:
            try:
                key = "proxy_pool:all"
                await self.redis_client.set(
                    key,
                    json.dumps(self.proxies),
                    ex=3600  # expire after 1 hour
                )
            except Exception as e:
                logger.error(f"保存到Redis失败: {e}")

        # Save the local JSON snapshot.
        proxy_file = self.storage_path / "proxies.json"
        try:
            async with aiofiles.open(proxy_file, 'w') as f:
                await f.write(json.dumps(self.proxies, indent=2))
        except Exception as e:
            logger.error(f"保存到文件失败: {e}")

    def add_proxy(self, proxy: str, proxy_type: ProxyType = ProxyType.HTTP,
                  username: str = None, password: str = None,
                  metadata: Dict = None):
        """Add a proxy to the pool, or reset an existing entry.

        Args:
            proxy: Proxy address in IP:PORT form.
            proxy_type: Protocol scheme used when building the proxy URL.
            username: Auth username, if the proxy requires credentials.
            password: Auth password, if the proxy requires credentials.
            metadata: Arbitrary extra data stored with the proxy.
        """
        # Build the proxy URL, embedding credentials when both are given.
        if username and password:
            proxy_url = f"{proxy_type.value}://{username}:{password}@{proxy}"
        else:
            proxy_url = f"{proxy_type.value}://{proxy}"

        proxy_data = {
            'proxy': proxy,
            'proxy_url': proxy_url,
            'type': proxy_type.value,
            'add_time': datetime.now().isoformat(),
            'last_use': None,
            'last_check': None,
            'use_count': 0,
            'fail_count': 0,
            'success_count': 0,
            'status': 'unchecked',
            'response_time': None,
            'metadata': metadata or {}
        }

        # Re-adding a known proxy resets its counters and status to
        # 'unchecked' (deliberate: the entry is rebuilt from scratch).
        for p in self.proxies:
            if p['proxy'] == proxy:
                # Was an f-string with no placeholders; plain literal.
                logger.info("代理已存在，更新状态")
                p.update(proxy_data)
                return

        self.proxies.append(proxy_data)
        logger.info(f"添加新代理: {proxy}")

    async def validate_proxy(self, proxy_data: Dict) -> Tuple[bool, Optional[float]]:
        """Validate a proxy against the IP-echo test URLs.

        Args:
            proxy_data: Proxy record as created by add_proxy().

        Returns:
            (is_valid, response_time_seconds). The response time is None
            when the proxy failed every test URL (annotation fixed: the
            failure path returns None, not a float).
        """
        proxy_url = proxy_data['proxy_url']

        for test_url in self.test_urls:
            try:
                start_time = time.time()
                timeout = aiohttp.ClientTimeout(total=self.timeout)

                async with aiohttp.ClientSession(timeout=timeout) as session:
                    async with session.get(
                        test_url,
                        proxy=proxy_url,
                        ssl=False
                    ) as response:
                        if response.status == 200:
                            response_time = time.time() - start_time

                            # Echo services report the caller IP under
                            # 'ip' (ipify) or 'origin' (httpbin).
                            # NOTE(review): ip-api.com uses 'query', so the
                            # third URL can never satisfy this check —
                            # confirm whether that endpoint is still wanted.
                            data = await response.json()
                            if 'ip' in data or 'origin' in data:
                                return True, response_time

            except asyncio.TimeoutError:
                logger.debug(f"代理 {proxy_data['proxy']} 超时")
                continue
            except Exception as e:
                logger.debug(f"代理 {proxy_data['proxy']} 验证失败: {e}")
                continue

        return False, None

    async def check_all_proxies(self):
        """Validate every unchecked/active proxy concurrently."""
        logger.info(f"开始检查 {len(self.proxies)} 个代理")

        tasks = []
        for proxy_data in self.proxies:
            # Skip proxies already classified invalid/unstable.
            if proxy_data['status'] in ['unchecked', 'active']:
                tasks.append(self._check_proxy(proxy_data))

        if tasks:
            await asyncio.gather(*tasks)

        # Summarize results.
        active = len([p for p in self.proxies if p['status'] == 'active'])
        invalid = len([p for p in self.proxies if p['status'] == 'invalid'])

        logger.info(f"代理检查完成: 活跃={active}, 失效={invalid}")
        await self.save_proxies()

    async def _check_proxy(self, proxy_data: Dict):
        """Check one proxy and update its status fields in place."""
        is_valid, response_time = await self.validate_proxy(proxy_data)

        proxy_data['last_check'] = datetime.now().isoformat()

        if is_valid:
            proxy_data['status'] = 'active'
            proxy_data['response_time'] = response_time
            proxy_data['fail_count'] = 0  # success clears the failure streak
            logger.debug(f"代理 {proxy_data['proxy']} 验证成功，响应时间: {response_time:.2f}s")
        else:
            proxy_data['fail_count'] += 1
            if proxy_data['fail_count'] >= self.max_failures:
                proxy_data['status'] = 'invalid'
                logger.info(f"代理 {proxy_data['proxy']} 失效")
            else:
                proxy_data['status'] = 'unstable'

    def get_proxy(self, prefer_fast: bool = True) -> Optional[str]:
        """Get a usable proxy URL.

        Args:
            prefer_fast: When True, pick randomly among the fastest third
                of active proxies; otherwise round-robin.

        Returns:
            Proxy URL, or None when no active proxy exists.
        """
        active_proxies = [p for p in self.proxies if p['status'] == 'active']
        if not active_proxies:
            logger.warning("没有活跃的代理")
            return None

        if prefer_fast:
            # Sort by response time. `or float('inf')` guards against a
            # stored None: the previous get(..., inf) default returned
            # None (unsortable) because the key exists with value None.
            active_proxies.sort(
                key=lambda x: x.get('response_time') or float('inf'))
            # Pick randomly from the fastest ~30% (at least one).
            top_proxies = active_proxies[:max(1, len(active_proxies) // 3)]
            proxy_data = random.choice(top_proxies)
        else:
            # Round-robin over the active subset.
            proxy_data = active_proxies[self.current_index % len(active_proxies)]
            self.current_index = (self.current_index + 1) % len(active_proxies)

        # Record usage.
        proxy_data['last_use'] = datetime.now().isoformat()
        proxy_data['use_count'] += 1

        return proxy_data['proxy_url']

    def get_random_proxy(self) -> Optional[str]:
        """Get a uniformly random active proxy URL.

        Returns:
            Proxy URL, or None when no active proxy exists.
        """
        active_proxies = [p for p in self.proxies if p['status'] == 'active']
        if not active_proxies:
            return None

        proxy_data = random.choice(active_proxies)
        proxy_data['last_use'] = datetime.now().isoformat()
        proxy_data['use_count'] += 1

        return proxy_data['proxy_url']

    def mark_proxy_success(self, proxy_url: str):
        """Record a successful use of the given proxy URL."""
        for p in self.proxies:
            if p['proxy_url'] == proxy_url:
                p['success_count'] += 1
                p['fail_count'] = 0  # success resets the failure streak
                p['last_success'] = datetime.now().isoformat()
                break

    def mark_proxy_failed(self, proxy_url: str):
        """Record a failed use; demote the proxy when failures pile up."""
        for p in self.proxies:
            if p['proxy_url'] == proxy_url:
                p['fail_count'] += 1
                p['last_fail'] = datetime.now().isoformat()

                # Past the failure budget the proxy is dropped from
                # rotation; 3-4 failures mark it merely unstable.
                if p['fail_count'] >= self.max_failures:
                    p['status'] = 'invalid'
                    logger.warning(f"代理失效: {p['proxy']}")
                elif p['fail_count'] > 2:
                    p['status'] = 'unstable'
                break

    async def remove_invalid_proxies(self):
        """Drop proxies marked invalid and persist the pruned pool."""
        before_count = len(self.proxies)
        self.proxies = [p for p in self.proxies if p['status'] != 'invalid']
        removed = before_count - len(self.proxies)

        if removed > 0:
            logger.info(f"移除了 {removed} 个失效代理")
            await self.save_proxies()

    def get_stats(self) -> Dict:
        """Return pool-wide counters and the mean active response time."""
        total = len(self.proxies)
        active = len([p for p in self.proxies if p['status'] == 'active'])
        invalid = len([p for p in self.proxies if p['status'] == 'invalid'])
        unstable = len([p for p in self.proxies if p['status'] == 'unstable'])
        unchecked = len([p for p in self.proxies if p['status'] == 'unchecked'])

        # Mean response time over active proxies that have a measurement.
        response_times = [p['response_time'] for p in self.proxies
                         if p['status'] == 'active' and p.get('response_time')]
        avg_response_time = sum(response_times) / len(response_times) if response_times else 0

        return {
            'total': total,
            'active': active,
            'invalid': invalid,
            'unstable': unstable,
            'unchecked': unchecked,
            'avg_response_time': avg_response_time,
            'usage': {
                'total_uses': sum(p['use_count'] for p in self.proxies),
                'total_successes': sum(p['success_count'] for p in self.proxies),
                'total_failures': sum(p['fail_count'] for p in self.proxies)
            }
        }

    async def close(self):
        """Persist the pool and close the Redis connection, if any."""
        await self.save_proxies()
        if self.redis_client:
            await self.redis_client.close()


# Proxy source fetcher
class ProxyFetcher:
    """Fetches proxy addresses from free plain-text proxy source APIs."""

    @staticmethod
    async def fetch_from_api(api_url: str) -> List[str]:
        """Fetch a proxy list from an API endpoint.

        Args:
            api_url: URL of an API that returns one proxy (IP:PORT)
                per line as plain text.

        Returns:
            List of non-empty, stripped proxy strings; an empty list
            on any error or non-200 response.
        """
        try:
            # Passing a bare int as `timeout` to session.get() is
            # deprecated in aiohttp; use an explicit ClientTimeout.
            timeout = aiohttp.ClientTimeout(total=10)
            async with aiohttp.ClientSession(timeout=timeout) as session:
                async with session.get(api_url) as response:
                    if response.status == 200:
                        text = await response.text()
                        # One proxy per line. splitlines() also handles
                        # \r\n endings, which split('\n') would leave as
                        # trailing \r on each entry.
                        return [line.strip() for line in text.splitlines()
                                if line.strip()]
        except Exception as e:
            logger.error(f"获取代理失败: {e}")

        return []


async def main():
    """Smoke-test the proxy pool end to end."""
    pool = ProxyPool()
    await pool.initialize()

    # Seed with placeholder proxies (real ones are needed in practice).
    seed = (
        ("127.0.0.1:8080", ProxyType.HTTP),
        ("127.0.0.1:8081", ProxyType.HTTPS),
    )
    for address, scheme in seed:
        pool.add_proxy(address, scheme)

    # Run a health check across the whole pool.
    await pool.check_all_proxies()

    # Pull one proxy, fastest-first.
    proxy = pool.get_proxy(prefer_fast=True)
    print(f"获取到代理: {proxy}")

    # Dump aggregate statistics.
    stats = pool.get_stats()
    print(f"代理池统计: {json.dumps(stats, indent=2)}")

    await pool.close()


if __name__ == "__main__":
    asyncio.run(main())