"""反爬虫策略工具"""
import asyncio
import logging
import random
import time
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional
from urllib.parse import urlparse

logger = logging.getLogger(__name__)


class AntiCrawlerStrategy:
    """Anti-crawler strategy manager.

    Maintains a user-agent pool, per-platform cookie pools, a proxy pool,
    and per-domain request timestamps used for client-side rate limiting.
    """

    def __init__(self):
        # Pool of realistic desktop browser user agents (Chrome/Firefox/Safari/Edge).
        self.user_agents: List[str] = [
            # Chrome
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
            # Firefox
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:120.0) Gecko/20100101 Firefox/120.0',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:120.0) Gecko/20100101 Firefox/120.0',
            # Safari
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.1 Safari/605.1.15',
            # Edge
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36 Edg/120.0.0.0'
        ]

        # Per-platform cookie pools: platform name -> list of cookie strings.
        self.cookie_pools: Dict[str, List[str]] = {}

        # Proxy pool: entries formatted like 'http://ip:port'.
        self.proxy_pool: List[str] = []

        # Per-domain request timestamps from the last minute (rate limiting).
        self.request_history: Dict[str, List[datetime]] = {}

    def get_random_user_agent(self) -> str:
        """Return a user agent chosen uniformly at random from the pool."""
        return random.choice(self.user_agents)

    def get_random_delay(self, min_seconds: float = 1.0, max_seconds: float = 3.0) -> float:
        """Return a uniformly random delay in ``[min_seconds, max_seconds]``."""
        return random.uniform(min_seconds, max_seconds)

    async def apply_delay(self, min_seconds: float = 1.0, max_seconds: float = 3.0):
        """Sleep for a random delay to avoid a detectable request cadence."""
        delay = self.get_random_delay(min_seconds, max_seconds)
        logger.debug("Applying delay: %.2f seconds", delay)
        await asyncio.sleep(delay)

    def add_cookies(self, platform: str, cookies: List[str]):
        """Add cookies to a platform's pool.

        Args:
            platform: Platform name.
            cookies: List of cookie strings to append.
        """
        # setdefault creates the pool on first use for a platform.
        self.cookie_pools.setdefault(platform, []).extend(cookies)
        logger.info("Added %d cookies for %s", len(cookies), platform)

    def get_cookie(self, platform: str) -> Optional[str]:
        """Return a random cookie for *platform*.

        Args:
            platform: Platform name.

        Returns:
            A cookie string, or None if the pool is missing or empty.
        """
        pool = self.cookie_pools.get(platform)
        if not pool:
            return None
        return random.choice(pool)

    def add_proxies(self, proxies: List[str]):
        """Add proxies to the pool.

        Args:
            proxies: Proxy URLs such as 'http://ip:port'.
        """
        self.proxy_pool.extend(proxies)
        logger.info("Added %d proxies", len(proxies))

    def get_proxy(self) -> Optional[str]:
        """Return a random proxy, or None if the pool is empty."""
        if not self.proxy_pool:
            return None
        return random.choice(self.proxy_pool)

    def check_rate_limit(self, domain: str, max_requests_per_minute: int = 30) -> bool:
        """Check (and record) a request against the per-domain rate limit.

        Args:
            domain: Domain name the request targets.
            max_requests_per_minute: Maximum requests allowed per minute.

        Returns:
            True if the request may be sent (it is then recorded),
            False if the limit has been reached.
        """
        now = datetime.now()
        minute_ago = now - timedelta(minutes=1)

        history = self.request_history.setdefault(domain, [])

        # Drop timestamps older than one minute (sliding window).
        history = [ts for ts in history if ts > minute_ago]
        self.request_history[domain] = history

        if len(history) >= max_requests_per_minute:
            logger.warning(
                "Rate limit reached for %s: %d requests in last minute",
                domain, len(history)
            )
            return False

        # Record this request only when it is actually allowed.
        history.append(now)
        return True

    def get_headers_with_referer(self, url: str, referer: Optional[str] = None) -> Dict[str, str]:
        """Build browser-like request headers, optionally with a Referer.

        Args:
            url: Target URL; used to classify the Sec-Fetch-Site value.
            referer: Referring page URL, if any.

        Returns:
            Header dict suitable for fetching an HTML document.
        """
        if referer:
            # Fix: the original ignored `url` and always claimed 'same-origin'.
            # Classify the fetch relative to the referer's origin instead.
            target = urlparse(url)
            source = urlparse(referer)
            if (target.scheme, target.netloc) == (source.scheme, source.netloc):
                sec_fetch_site = 'same-origin'
            else:
                sec_fetch_site = 'cross-site'
        else:
            sec_fetch_site = 'none'

        headers = {
            'User-Agent': self.get_random_user_agent(),
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
            'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
            'Accept-Encoding': 'gzip, deflate, br',
            'Connection': 'keep-alive',
            'Upgrade-Insecure-Requests': '1',
            'Sec-Fetch-Dest': 'document',
            'Sec-Fetch-Mode': 'navigate',
            'Sec-Fetch-Site': sec_fetch_site,
            'Cache-Control': 'max-age=0'
        }

        if referer:
            headers['Referer'] = referer

        return headers


class WeiboCookieManager:
    """Weibo cookie manager with round-robin rotation."""

    def __init__(self):
        # Ordered cookie list; rotation walks it cyclically.
        self.cookies: List[str] = []
        # Index of the cookie to hand out next.
        self.current_index: int = 0

    def add_cookie(self, cookie: str):
        """Append a cookie string to the rotation."""
        self.cookies.append(cookie)

    def get_next_cookie(self) -> Optional[str]:
        """Return the next cookie in round-robin order, or None if empty."""
        if not self.cookies:
            return None

        cookie = self.cookies[self.current_index]
        # Wrap around so rotation cycles through all cookies.
        self.current_index = (self.current_index + 1) % len(self.cookies)
        return cookie

    def load_from_file(self, filepath: str):
        """Load cookies from a text file.

        File format: one cookie per line; blank lines and lines starting
        with '#' are ignored.

        Args:
            filepath: Path to the cookie file.
        """
        try:
            # Fix: explicit encoding — the default is locale-dependent and
            # breaks on non-ASCII cookie values under non-UTF-8 locales.
            with open(filepath, 'r', encoding='utf-8') as f:
                for line in f:
                    line = line.strip()
                    if line and not line.startswith('#'):
                        self.add_cookie(line)
            logger.info("Loaded %d cookies from %s", len(self.cookies), filepath)
        except FileNotFoundError:
            logger.warning("Cookie file not found: %s", filepath)
        except Exception as e:
            logger.error("Error loading cookies: %s", e)


# Module-level singletons shared by the crawler code.
anti_crawler_strategy = AntiCrawlerStrategy()
weibo_cookie_manager = WeiboCookieManager()