#!/usr/bin/env python3
"""
反爬虫管理器 - 处理403、验证码等反爬机制
"""

import logging
import time
import random
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Tuple
from pathlib import Path
import json

logger = logging.getLogger(__name__)

class AntiSpiderManager:
    """Anti-spider manager (enhanced).

    Tracks per-IP 403 bans and failed requests, persists that state to
    ``data/anti_spider_status.json`` so it survives restarts, and derives
    conservative delays / stop decisions so the crawler backs off before
    triggering further blocks.
    """

    def __init__(self) -> None:
        self.blocked_ips: Dict[str, str] = {}             # ip -> ISO timestamp of when it was banned
        self.failed_requests: Dict[str, List[Dict]] = {}  # ip -> recent failure records (last 100)
        self.last_request_time: Dict = {}                 # ip -> last request time
        self.device_fingerprints: Dict = {}               # device fingerprint pool
        self.behavior_patterns: Dict = {}                 # behavior pattern library
        self.status_file = Path("data/anti_spider_status.json")

        # Restore any persisted ban/failure state from a previous run.
        self.load_status()

        # Build the static fingerprint / behavior pools.
        self.init_fingerprint_pool()

        # Tuning parameters -- deliberately conservative strategy.
        self.config = {
            'min_delay_between_pages': 90,          # min delay between pages (seconds)
            'max_delay_between_pages': 180,         # max delay between pages (seconds)
            'min_delay_between_categories': 600,    # min delay between categories (seconds)
            'max_delay_between_categories': 1200,   # max delay between categories (seconds)
            'blacklist_duration': 24 * 3600,        # how long an IP stays blacklisted (seconds)
            'max_consecutive_failures': 3,          # max consecutive failures before pausing
            'recovery_wait_time': 3600,             # recovery wait time (1 hour)
            'fingerprint_rotation_interval': 1800,  # fingerprint rotation interval (30 minutes)
        }

    def load_status(self) -> None:
        """Load persisted anti-spider state (blocked IPs / failures) from disk.

        Best-effort: a missing or unreadable status file leaves the manager
        with empty state and only logs the error.
        """
        try:
            if self.status_file.exists():
                with open(self.status_file, 'r', encoding='utf-8') as f:
                    data = json.load(f)
                    self.blocked_ips = data.get('blocked_ips', {})
                    self.failed_requests = data.get('failed_requests', {})
                logger.info(f"[ANTI_SPIDER] 加载状态: {len(self.blocked_ips)} 个被封IP")
        except Exception as e:
            # Never let corrupt state stop the crawler from starting.
            logger.error(f"[ANTI_SPIDER] 状态加载失败: {e}")

    def init_fingerprint_pool(self) -> None:
        """Initialise the static device-fingerprint pool and behavior-pattern library."""
        self.device_fingerprints = {
            'chrome_windows': {
                'user_agents': [
                    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
                    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
                    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36'
                ],
                'viewports': [
                    {'width': 1920, 'height': 1080},
                    {'width': 1366, 'height': 768},
                    {'width': 1536, 'height': 864}
                ],
                'screen_resolutions': [
                    {'width': 1920, 'height': 1080, 'colorDepth': 24},
                    {'width': 1366, 'height': 768, 'colorDepth': 24},
                    {'width': 2560, 'height': 1440, 'colorDepth': 24}
                ]
            }
        }

        # Behavior pattern library: parameters for simulated human browsing.
        self.behavior_patterns = {
            'normal_user': {
                'reading_time_base': 15,  # base reading time (seconds)
                'scroll_patterns': ['smooth', 'step_by_step', 'quick_scan'],
                'mouse_movement_frequency': 'medium',
                'interaction_probability': 0.3
            },
            'careful_user': {
                'reading_time_base': 25,
                'scroll_patterns': ['smooth', 'detailed'],
                'mouse_movement_frequency': 'high',
                'interaction_probability': 0.5
            }
        }

    def save_status(self) -> None:
        """Persist blocked-IP and failure state to the JSON status file."""
        try:
            # parents=True so a missing intermediate directory is not fatal.
            self.status_file.parent.mkdir(parents=True, exist_ok=True)
            data = {
                'blocked_ips': self.blocked_ips,
                'failed_requests': self.failed_requests,
                'last_updated': datetime.now().isoformat()
            }
            with open(self.status_file, 'w', encoding='utf-8') as f:
                json.dump(data, f, ensure_ascii=False, indent=2)
        except Exception as e:
            logger.error(f"[ANTI_SPIDER] 状态保存失败: {e}")

    def is_ip_blocked(self, ip: str = "current") -> bool:
        """Return True if *ip* is currently blacklisted.

        Expired blacklist entries are pruned as a side effect and the
        pruned state is persisted to disk.
        """
        if ip in self.blocked_ips:
            blocked_time = datetime.fromisoformat(self.blocked_ips[ip])
            if datetime.now() - blocked_time < timedelta(seconds=self.config['blacklist_duration']):
                return True
            # Blacklist entry has expired -- drop it and persist.
            del self.blocked_ips[ip]
            self.save_status()
        return False

    def record_403_error(self, ip: str = "current", url: str = "") -> None:
        """Record a 403 response: blacklist *ip* and append a failure record.

        State is saved to disk immediately so a crash does not lose the ban.
        """
        current_time = datetime.now()

        # Mark the IP as banned starting now.
        self.blocked_ips[ip] = current_time.isoformat()

        # Append to the per-IP failure history.
        self.failed_requests.setdefault(ip, []).append({
            'time': current_time.isoformat(),
            'url': url,
            'error': '403_forbidden'
        })

        # Keep only the most recent 100 records per IP.
        if len(self.failed_requests[ip]) > 100:
            self.failed_requests[ip] = self.failed_requests[ip][-100:]

        self.save_status()

        logger.error(f"[ANTI_SPIDER] ❌ IP被封: {ip}, URL: {url}")
        logger.error(f"[ANTI_SPIDER] ⏰ 预计恢复时间: {(current_time + timedelta(seconds=self.config['blacklist_duration'])).strftime('%Y-%m-%d %H:%M:%S')}")

    def get_smart_delay(self, delay_type: str = "page") -> int:
        """Return a randomized delay in seconds for *delay_type*.

        The base delay is drawn uniformly from the configured range
        ("page", "category", or a 30-60s fallback) and grows by 50% per
        failure recorded for the current IP within the last hour.
        """
        if delay_type == "page":
            min_delay = self.config['min_delay_between_pages']
            max_delay = self.config['max_delay_between_pages']
        elif delay_type == "category":
            min_delay = self.config['min_delay_between_categories']
            max_delay = self.config['max_delay_between_categories']
        else:
            min_delay, max_delay = 30, 60

        base_delay = random.randint(min_delay, max_delay)

        # Scale up when this IP has failed recently (within the last hour).
        ip = "current"
        if ip in self.failed_requests:
            now = datetime.now()  # hoisted: one clock read for the whole scan
            recent_failures = sum(
                1 for req in self.failed_requests[ip]
                if now - datetime.fromisoformat(req['time']) < timedelta(hours=1)
            )
            if recent_failures > 0:
                base_delay *= (1 + recent_failures * 0.5)  # +50% delay per failure

        return int(base_delay)

    def should_stop_crawling(self, ip: str = "current") -> Tuple[bool, str]:
        """Decide whether crawling should pause.

        Returns ``(True, reason)`` when *ip* is blacklisted or has reached
        the configured consecutive-failure threshold within 30 minutes;
        ``(False, "")`` otherwise.
        """
        # A blacklisted IP always stops the crawl.
        if self.is_ip_blocked(ip):
            blocked_time = datetime.fromisoformat(self.blocked_ips[ip])
            remaining_time = self.config['blacklist_duration'] - (datetime.now() - blocked_time).total_seconds()
            return True, f"IP被封，剩余时间: {remaining_time/3600:.1f}小时"

        # Too many recent failures also warrants a pause.
        if ip in self.failed_requests:
            now = datetime.now()
            recent_failures = [
                req for req in self.failed_requests[ip]
                if now - datetime.fromisoformat(req['time']) < timedelta(minutes=30)
            ]
            if len(recent_failures) >= self.config['max_consecutive_failures']:
                return True, f"连续失败{len(recent_failures)}次，建议暂停30分钟"

        return False, ""

    def get_recovery_suggestions(self) -> List[str]:
        """Return human-readable recovery tips based on the current state."""
        suggestions = []

        if self.blocked_ips:
            suggestions.append("🔄 IP被封解决方案:")
            suggestions.append("  1. 等待24小时自动解封")
            suggestions.append("  2. 更换IP地址或使用代理")
            suggestions.append("  3. 增加请求间隔时间")

        if self.failed_requests:
            suggestions.append("⚡ 减少风控建议:")
            suggestions.append("  1. 页面间延迟增加到60-120秒")
            suggestions.append("  2. 类别间延迟增加到5-10分钟")
            suggestions.append("  3. 添加更多随机用户行为")
            suggestions.append("  4. 使用多个Cookie轮换")

        return suggestions

    def get_status_report(self) -> Dict:
        """Summarize ban/failure state plus currently suggested delays."""
        current_time = datetime.now()
        blacklist_window = timedelta(seconds=self.config['blacklist_duration'])

        # Count bans that are still within the blacklist window.
        active_blocks = sum(
            1 for blocked_time_str in self.blocked_ips.values()
            if current_time - datetime.fromisoformat(blocked_time_str) < blacklist_window
        )

        # Count failures recorded within the last hour, across all IPs.
        one_hour = timedelta(hours=1)
        recent_failures = sum(
            1 for failures in self.failed_requests.values()
            for f in failures
            if current_time - datetime.fromisoformat(f['time']) < one_hour
        )

        return {
            'active_ip_blocks': active_blocks,
            'total_blocked_ips': len(self.blocked_ips),
            'recent_failures_1h': recent_failures,
            'suggested_page_delay': self.get_smart_delay('page'),
            'suggested_category_delay': self.get_smart_delay('category'),
            'recovery_suggestions': self.get_recovery_suggestions()
        }
