"""增量更新中间件"""
import asyncio
import hashlib
import logging
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional, Set

from motor.motor_asyncio import AsyncIOMotorClient

logger = logging.getLogger(__name__)


class IncrementalUpdateMiddleware:
    """Incremental update middleware.

    Implements URL de-duplication, timestamp comparison, and skipping of
    content that has not been updated since the last fetch.
    """

    def __init__(self, mongodb_url: str = "mongodb://localhost:27017"):
        """Initialize the middleware.

        Args:
            mongodb_url: MongoDB connection URL.
        """
        self.mongodb_url = mongodb_url
        self.client = None
        self.db = None
        self.collection = None
        self.url_cache: Set[str] = set()  # in-memory cache of known URL hashes
        self.cache_size = 10000  # soft upper bound on cached hashes
        self.update_threshold = timedelta(hours=1)  # minimum re-update interval

    async def connect(self):
        """Connect to MongoDB (idempotent), create indexes and warm the cache."""
        if not self.client:
            self.client = AsyncIOMotorClient(self.mongodb_url)
            self.db = self.client['aipaper']
            self.collection = self.db['hot_topics']

            # Create indexes
            await self._create_indexes()

            # Load URL cache
            await self._load_url_cache()

    async def close(self):
        """Close the MongoDB connection and drop stale handles."""
        if self.client:
            self.client.close()
            self.client = None
            # Reset derived handles so a later connect() starts from scratch.
            self.db = None
            self.collection = None

    async def _create_indexes(self):
        """Create database indexes (unique URL hash, source+time, TTL)."""
        try:
            # Unique index on the URL hash for de-duplication
            await self.collection.create_index(
                [("url_hash", 1)],
                unique=True,
                background=True
            )

            # Compound index: source + fetch time (newest first)
            await self.collection.create_index(
                [("source", 1), ("fetch_time", -1)],
                background=True
            )

            # TTL index: documents expire once 'expire_at' passes
            await self.collection.create_index(
                [("expire_at", 1)],
                expireAfterSeconds=0,
                background=True
            )

            logger.info("数据库索引创建成功")

        except Exception as e:
            logger.error(f"创建索引失败: {e}")

    async def _load_url_cache(self):
        """Load recent URL hashes from MongoDB into the in-memory cache."""
        try:
            # Only load hashes fetched within the last 7 days
            recent_time = datetime.utcnow() - timedelta(days=7)

            cursor = self.collection.find(
                {"fetch_time": {"$gte": recent_time.isoformat()}},
                {"url_hash": 1}
            ).limit(self.cache_size)

            async for doc in cursor:
                self.url_cache.add(doc['url_hash'])

            logger.info(f"加载 {len(self.url_cache)} 个URL到缓存")

        except Exception as e:
            logger.error(f"加载URL缓存失败: {e}")

    def generate_url_hash(self, url: str) -> str:
        """Generate a normalized de-duplication hash for a URL.

        Args:
            url: The raw URL.

        Returns:
            MD5 hex digest of the normalized URL (non-cryptographic,
            used purely as a dedup key).
        """
        # Normalize: trim whitespace and lowercase
        url = url.strip().lower()

        # Drop the query string so volatile parameters don't defeat dedup
        if '?' in url:
            url = url.split('?')[0]

        return hashlib.md5(url.encode()).hexdigest()

    async def is_duplicate(self, url: str) -> bool:
        """Check whether a URL has been seen before.

        Args:
            url: The URL to check.

        Returns:
            True if the URL is a duplicate.
        """
        url_hash = self.generate_url_hash(url)

        # Fast path: in-memory cache
        if url_hash in self.url_cache:
            return True

        # Slow path: database lookup (projection keeps the round-trip small)
        await self.connect()
        exists = await self.collection.find_one(
            {"url_hash": url_hash},
            {"_id": 1}
        )

        if exists:
            # Remember the hit so the next lookup stays in memory
            self.url_cache.add(url_hash)
            return True

        return False

    async def needs_update(self, url: str, new_data: Dict[str, Any]) -> bool:
        """Decide whether a URL's record needs to be (re)stored.

        Args:
            url: Content URL.
            new_data: Freshly fetched data.

        Returns:
            True when the record is new, stale, or has changed content.
        """
        url_hash = self.generate_url_hash(url)

        await self.connect()

        # Look up the existing record
        existing = await self.collection.find_one({"url_hash": url_hash})

        if not existing:
            return True  # new record, needs to be stored

        # Honor the update threshold based on the stored timestamp
        last_update = existing.get('update_time')
        if last_update:
            try:
                last_update_dt = datetime.fromisoformat(last_update.replace('Z', '+00:00'))
            except ValueError:
                # Malformed timestamp: ignore it and fall through to the
                # content comparison instead of crashing the pipeline.
                last_update_dt = None
            if last_update_dt is not None:
                # Normalize to naive UTC: stored values end with 'Z', so
                # fromisoformat() yields an aware datetime, and subtracting
                # it from the naive utcnow() would raise TypeError.
                if last_update_dt.tzinfo is not None:
                    last_update_dt = last_update_dt.astimezone(timezone.utc).replace(tzinfo=None)
                if datetime.utcnow() - last_update_dt < self.update_threshold:
                    logger.debug(f"跳过未到更新时间的内容: {url}")
                    return False

        # Re-store when any key field changed
        if self._content_changed(existing, new_data):
            return True

        # Content unchanged: only record the check time, skip the update
        await self.collection.update_one(
            {"url_hash": url_hash},
            {"$set": {"last_check": datetime.utcnow().isoformat() + 'Z'}}
        )

        return False

    def _content_changed(self, old_data: Dict, new_data: Dict) -> bool:
        """Compare the key fields of two records.

        Args:
            old_data: Previously stored data.
            new_data: Freshly fetched data.

        Returns:
            True if any monitored field differs.
        """
        # Top-level fields that matter for hot-topic tracking
        compare_fields = ['title', 'heat_value', 'rank']

        for field in compare_fields:
            if old_data.get(field) != new_data.get(field):
                return True

        # Engagement statistics nested under 'content'
        old_content = old_data.get('content', {})
        new_content = new_data.get('content', {})

        if old_content.get('discussion_count') != new_content.get('discussion_count'):
            return True

        if old_content.get('read_count') != new_content.get('read_count'):
            return True

        return False

    async def filter_items(self, items: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Keep only the items that actually need updating.

        Args:
            items: Raw item list.

        Returns:
            Items that are new or changed, each annotated with 'url_hash'.
        """
        filtered_items = []

        for item in items:
            url = item.get('url')
            if not url:
                continue

            if await self.needs_update(url, item):
                # Annotate with the hash so downstream storage can upsert
                item['url_hash'] = self.generate_url_hash(url)
                filtered_items.append(item)

                # Track it in the in-memory cache
                self.url_cache.add(item['url_hash'])

                # Keep the cache bounded; set.pop() evicts an arbitrary entry
                if len(self.url_cache) > self.cache_size:
                    self.url_cache.pop()

        logger.info(f"增量更新过滤: 输入{len(items)}条，需更新{len(filtered_items)}条")
        return filtered_items

    async def mark_processed(self, items: List[Dict[str, Any]]):
        """Record processed items' hashes in the in-memory cache.

        Args:
            items: Items that have been processed.
        """
        for item in items:
            url_hash = item.get('url_hash')
            if url_hash:
                self.url_cache.add(url_hash)

    async def get_statistics(self) -> Dict:
        """Collect storage statistics.

        Returns:
            Dict with total/today record counts, cache size and the
            timestamp of the most recently fetched document.
        """
        await self.connect()

        total_count = await self.collection.count_documents({})
        today_count = await self.collection.count_documents({
            "fetch_time": {
                "$gte": datetime.utcnow().replace(
                    hour=0, minute=0, second=0, microsecond=0
                ).isoformat()
            }
        })

        # Timestamp of the most recently fetched document
        last_doc = await self.collection.find_one(
            {},
            sort=[("fetch_time", -1)]
        )
        last_update = last_doc.get('fetch_time') if last_doc else None

        return {
            'total_records': total_count,
            'today_records': today_count,
            'cache_size': len(self.url_cache),
            'last_update': last_update
        }
        
        
class DuplicateFilter:
    """A minimal in-memory de-duplication filter.

    Keys are kept in a plain set; once the set grows past ``capacity`` it is
    trimmed back to roughly 90% of capacity by discarding arbitrary entries.
    """

    def __init__(self, capacity: int = 100000):
        """Create a filter.

        Args:
            capacity: Soft upper bound on the number of tracked keys.
        """
        self.seen = set()
        self.capacity = capacity

    def is_duplicate(self, key: str) -> bool:
        """Report whether ``key`` has been observed, recording it if not.

        Args:
            key: The key to test.

        Returns:
            True when the key was already present, False otherwise.
        """
        if key not in self.seen:
            self.seen.add(key)
            # Trim back to ~90% of capacity once the soft limit is exceeded;
            # set.pop() discards arbitrary members, which is acceptable for
            # this best-effort filter.
            if len(self.seen) > self.capacity:
                surplus = len(self.seen) - int(self.capacity * 0.9)
                for _ in range(surplus):
                    self.seen.pop()
            return False
        return True

    def add(self, key: str):
        """Record ``key`` without checking for duplicates.

        Args:
            key: The key to record.
        """
        self.seen.add(key)

    def clear(self):
        """Forget all recorded keys."""
        self.seen.clear()

    def size(self) -> int:
        """Return the number of keys currently tracked.

        Returns:
            Count of elements in the filter.
        """
        return len(self.seen)