# encoding: utf-8
# @File  : sync_manager.py
# @Author: shaoyun
# @Date  :  2025/06/01
"""
爬虫文件同步管理器（高级功能，当前未启用）

这是一个预留的高级同步模块，用于在大规模部署时优化文件同步。
当前系统使用简单的事件通知机制，已经可以满足基本需求。

启用条件：
- 节点数量 > 10
- 爬虫文件 > 50MB
- 需要版本控制和增量更新

使用方法：
1. 在 app/core/lifecycle.py 中启动 sync_manager
2. 配置相关环境变量
3. Worker 端添加对应的事件处理

详见：docs/sync_manager_readme.md
"""

import asyncio
from datetime import datetime, timedelta
from typing import Set, Dict, Any

from loguru import logger

from app.db.mongo_manager import MongoDB
from app.db.redis_manager import RedisClient


class SyncManager:
    """Spider-file synchronization manager (advanced feature, currently dormant).

    Publishes Redis events so worker nodes can pull new or updated spider
    files, and schedules a daily cache-cleanup event. Intended for large
    deployments; see the module docstring for when to enable it.
    """

    def __init__(self):
        # IDs of spiders for which a sync event has already been published.
        # NOTE(review): grows without bound and is never pruned when spiders
        # are deleted — acceptable while this module stays dormant.
        self.synced_spiders: Set[str] = set()
        # Seconds between checks for *updated* spiders.
        self.sync_interval = 300
        # Master switch; stop() flips this to False to end all loops.
        self.enabled = True

    async def start(self):
        """Run all background loops until stop() is called.

        Does not return while the manager is enabled — run it as a task
        (e.g. ``asyncio.create_task``) from the app lifecycle hooks.
        """
        logger.info("Starting sync manager...")

        await asyncio.gather(
            self._sync_new_spiders_loop(),
            self._sync_updated_spiders_loop(),
            self._cleanup_old_cache_loop(),
        )

    async def stop(self):
        """Signal all loops to exit.

        Loops notice the flag within about a second thanks to the
        incremental _sleep() helper, even during the 24h cleanup wait.
        """
        self.enabled = False
        logger.info("Sync manager stopped")

    async def _sleep(self, seconds: float) -> None:
        """Sleep up to ``seconds``, waking early once the manager is disabled.

        Sleeps in <=1s increments so stop() takes effect promptly instead of
        waiting out a long interval (the cleanup loop waits 24 hours).
        """
        remaining = seconds
        while self.enabled and remaining > 0:
            step = min(1.0, remaining)
            await asyncio.sleep(step)
            remaining -= step

    async def _sync_new_spiders_loop(self):
        """Poll for newly uploaded spider files once per minute."""
        while self.enabled:
            try:
                await self._sync_new_spiders()
                await self._sleep(60)  # check for new files every minute
            except Exception as e:
                logger.error(f"Error in sync new spiders loop: {e}")
                await self._sleep(10)  # brief back-off before retrying

    async def _sync_new_spiders(self):
        """Publish a ``spider.sync`` event for every spider not yet synced."""
        try:
            # Fetch all spiders; only those with an attached file are synced.
            spiders = await MongoDB.find_many("spiders", {})
            if not spiders:
                return

            # One node-count query per pass instead of one per spider.
            online_nodes = await self._get_online_nodes_count()
            if online_nodes <= 0:
                return

            for spider in spiders:
                spider_id = spider["_id"]
                file_id = spider.get("file_id")

                # Skip spiders without a file or already synced this run.
                if not file_id or spider_id in self.synced_spiders:
                    continue

                # NOTE(review): datetime.utcnow() is deprecated in 3.12 but
                # kept so the published timestamp format (naive UTC ISO
                # string) stays unchanged for consumers — migrate together.
                subscribers = await RedisClient.publish_event("spider.sync", {
                    "spider_id": spider_id,
                    "file_id": file_id,
                    "action": "sync",
                    "timestamp": datetime.utcnow().isoformat()
                })

                if subscribers > 0:
                    # Only mark as synced once at least one node received it.
                    self.synced_spiders.add(spider_id)
                    logger.info(f"Published sync event for spider {spider_id} to {subscribers} nodes")
                else:
                    logger.warning("No nodes subscribed to sync events")

        except Exception as e:
            logger.error(f"Error syncing new spiders: {e}")

    async def _sync_updated_spiders_loop(self):
        """Poll for recently updated spider files every ``sync_interval``."""
        while self.enabled:
            try:
                await self._sync_updated_spiders()
                await self._sleep(self.sync_interval)  # default: every 5 minutes
            except Exception as e:
                logger.error(f"Error in sync updated spiders loop: {e}")
                await self._sleep(30)  # back-off before retrying

    async def _sync_updated_spiders(self):
        """Publish a ``spider.update`` event for spiders changed recently."""
        try:
            # Window matches the default poll interval (last 5 minutes).
            # NOTE(review): assumes stored ``updated_at`` values are naive
            # UTC datetimes, matching datetime.utcnow() — verify on migration.
            cutoff_time = datetime.utcnow() - timedelta(minutes=5)

            db = await MongoDB.get_database()
            updated_spiders = await db["spiders"].find({
                "updated_at": {"$gte": cutoff_time}
            }).to_list(length=None)

            for spider in updated_spiders:
                spider_id = spider["_id"]
                file_id = spider.get("file_id")

                if file_id:
                    await RedisClient.publish_event("spider.update", {
                        "spider_id": spider_id,
                        "file_id": file_id,
                        "action": "update",
                        "version": spider.get("version", "1.0.0"),
                        "timestamp": datetime.utcnow().isoformat()
                    })

                    logger.info(f"Published update event for spider {spider_id}")

        except Exception as e:
            logger.error(f"Error syncing updated spiders: {e}")

    async def _cleanup_old_cache_loop(self):
        """Publish a cache-cleanup event once per day."""
        while self.enabled:
            try:
                # Wait a full day between cleanups; _sleep returns early
                # when stop() is called, so re-check the flag afterwards.
                await self._sleep(86400)
                if not self.enabled:
                    break

                await RedisClient.publish_event("cache.cleanup", {
                    "action": "cleanup_old_cache",
                    "days": 7,  # prune cache entries older than 7 days
                    "timestamp": datetime.utcnow().isoformat()
                })

                logger.info("Published cache cleanup event")

            except Exception as e:
                logger.error(f"Error in cleanup loop: {e}")

    async def _get_online_nodes_count(self) -> int:
        """Return the number of ONLINE nodes, or 0 on any error."""
        try:
            # Imported lazily to avoid a hard dependency while dormant.
            from app.db.mysql import SessionLocal
            from app.models.node import Node, NodeStatus

            def _count() -> int:
                # Session is always closed, even if the query raises.
                db = SessionLocal()
                try:
                    return db.query(Node).filter(Node.status == NodeStatus.ONLINE).count()
                finally:
                    db.close()

            # The SQLAlchemy session is synchronous; run the query in a
            # worker thread so the event loop is never blocked.
            return await asyncio.to_thread(_count)
        except Exception as e:
            logger.error(f"Error getting online nodes count: {e}")
            return 0

    async def force_sync_spider(self, spider_id: str) -> bool:
        """Publish a force-sync event for one spider.

        Returns True when at least one node received the event,
        False when the spider is missing, has no file, or no node listened.
        """
        try:
            spider = await MongoDB.find_one("spiders", {"_id": spider_id})
            if not spider or not spider.get("file_id"):
                logger.error(f"Spider {spider_id} not found or has no file")
                return False

            subscribers = await RedisClient.publish_event("spider.force_sync", {
                "spider_id": spider_id,
                "file_id": spider["file_id"],
                "action": "force_sync",
                "timestamp": datetime.utcnow().isoformat()
            })

            logger.info(f"Force sync event published for spider {spider_id} to {subscribers} nodes")
            return subscribers > 0

        except Exception as e:
            logger.error(f"Error forcing sync for spider {spider_id}: {e}")
            return False

    async def get_sync_status(self) -> Dict[str, Any]:
        """Return a snapshot of sync state for monitoring endpoints.

        On failure returns a reduced dict containing ``enabled`` and the
        error message instead of raising.
        """
        try:
            total_spiders = await MongoDB.count_documents("spiders", {})
            online_nodes = await self._get_online_nodes_count()

            return {
                "enabled": self.enabled,
                "total_spiders": total_spiders,
                "synced_spiders": len(self.synced_spiders),
                "online_nodes": online_nodes,
                "sync_interval": self.sync_interval,
                "last_check": datetime.utcnow().isoformat()
            }

        except Exception as e:
            logger.error(f"Error getting sync status: {e}")
            return {
                "enabled": self.enabled,
                "error": str(e)
            }


# Module-level singleton instance (created at import time).
sync_manager = SyncManager()