"""定时任务调度器
定期从Tophub同步热榜数据到MongoDB
"""
import asyncio
import logging
from datetime import datetime, timedelta

from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.interval import IntervalTrigger
from motor.motor_asyncio import AsyncIOMotorClient

from ..services.tophub_service import TophubService
from ..models.hot_data import Platform
from .config import settings

logger = logging.getLogger(__name__)


class DataSyncScheduler:
    """Scheduler that periodically syncs Tophub hot-list data into MongoDB.

    One APScheduler job runs :meth:`sync_all_platforms` every 5 hours;
    a trigger-less ``initial_sync`` job runs once right after start.
    """

    def __init__(self):
        """Create the scheduler, the Mongo client, and the platform map."""
        self.scheduler = AsyncIOScheduler()
        self.mongo_client = AsyncIOMotorClient(settings.mongodb_uri)
        self.db = self.mongo_client[settings.mongodb_db]

        # Platform map — only the platforms actually supported by Tophub.
        # Keys double as Tophub node names and MongoDB collection prefixes
        # ("<name>_hot_items"). This map is the single source of truth for
        # which platforms get synced (see sync_all_platforms).
        self.platform_map = {
            'zhihu': 'zhihu',
            'weibo': 'weibo',
            'baidu': 'baidu',
            'toutiao': 'toutiao',
            'bilibili': 'bilibili',
            'douyin': 'douyin',
        }

    @staticmethod
    def _to_int(value) -> int:
        """Best-effort conversion of a heat value to ``int``.

        Heat values arrive stringly-typed from Tophub (e.g. ``"12345"``),
        and a bare ``int(...)`` raised ``ValueError`` on non-numeric
        strings, aborting the whole platform sync. Returns 0 on any
        value that cannot be parsed.
        """
        try:
            return int(float(value))
        except (TypeError, ValueError):
            return 0

    def _build_document(self, item: dict, platform_name: str, now: datetime) -> dict:
        """Map one normalized Tophub item to a MongoDB document."""
        heat = item.get('heat_value', 0)
        return {
            'title': item['title'],
            'url': item['url'],
            'rank': item['rank'],
            'heat_value': str(heat),
            'heat_value_numeric': self._to_int(heat),
            'platform': platform_name,
            'category': 'hot',
            'crawled_at': now,
            'updated_at': now,
            # description may be absent OR present-but-None; `or ''` covers both.
            'excerpt': (item.get('description') or '')[:200],
            'source': 'tophub',
            'extra': item.get('extra', {}),
        }

    async def sync_platform_data(self, platform_name: str):
        """Sync a single platform's hot list from Tophub into MongoDB.

        Fetches and normalizes the node data, prunes documents older than
        24 hours, then inserts the fresh snapshot. All errors are logged
        and swallowed so one failing platform does not stop the others.

        Args:
            platform_name: platform key, e.g. ``'zhihu'``.
        """
        try:
            logger.info(f"开始同步 {platform_name} 数据...")

            async with TophubService() as service:
                data = await service.get_node_data(platform_name)
                if not data:
                    logger.warning(f"无法从Tophub获取 {platform_name} 数据")
                    return

                normalized = service.normalize_data(platform_name, data)

                # NOTE(review): datetime.utcnow() is deprecated (3.12+);
                # switching to timezone-aware datetimes needs checking
                # against the naive timestamps already stored in Mongo.
                now = datetime.utcnow()
                documents = [
                    self._build_document(item, platform_name, now)
                    for item in normalized
                ]
                if not documents:
                    return

                collection = self.db[f"{platform_name}_hot_items"]

                # Prune history older than 24 hours before inserting.
                await collection.delete_many({
                    'crawled_at': {'$lt': now - timedelta(hours=24)}
                })

                try:
                    # ordered=False keeps inserting past duplicate-key
                    # errors instead of aborting the rest of the batch.
                    result = await collection.insert_many(documents, ordered=False)
                    logger.info(f"成功同步 {len(result.inserted_ids)} 条 {platform_name} 数据")
                except Exception as e:
                    # Duplicate keys are expected between runs; anything else is real.
                    if 'duplicate key error' in str(e):
                        logger.debug(f"{platform_name} 数据已存在，跳过")
                    else:
                        logger.error(f"存储 {platform_name} 数据失败: {e}")

        except Exception as e:
            logger.error(f"同步 {platform_name} 数据失败: {e}")

    async def sync_all_platforms(self):
        """Sync every supported platform sequentially."""
        logger.info("开始同步所有平台数据...")
        start_time = datetime.utcnow()

        # Drive the loop from platform_map so the supported-platform list
        # lives in exactly one place. (A previous hard-coded list also
        # included 'weixin', which the map marks as unsupported.)
        for platform in self.platform_map:
            await self.sync_platform_data(platform)
            # Throttle between platforms to avoid hammering Tophub.
            await asyncio.sleep(2)

        elapsed = (datetime.utcnow() - start_time).total_seconds()
        logger.info(f"所有平台数据同步完成，耗时 {elapsed:.2f} 秒")

    def start(self):
        """Register the jobs and start the scheduler.

        Adds a recurring job (every 5 hours) plus a one-off
        ``initial_sync`` job; a job without a trigger is run by
        APScheduler once, immediately after ``start()``.
        """
        self.scheduler.add_job(
            self.sync_all_platforms,
            trigger=IntervalTrigger(hours=5),
            id='sync_tophub_data',
            name='同步Tophub热榜数据',
            replace_existing=True
        )

        # No trigger -> runs once, right after the scheduler starts.
        self.scheduler.add_job(
            self.sync_all_platforms,
            id='initial_sync',
            name='初始数据同步',
            replace_existing=True
        )

        self.scheduler.start()
        logger.info("数据同步调度器已启动，将每5小时同步一次数据")

    def stop(self):
        """Shut down the scheduler (waits for running jobs by default)."""
        self.scheduler.shutdown()
        logger.info("数据同步调度器已停止")


# Global singleton, created at import time (constructs the APScheduler
# and Motor client objects; Motor connects lazily on first operation).
scheduler = DataSyncScheduler()


async def init_scheduler():
    """Initialize and start the global data-sync scheduler.

    ``DataSyncScheduler.start()`` already registers a trigger-less
    ``initial_sync`` job that APScheduler runs immediately, so awaiting
    ``sync_all_platforms()`` here as well synced every platform twice
    at startup. Starting the scheduler is sufficient.
    """
    scheduler.start()