#!/usr/bin/env python
"""
Tophub 数据爬取任务
定期从 Tophub API 获取热榜数据并存储到 MongoDB
"""
import asyncio
import hashlib
import logging
import os
import sys
from datetime import datetime
from typing import Any, Dict, List, Optional

import pymongo
from motor.motor_asyncio import AsyncIOMotorClient

# Make the project root importable when this file is run as a script
# (the services package lives one directory above this file).
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from services.tophub_service import TophubService

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)


class TophubCrawlerTask:
    """Crawl Tophub trending lists and persist them to MongoDB.

    Each supported platform is fetched via ``TophubService``, normalized,
    and upserted into a per-platform collection keyed by URL hash, so
    repeated crawls update existing items instead of duplicating them.
    """

    def __init__(self):
        # MongoDB connection settings (overridable via environment).
        # NOTE(review): the fallback URI embeds credentials; consider
        # dropping the default in production deployments.
        self.mongo_uri = os.getenv('MONGODB_URI', 'mongodb://admin:newhand@localhost:27017/')
        self.mongo_db = os.getenv('MONGODB_DB', 'crawler_db')
        self.mongo_client = None
        self.db = None

        # Created lazily in init_connections().
        self.tophub_service = None

        # Platform key -> MongoDB collection name. Platforms missing here
        # fall back to "<platform>_hot_items" (see crawl_and_store_platform).
        self.platform_mapping = {
            'weibo': 'weibo_hot_items',
            'zhihu': 'zhihu_hot_items',
            'baidu': 'baidu_hot_items',
            'bilibili': 'bilibili_hot_items',
            'douyin': 'douyin_hot_items',
            'toutiao': 'toutiao_hot_items',
            '36kr': 'kr36_hot_items',
            'sspai': 'sspai_hot_items',
            'juejin': 'juejin_hot_items',
            'v2ex': 'v2ex_hot_items'
        }

    async def init_connections(self) -> bool:
        """Open the MongoDB client and construct the Tophub service.

        Returns:
            True when both are ready, False on any failure (logged).
        """
        try:
            self.mongo_client = AsyncIOMotorClient(self.mongo_uri)
            self.db = self.mongo_client[self.mongo_db]

            # server_info() forces a round trip, so a bad URI fails here
            # instead of on the first query.
            await self.mongo_client.server_info()
            logger.info("MongoDB connection successful")

            self.tophub_service = TophubService()

            return True

        except Exception as e:
            # logger.exception keeps the traceback in the log.
            logger.exception(f"Failed to initialize connections: {e}")
            return False

    async def close_connections(self):
        """Release the MongoDB client and the Tophub HTTP session."""
        if self.mongo_client:
            self.mongo_client.close()
        # The service may never have opened a session (e.g. init failed).
        if self.tophub_service and self.tophub_service.session:
            await self.tophub_service.session.close()

    def transform_to_mongodb_format(self, platform: str, items: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Convert normalized Tophub items into MongoDB documents.

        Args:
            platform: Platform key, e.g. ``'weibo'``.
            items: Items as returned by ``TophubService.normalize_data``.

        Returns:
            One document per input item, ready for upsert.
        """
        # One timestamp per batch keeps items crawled together comparable
        # (previously utcnow() was re-evaluated per item).
        # NOTE(review): utcnow() is naive UTC and deprecated since 3.12;
        # kept to match the format of already-stored documents.
        now = datetime.utcnow()
        transformed = []

        for item in items:
            extra = item.get('extra', {})
            description = item.get('description', '')
            doc = {
                'platform': platform,
                'rank': item.get('rank', 0),
                'title': item.get('title', ''),
                'url': item.get('url', ''),
                'heat_value': str(item.get('heat_value', '')),
                # TODO(review): despite the name this stores the raw value,
                # which may be a string - confirm the upstream format.
                'heat_value_numeric': item.get('heat_value', 0),
                'description': description,
                'excerpt': description[:200] if description else '',
                'crawled_at': now,
                'source': 'tophub',
                'fetch_time': item.get('fetch_time', now.isoformat()),

                # Optional metadata forwarded from the source payload.
                'category': extra.get('category'),
                'tags': extra.get('tags', []),
                'author': extra.get('author'),
                'published_at': extra.get('published_at'),

                # Dedup key used by the upsert filter.
                'url_hash': self.generate_hash(item.get('url', '')),
                'has_comments': False,  # Tophub does not expose comments
                'is_trending': item.get('rank', 999) <= 10  # top 10 = trending
            }

            transformed.append(doc)

        return transformed

    def generate_hash(self, text: str) -> str:
        """Return the hex MD5 digest of *text* (a dedup key, not a security hash)."""
        return hashlib.md5(text.encode()).hexdigest()

    async def crawl_and_store_platform(self, platform: str) -> int:
        """Fetch one platform's list and upsert it into MongoDB.

        Args:
            platform: Platform key.

        Returns:
            Number of documents written; 0 on any failure, which is
            logged rather than raised so one platform cannot abort a run.
        """
        try:
            logger.info(f"Fetching {platform} data...")
            data = await self.tophub_service.get_node_data(platform)

            if not data or 'items' not in data:
                logger.warning(f"No data returned for {platform}")
                return 0

            items = self.tophub_service.normalize_data(platform, data)

            if not items:
                logger.warning(f"No items to store for {platform}")
                return 0

            documents = self.transform_to_mongodb_format(platform, items)

            # Unmapped platforms fall back to a conventional collection name.
            collection_name = self.platform_mapping.get(platform, f"{platform}_hot_items")
            collection = self.db[collection_name]

            # Upsert keyed on url_hash so re-crawled items update in place.
            operations = [
                pymongo.UpdateOne(
                    {'url_hash': doc['url_hash']},
                    {'$set': doc},
                    upsert=True
                )
                for doc in documents
            ]

            if operations:
                result = await collection.bulk_write(operations)
                logger.info(f"Stored {platform} data: {result.upserted_count} new, {result.modified_count} updated")
                return len(documents)

            return 0

        except Exception as e:
            # logger.exception keeps the traceback in the log.
            logger.exception(f"Error crawling {platform}: {e}")
            return 0

    async def crawl_all_platforms(self, platforms: Optional[List[str]] = None) -> Dict[str, int]:
        """Crawl a list of platforms sequentially.

        Args:
            platforms: Platform keys; when omitted/empty, crawls the six
                major platforms.

        Returns:
            Mapping of platform key to the number of documents stored.
        """
        if not platforms:
            platforms = ['weibo', 'zhihu', 'baidu', 'bilibili', 'douyin', 'toutiao']

        results = {}

        # The service owns one HTTP session for the whole run.
        async with self.tophub_service:
            for platform in platforms:
                results[platform] = await self.crawl_and_store_platform(platform)

                # Throttle to avoid hammering the Tophub API.
                await asyncio.sleep(2)

        return results

    async def run_once(self) -> bool:
        """Run one full crawl over the default platforms.

        Returns:
            True when the run completed (even with partial per-platform
            failures), False when initialization or the run itself failed.
        """
        try:
            if not await self.init_connections():
                logger.error("Failed to initialize, exiting")
                return False

            logger.info("Starting Tophub crawler task...")

            results = await self.crawl_all_platforms()

            # A platform counts as successful when it stored at least one item.
            total = sum(results.values())
            success_count = sum(1 for v in results.values() if v > 0)

            logger.info(f"Crawl completed: {success_count}/{len(results)} platforms successful")
            logger.info(f"Total items stored: {total}")

            for platform, count in results.items():
                if count > 0:
                    logger.info(f"  {platform}: {count} items")

            return True

        except Exception as e:
            logger.exception(f"Task failed: {e}")
            return False

        finally:
            # Always release the client/session, even after a failed init.
            await self.close_connections()

    async def run_periodic(self, interval_seconds: int = 1800):
        """Run ``run_once`` forever with a fixed sleep between runs.

        Args:
            interval_seconds: Sleep between runs in seconds (default 30 min).
        """
        logger.info(f"Starting periodic crawler, interval: {interval_seconds}s")

        while True:
            try:
                await self.run_once()
            except Exception as e:
                logger.exception(f"Periodic task error: {e}")

            logger.info(f"Sleeping for {interval_seconds}s...")
            await asyncio.sleep(interval_seconds)


async def main() -> int:
    """CLI entry point.

    ``--periodic [seconds]`` runs the crawler forever at the given
    interval (default 1800s); otherwise a single crawl is performed.

    Returns:
        Process exit code: 0 on success, 1 on failure.
    """
    task = TophubCrawlerTask()

    if len(sys.argv) > 1 and sys.argv[1] == '--periodic':
        # Periodic mode: run_periodic() loops until the process is killed.
        interval = int(sys.argv[2]) if len(sys.argv) > 2 else 1800
        await task.run_periodic(interval)
        return 0  # unreachable in practice; kept for a consistent return type

    # Single-shot mode.
    success = await task.run_once()
    return 0 if success else 1


if __name__ == "__main__":
    exit_code = asyncio.run(main())