"""
Celery任务定义
"""

import asyncio
from datetime import datetime, timedelta
from typing import List, Dict, Any
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from .celery_app import celery_app
from ..core.config import get_settings
from ..core.database import HotTrend, GeneratedContent, PublishRecord
from ..core.logger import app_logger
from ..scrapers.weibo_scraper import WeiboScraper
from ..scrapers.zhihu_scraper import ZhihuScraper
from ..scrapers.baidu_scraper import BaiduScraper
from ..scrapers.douyin_scraper import DouyinScraper
from ..generators.openai_generator import OpenAIGenerator
from ..publishers.manager import publisher_manager

# Application settings, loaded once at import time.
settings = get_settings()

# Module-level engine and session factory shared by every task in this worker.
# autoflush=False: pending adds are NOT visible to subsequent queries in the
# same session until commit()/flush() — the tasks below must account for this.
engine = create_engine(settings.DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)


def get_db_session():
    """Create and return a fresh SQLAlchemy session bound to the app engine.

    The caller is responsible for closing the session (typically in a
    ``try/finally`` block).
    """
    session = SessionLocal()
    return session


@celery_app.task(bind=True, name="src.schedulers.tasks.scrape_hot_trends")
def scrape_hot_trends(self, sources: List[str] = None):
    """Scrape hot trends from the given sources and persist new ones.

    Args:
        sources: Source names to scrape ("weibo", "zhihu", "baidu", "douyin").
            Defaults to all supported sources when None or empty.

    Returns:
        dict: {"status", "total_saved", "sources"} on success.

    On overall failure the task retries up to 3 times, 60s apart.
    """
    try:
        app_logger.info("开始执行热点抓取任务")

        if not sources:
            sources = ["weibo", "zhihu", "baidu", "douyin"]

        # One scraper instance per supported source.
        scrapers = {
            "weibo": WeiboScraper(),
            "zhihu": ZhihuScraper(),
            "baidu": BaiduScraper(),
            "douyin": DouyinScraper()
        }

        db = get_db_session()
        total_saved = 0

        try:
            for source in sources:
                if source not in scrapers:
                    app_logger.warning(f"未知的数据源: {source}")
                    continue

                try:
                    scraper = scrapers[source]

                    # The worker thread may already own a running loop;
                    # nest_asyncio allows re-entrancy in that case.
                    import nest_asyncio
                    nest_asyncio.apply()

                    loop = asyncio.new_event_loop()
                    asyncio.set_event_loop(loop)
                    try:
                        trends = loop.run_until_complete(scraper.scrape())
                    finally:
                        # Close the loop even when scraping raises, so a
                        # failing source does not leak one loop per run.
                        loop.close()

                    # Track titles saved in this batch: with autoflush=False,
                    # pending adds are invisible to the duplicate-check query,
                    # so repeated titles in one scrape would be saved twice.
                    saved_count = 0
                    seen_titles = set()
                    for trend_data in trends:
                        title = trend_data["title"]
                        if title in seen_titles:
                            continue

                        existing = db.query(HotTrend).filter(
                            HotTrend.title == title,
                            HotTrend.source == source
                        ).first()

                        if not existing:
                            trend = HotTrend(
                                title=title,
                                description=trend_data.get("description", ""),
                                source=source,
                                rank=trend_data.get("rank", 0),
                                heat_score=trend_data.get("heat_score", 0),
                                url=trend_data.get("url", "")
                            )
                            db.add(trend)
                            seen_titles.add(title)
                            saved_count += 1

                    total_saved += saved_count
                    app_logger.info(f"从 {source} 抓取 {len(trends)} 条热点，保存 {saved_count} 条新热点")

                except Exception as e:
                    # One failing source must not abort the remaining sources.
                    app_logger.error(f"抓取 {source} 热点失败: {e}")

            db.commit()
            app_logger.info(f"热点抓取任务完成，共保存 {total_saved} 条新热点")

            return {
                "status": "success",
                "total_saved": total_saved,
                "sources": sources
            }

        finally:
            db.close()

    except Exception as e:
        app_logger.error(f"热点抓取任务失败: {e}")
        # retry() raises Retry; `raise` + exc= makes the control flow explicit
        # and preserves the original exception for the result backend.
        raise self.retry(exc=e, countdown=60, max_retries=3)


@celery_app.task(bind=True, name="src.schedulers.tasks.generate_content")
def generate_content(self, trend_id: int, platforms: List[str] = None, style: str = "default"):
    """Generate platform-specific content for one hot trend.

    Args:
        trend_id: Primary key of the HotTrend to generate content for.
        platforms: Target platforms; defaults to ["weibo", "twitter", "wechat"].
        style: Generation style forwarded to the generator.

    Returns:
        dict with overall status and a per-platform result map
        ("success" / "skipped" / "error").

    Raises:
        ValueError: If the trend does not exist (caught below; triggers retry).
    """
    try:
        app_logger.info(f"开始为热点 {trend_id} 生成内容")

        if not platforms:
            platforms = ["weibo", "twitter", "wechat"]

        db = get_db_session()

        try:
            trend = db.query(HotTrend).filter(HotTrend.id == trend_id).first()
            if not trend:
                raise ValueError(f"热点 {trend_id} 不存在")

            # Prefer Qwen when its API key is configured, otherwise fall back
            # to OpenAI. Both `settings` and OpenAIGenerator are already
            # available at module level — no need to re-import them here.
            if settings.QWEN_API_KEY:
                from ..generators.qwen_generator import QwenGenerator
                generator = QwenGenerator()
            else:
                generator = OpenAIGenerator()

            results = {}

            for platform in platforms:
                try:
                    # Skip platforms that already have generated content.
                    existing = db.query(GeneratedContent).filter(
                        GeneratedContent.trend_id == trend_id,
                        GeneratedContent.platform == platform
                    ).first()

                    if existing:
                        app_logger.info(f"平台 {platform} 已存在生成内容，跳过")
                        results[platform] = {"status": "skipped", "reason": "already_exists"}
                        continue

                    trend_data = {
                        "title": trend.title,
                        "description": trend.description,
                        "source": trend.source,
                        "url": trend.url
                    }

                    loop = asyncio.new_event_loop()
                    asyncio.set_event_loop(loop)
                    try:
                        content = loop.run_until_complete(
                            generator.generate(trend_data, platform, style)
                        )
                    finally:
                        # Close the loop even if generation raises, so failed
                        # platforms do not leak event loops.
                        loop.close()

                    generated = GeneratedContent(
                        trend_id=trend_id,
                        platform=platform,
                        content=content["text"],
                        hashtags=content.get("hashtags", "")
                    )
                    db.add(generated)

                    results[platform] = {
                        "status": "success",
                        "content_length": len(content["text"])
                    }

                    app_logger.info(f"为平台 {platform} 生成内容成功")

                except Exception as e:
                    app_logger.error(f"为平台 {platform} 生成内容失败: {e}")
                    results[platform] = {"status": "error", "error": str(e)}

            # Mark the trend processed and persist everything atomically in a
            # single commit (the original double-commit could save content but
            # leave the trend unmarked if the second commit failed).
            trend.processed = True
            db.commit()

            app_logger.info(f"热点 {trend_id} 内容生成任务完成")

            return {
                "status": "success",
                "trend_id": trend_id,
                "platforms": platforms,
                "results": results
            }

        finally:
            db.close()

    except Exception as e:
        app_logger.error(f"内容生成任务失败: {e}")
        raise self.retry(exc=e, countdown=60, max_retries=3)


@celery_app.task(bind=True, name="src.schedulers.tasks.publish_content")
def publish_content(self, content_id: int):
    """Publish one generated content item to its target platform.

    Args:
        content_id: Primary key of the GeneratedContent row to publish.

    Returns:
        dict with the publish outcome ("success"/"failed"/"skipped"),
        the platform, the remote post id, and any error message.

    A PublishRecord row is written for every attempt; the content row's
    status and published_at are updated accordingly.
    """
    try:
        app_logger.info(f"开始发布内容 {content_id}")

        db = get_db_session()

        try:
            content = db.query(GeneratedContent).filter(
                GeneratedContent.id == content_id
            ).first()

            if not content:
                raise ValueError(f"内容 {content_id} 不存在")

            # Idempotency guard: never publish the same content twice.
            if content.status == "published":
                app_logger.info(f"内容 {content_id} 已发布，跳过")
                return {"status": "skipped", "reason": "already_published"}

            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                result = loop.run_until_complete(
                    publisher_manager.publish_to_platform(
                        content.platform,
                        content.content,
                        hashtags=content.hashtags
                    )
                )
            finally:
                # Close the loop even when publishing raises, so failures
                # do not leak event loops.
                loop.close()

            # Record the attempt regardless of outcome.
            record = PublishRecord(
                content_id=content_id,
                platform=content.platform,
                platform_post_id=result.get("data", {}).get("post_id", ""),
                status="success" if result["success"] else "failed",
                error_message=result.get("error", "")
            )
            db.add(record)

            content.status = "published" if result["success"] else "failed"
            if result["success"]:
                content.published_at = datetime.utcnow()

            db.commit()

            if result["success"]:
                app_logger.info(f"内容 {content_id} 发布成功")
            else:
                app_logger.error(f"内容 {content_id} 发布失败: {result.get('error', '未知错误')}")

            return {
                "status": "success" if result["success"] else "failed",
                "content_id": content_id,
                "platform": content.platform,
                "post_id": result.get("data", {}).get("post_id", ""),
                "error": result.get("error", "") if not result["success"] else None
            }

        finally:
            db.close()

    except Exception as e:
        app_logger.error(f"发布内容任务失败: {e}")
        raise self.retry(exc=e, countdown=60, max_retries=3)


@celery_app.task(bind=True, name="src.schedulers.tasks.auto_workflow")
def auto_workflow(self):
    """End-to-end automation: pick unprocessed trends, generate content for
    each, then schedule staggered publication of the results.

    NOTE(review): step 3 calls ``.get()`` on subtasks spawned by this task.
    Waiting synchronously on a subtask from inside a task is a known Celery
    anti-pattern — newer Celery raises RuntimeError unless
    ``disable_sync_subtasks`` is turned off, and it can deadlock when the
    worker pool is saturated. Confirm the worker configuration, or consider
    restructuring with a chord/chain.
    """

    try:
        app_logger.info("开始执行自动化工作流程")

        db = get_db_session()

        try:
            # 1. Fetch the 10 most recent unprocessed trends.
            unprocessed_trends = db.query(HotTrend).filter(
                HotTrend.processed == False
            ).order_by(HotTrend.created_at.desc()).limit(10).all()

            if not unprocessed_trends:
                app_logger.info("没有未处理的热点")
                return {"status": "no_new_trends"}

            app_logger.info(f"找到 {len(unprocessed_trends)} 个未处理热点")

            # 2. Kick off one async content-generation task per trend.
            content_generation_tasks = []
            for trend in unprocessed_trends:
                task = generate_content.delay(
                    trend.id,
                    platforms=["weibo", "twitter"],  # main platforms only
                    style="default"
                )
                content_generation_tasks.append((trend.id, task))

            # 3. Wait for each generation task to finish (synchronous .get —
            # see the NOTE in the docstring about the risks of this).
            generated_contents = []
            for trend_id, task in content_generation_tasks:
                try:
                    result = task.get(timeout=300)  # 5-minute timeout per task
                    if result.get("status") == "success":
                        # Collect the freshly generated (still pending) rows.
                        contents = db.query(GeneratedContent).filter(
                            GeneratedContent.trend_id == trend_id,
                            GeneratedContent.status == "pending"
                        ).all()
                        generated_contents.extend(contents)
                        app_logger.info(f"热点 {trend_id} 内容生成完成")
                    else:
                        app_logger.error(f"热点 {trend_id} 内容生成失败")
                except Exception as e:
                    app_logger.error(f"热点 {trend_id} 内容生成任务异常: {e}")

            # 4. Schedule publication, staggered to stay under platform
            # rate limits: one publish every 5 minutes.
            publish_tasks = []
            for i, content in enumerate(generated_contents):
                task = publish_content.apply_async(
                    args=[content.id],
                    countdown=i * 300  # staggered delay
                )
                publish_tasks.append((content.id, task))

            app_logger.info(f"自动化工作流程完成，生成 {len(generated_contents)} 个内容，安排发布")

            return {
                "status": "success",
                "processed_trends": len(unprocessed_trends),
                "generated_contents": len(generated_contents),
                "scheduled_publications": len(publish_tasks)
            }

        finally:
            db.close()

    except Exception as e:
        app_logger.error(f"自动化工作流程失败: {e}")
        self.retry(countdown=300, max_retries=2)


@celery_app.task(name="src.schedulers.tasks.cleanup_old_data")
def cleanup_old_data():
    """Delete hot trends older than 30 days plus their dependent rows.

    For each expired trend, removes its PublishRecord rows first, then its
    GeneratedContent rows, then the trend itself (child-before-parent order).

    Returns:
        dict: {"status": "success", "deleted_trends": N} on success,
              {"status": "error", "error": ...} on failure (never raises).
    """
    try:
        app_logger.info("开始清理过期数据")

        db = get_db_session()

        try:
            cutoff_date = datetime.utcnow() - timedelta(days=30)

            old_trends = db.query(HotTrend).filter(
                HotTrend.created_at < cutoff_date
            ).all()

            for trend in old_trends:
                # Collect the content IDs BEFORE deleting the contents.
                # (The original code deleted GeneratedContent first and then
                # queried for the IDs, so content_ids was always empty and
                # PublishRecord rows were orphaned forever.)
                content_ids = [
                    content.id
                    for content in db.query(GeneratedContent).filter(
                        GeneratedContent.trend_id == trend.id
                    ).all()
                ]

                if content_ids:
                    # synchronize_session=False: safe with IN(...) bulk
                    # deletes; we commit right after the loop anyway.
                    db.query(PublishRecord).filter(
                        PublishRecord.content_id.in_(content_ids)
                    ).delete(synchronize_session=False)

                db.query(GeneratedContent).filter(
                    GeneratedContent.trend_id == trend.id
                ).delete(synchronize_session=False)

                db.delete(trend)

            db.commit()

            app_logger.info(f"清理过期数据完成，删除 {len(old_trends)} 条过期热点")

            return {
                "status": "success",
                "deleted_trends": len(old_trends)
            }

        finally:
            db.close()

    except Exception as e:
        app_logger.error(f"清理过期数据失败: {e}")
        return {"status": "error", "error": str(e)}


@celery_app.task(name="src.schedulers.tasks.health_check")
def health_check():
    """Run a system health check: DB connectivity plus basic statistics.

    Returns:
        dict: {"status": "healthy", "data": {...}} with row counts and a
              timestamp, or {"status": "unhealthy", "error": ...} — never
              raises, so the scheduler can always consume the result.
    """
    try:
        app_logger.info("开始系统健康检查")

        db = get_db_session()

        try:
            # SQLAlchemy 1.4+/2.0 requires textual SQL to be wrapped in
            # text(); passing a bare string to Session.execute raises
            # ObjectNotExecutableError, which made this check fail even
            # when the database was healthy.
            from sqlalchemy import text
            db.execute(text("SELECT 1"))

            # Aggregate row counts for the dashboard/health payload.
            trends_count = db.query(HotTrend).count()
            contents_count = db.query(GeneratedContent).count()
            published_count = db.query(PublishRecord).filter(
                PublishRecord.status == "success"
            ).count()

            # Recent activity: trends scraped within the last hour.
            recent_trends = db.query(HotTrend).filter(
                HotTrend.created_at > datetime.utcnow() - timedelta(hours=1)
            ).count()

            health_status = {
                "database": "healthy",
                "total_trends": trends_count,
                "total_contents": contents_count,
                "total_published": published_count,
                "recent_trends_1h": recent_trends,
                "timestamp": datetime.utcnow().isoformat()
            }

            app_logger.info(f"系统健康检查完成: {health_status}")

            return {
                "status": "healthy",
                "data": health_status
            }

        finally:
            db.close()

    except Exception as e:
        app_logger.error(f"系统健康检查失败: {e}")
        return {"status": "unhealthy", "error": str(e)}