from typing import Dict, Any, Optional, List
from datetime import datetime, timedelta
import asyncio
import json
from dataclasses import dataclass, asdict
import redis.asyncio as redis
from celery import Celery
from celery.result import AsyncResult
import uuid

from utils.logger import Logger
from utils.exceptions import TaskQueueError, TimeoutError

# NOTE(review): ``get_logger`` is not imported — L11 only imports ``Logger``
# from utils.logger, so this line raises NameError at import time unless
# ``get_logger`` is injected elsewhere. Confirm the intended logger factory
# (likely ``from utils.logger import get_logger``).
logger = get_logger("celery_manager")


@dataclass
class TaskConfig:
    """Per-task-type routing and retry configuration.

    Registered via ``CeleryManager.register_task_config`` and looked up by
    ``task_name`` when a task is submitted.
    """
    task_name: str                    # Celery task name passed to send_task
    queue_name: str                   # destination queue used for routing
    priority: int = 5                 # default priority for submissions
    max_retries: int = 3              # embedded in the task payload
    timeout: int = 300                # seconds, embedded in the payload (5 min)
    rate_limit: Optional[str] = None  # e.g. "10/m" means 10 per minute


class CeleryManager:
    """Distributed task manager built on Celery with a Redis broker/backend.

    The Celery app is configured synchronously in ``__init__``. The async
    Redis client is created lazily on first use (``_ensure_redis``) because
    ``__init__`` cannot await — the previous code called the async
    ``_initialize_redis()`` without awaiting it, which only created a
    never-executed coroutine and left ``redis_client`` as ``None``.
    """

    def __init__(self, redis_url: str = "redis://192.168.64.100:6379/0"):
        """Create the manager and configure the Celery application.

        Args:
            redis_url: Redis URL used as both broker and result backend.

        Raises:
            TaskQueueError: if the Celery app cannot be configured.
        """
        self.redis_url = redis_url
        self.celery_app = None
        self.redis_client = None  # created lazily by _ensure_redis()
        self.task_configs: Dict[str, TaskConfig] = {}
        self._initialize_celery()

    def _initialize_celery(self):
        """Build and configure the Celery application.

        Raises:
            TaskQueueError: if the app cannot be created or configured.
        """
        try:
            self.celery_app = Celery(
                "maess_flow",
                broker=self.redis_url,
                backend=self.redis_url,
                include=[
                    "tasks.text_retrieval_tasks",
                    "tasks.doc_segment_tasks",
                    "tasks.multi_modal_tasks",
                    "tasks.validation_tasks",
                    "tasks.integration_tasks",
                    "tasks.crawler_tasks"
                ]
            )

            # Serialization, timezone and worker-tuning settings.
            self.celery_app.conf.update(
                task_serializer="json",
                accept_content=["json"],
                result_serializer="json",
                timezone="Asia/Shanghai",
                enable_utc=True,
                task_track_started=True,
                task_time_limit=600,        # hard limit: 10 minutes
                task_soft_time_limit=300,   # soft limit: 5 minutes
                worker_prefetch_multiplier=1,
                worker_max_tasks_per_child=1000,
                result_expires=3600,        # results expire after 1 hour
                beat_schedule={
                    "cleanup-expired-results": {
                        "task": "celery.backend_cleanup",
                        "schedule": 3600.0,  # run cleanup hourly
                    },
                }
            )

            logger.info("Celery应用初始化完成")

        except Exception as e:
            logger.error(f"Celery初始化失败: {str(e)}")
            raise TaskQueueError(f"Celery初始化失败: {str(e)}")

    async def _initialize_redis(self):
        """Create the async Redis client and verify connectivity with PING.

        Raises:
            TaskQueueError: if the connection cannot be established.
        """
        try:
            self.redis_client = redis.from_url(
                self.redis_url,
                encoding="utf-8",
                decode_responses=True
            )

            # Round-trip to make sure the server is actually reachable.
            await self.redis_client.ping()
            logger.info("Redis连接初始化完成")

        except Exception as e:
            logger.error(f"Redis连接失败: {str(e)}")
            raise TaskQueueError(f"Redis连接失败: {str(e)}")

    async def _ensure_redis(self):
        """Initialize the Redis connection on first use (idempotent)."""
        if self.redis_client is None:
            await self._initialize_redis()

    def register_task_config(self, task_config: TaskConfig):
        """Register (or overwrite) the config keyed by its task_name."""
        self.task_configs[task_config.task_name] = task_config
        logger.info(f"任务配置注册完成: {task_config.task_name}")

    async def submit_task(self, task_name: str, task_data: Dict[str, Any],
                       priority: int = 5, delay: Optional[int] = None) -> str:
        """Submit a task to its configured queue and return its id.

        Args:
            task_name: Celery task name; also the key into task_configs.
            task_data: JSON-serializable payload handed to the task.
            priority: broker priority for this submission.
            delay: optional countdown in seconds before execution.

        Returns:
            The generated task id. It is also used as the Celery task id,
            so it can be passed to get_task_status / wait_for_task /
            cancel_task.

        Raises:
            TaskQueueError: if the submission fails.
        """
        try:
            task_id = str(uuid.uuid4())

            # Fall back to an ad-hoc config on the default queue.
            task_config = self.task_configs.get(task_name)
            if not task_config:
                task_config = TaskConfig(
                    task_name=task_name,
                    queue_name="default",
                    priority=priority
                )

            task_payload = {
                "task_id": task_id,
                "task_name": task_name,
                "data": task_data,
                "priority": priority,
                "created_at": datetime.now().isoformat(),
                "max_retries": task_config.max_retries,
                "timeout": task_config.timeout
            }

            # FIX: bind our generated id to the Celery message (task_id=...).
            # Previously the id was only embedded in the payload, so
            # AsyncResult(task_id) in get_task_status/cancel_task pointed at
            # a non-existent result. countdown=None means "run immediately",
            # which also collapses the duplicated delayed/immediate branches.
            self.celery_app.send_task(
                task_name,
                args=[task_payload],
                task_id=task_id,
                countdown=delay if delay else None,
                queue=task_config.queue_name,
                priority=priority
            )

            logger.info(f"任务提交完成: task_id={task_id}, task_name={task_name}")
            return task_id

        except Exception as e:
            logger.error(f"任务提交失败: {str(e)}")
            raise TaskQueueError(f"任务提交失败: {str(e)}")

    async def get_task_status(self, task_id: str) -> Dict[str, Any]:
        """Return the Celery state plus any stored metadata for task_id.

        Raises:
            TaskQueueError: if the status lookup itself fails.
        """
        try:
            result = AsyncResult(task_id, app=self.celery_app)

            finished = result.ready()
            status_info = {
                "task_id": task_id,
                "status": result.status,
                "ready": finished,
                "successful": result.successful() if finished else None,
                "failed": result.failed() if finished else None,
                "result": result.result if finished and result.successful() else None,
                "error": str(result.result) if finished and result.failed() else None,
                "traceback": result.traceback if result.failed() else None
            }

            # Metadata lookup is best-effort: a Redis failure only warns.
            try:
                await self._ensure_redis()
                task_meta = await self.redis_client.hget(f"task:{task_id}", "metadata")
                if task_meta:
                    status_info["metadata"] = json.loads(task_meta)
            except Exception as e:
                logger.warning(f"获取任务元数据失败: {str(e)}")

            return status_info

        except Exception as e:
            logger.error(f"获取任务状态失败: {str(e)}")
            raise TaskQueueError(f"获取任务状态失败: {str(e)}")

    async def wait_for_task(self, task_id: str, timeout: int = 300) -> Dict[str, Any]:
        """Poll once per second until the task finishes or timeout elapses.

        Args:
            task_id: id returned by submit_task.
            timeout: maximum seconds to wait.

        Returns:
            The final status dict from get_task_status.

        Raises:
            TimeoutError: the project exception (not the builtin) on timeout.
            TaskQueueError: on any other failure while polling.
        """
        try:
            start_time = datetime.now()

            while True:
                status = await self.get_task_status(task_id)

                if status["ready"]:
                    return status

                elapsed = (datetime.now() - start_time).total_seconds()
                if elapsed > timeout:
                    raise TimeoutError(f"任务等待超时: task_id={task_id}")

                await asyncio.sleep(1)

        except TimeoutError:
            # Re-raise the timeout as-is so callers can distinguish it.
            raise
        except Exception as e:
            logger.error(f"等待任务失败: {str(e)}")
            raise TaskQueueError(f"等待任务失败: {str(e)}")

    async def cancel_task(self, task_id: str) -> bool:
        """Revoke a task that has not finished yet.

        Returns:
            True if a revoke was issued, False if the task already finished.

        Raises:
            TaskQueueError: if the revoke attempt fails.
        """
        try:
            result = AsyncResult(task_id, app=self.celery_app)

            if not result.ready():
                # terminate=True also stops a task that already started.
                result.revoke(terminate=True)
                logger.info(f"任务取消完成: task_id={task_id}")
                return True
            else:
                logger.warning(f"任务已完成，无法取消: task_id={task_id}")
                return False

        except Exception as e:
            logger.error(f"取消任务失败: {str(e)}")
            raise TaskQueueError(f"取消任务失败: {str(e)}")

    async def get_queue_stats(self, queue_name: str = "default") -> Dict[str, Any]:
        """Return queue length and task counters read from Redis.

        NOTE(review): the keys "celery@<queue>", "celery@active" and
        "celery@processed" do not match Celery's default Redis layout (the
        broker keeps pending messages in a list keyed by the plain queue
        name) — verify these keys against the worker configuration.

        Raises:
            TaskQueueError: if the Redis reads fail.
        """
        try:
            await self._ensure_redis()

            queue_key = f"celery@{queue_name}"
            queue_length = await self.redis_client.llen(queue_key)

            active_tasks = await self.redis_client.scard("celery@active")

            # Missing counter key defaults to 0.
            processed_tasks = await self.redis_client.get("celery@processed") or 0

            return {
                "queue_name": queue_name,
                "queue_length": queue_length,
                "active_tasks": active_tasks,
                "processed_tasks": int(processed_tasks),
                "timestamp": datetime.now().isoformat()
            }

        except Exception as e:
            logger.error(f"获取队列统计失败: {str(e)}")
            raise TaskQueueError(f"获取队列统计失败: {str(e)}")

    async def cleanup_expired_tasks(self, max_age_hours: int = 24) -> int:
        """Delete Celery result entries older than max_age_hours.

        Scans "celery-task-meta-*" keys in batches of 100; entries that
        cannot be parsed are skipped with a warning.

        NOTE(review): "date_done" is compared against naive local time
        (datetime.now()); Celery typically stores UTC — confirm, otherwise
        the cutoff is offset by the local UTC offset.

        Returns:
            The number of deleted result entries.

        Raises:
            TaskQueueError: if the scan itself fails.
        """
        try:
            await self._ensure_redis()

            cutoff_time = datetime.now() - timedelta(hours=max_age_hours)

            cleaned_count = 0
            cursor = 0

            while True:
                cursor, keys = await self.redis_client.scan(
                    cursor, match="celery-task-meta-*", count=100
                )

                for key in keys:
                    try:
                        task_data = await self.redis_client.get(key)
                        if not task_data:
                            continue
                        task_info = json.loads(task_data)
                        date_done = task_info.get("date_done")
                        if not date_done:
                            # No completion stamp: leave the entry alone
                            # instead of failing on fromisoformat("").
                            continue
                        if datetime.fromisoformat(date_done) < cutoff_time:
                            await self.redis_client.delete(key)
                            cleaned_count += 1
                    except Exception as e:
                        logger.warning(f"清理任务失败: {key}, 错误: {str(e)}")

                # SCAN cursor of 0 means the iteration is complete.
                if cursor == 0:
                    break

            logger.info(f"过期任务清理完成: 清理了 {cleaned_count} 个任务")
            return cleaned_count

        except Exception as e:
            logger.error(f"清理过期任务失败: {str(e)}")
            raise TaskQueueError(f"清理过期任务失败: {str(e)}")

    async def health_check(self) -> Dict[str, Any]:
        """Report connectivity of Redis and Celery plus default-queue stats.

        Never raises: failures are reported as {"status": "error", ...}.
        """
        try:
            await self._ensure_redis()
            redis_ok = await self.redis_client.ping()

            # Celery is considered up if any worker answers the inspect call.
            celery_ok = False
            try:
                stats = self.celery_app.control.inspect().stats()
                celery_ok = stats is not None
            except Exception:
                celery_ok = False

            queue_stats = await self.get_queue_stats()

            return {
                "status": "healthy" if redis_ok and celery_ok else "unhealthy",
                "redis": {"status": "connected" if redis_ok else "disconnected"},
                "celery": {"status": "connected" if celery_ok else "disconnected"},
                "queues": queue_stats,
                "timestamp": datetime.now().isoformat()
            }

        except Exception as e:
            logger.error(f"健康检查失败: {str(e)}")
            return {
                "status": "error",
                "error": str(e),
                "timestamp": datetime.now().isoformat()
            }

    def get_celery_app(self) -> Celery:
        """Expose the underlying Celery app (e.g. for worker startup)."""
        return self.celery_app

    async def close(self):
        """Close the Redis connection if it was ever opened."""
        try:
            if self.redis_client:
                await self.redis_client.close()
                logger.info("Redis连接已关闭")
        except Exception as e:
            logger.error(f"关闭连接失败: {str(e)}")


# Process-wide singleton instance of the manager.
celery_manager = None


async def get_celery_manager() -> CeleryManager:
    """Return the shared CeleryManager, building and seeding it on first call."""
    global celery_manager

    if celery_manager is None:
        celery_manager = CeleryManager()

        # Seed routing/retry defaults for the known task families:
        # (task_name, queue_name, priority, max_retries)
        defaults = (
            ("text_retrieval", "text_queue", 8, 2),
            ("doc_segment", "doc_queue", 7, 3),
            ("multi_modal", "multi_queue", 6, 2),
            ("validation", "validation_queue", 9, 1),
            ("integration", "integration_queue", 10, 2),
            ("crawler", "crawler_queue", 5, 3),
        )
        for name, queue, prio, retries in defaults:
            celery_manager.register_task_config(
                TaskConfig(name, queue, priority=prio, max_retries=retries)
            )

    return celery_manager