"""
异步任务队列管理器
"""

import asyncio
import json
import pickle
import time
from typing import Dict, List, Any, Optional, Callable, Union
from dataclasses import dataclass, field, asdict
from enum import Enum
from datetime import datetime, timedelta
from collections import deque
import uuid
import threading
import weakref

from ...core.exceptions import BusinessLogicError, ValidationError
from ...core.logging import get_logger, audit_logger
from ...core.config import settings
from ...services.cache import cache_service

# Module-level logger, named after this module's import path.
logger = get_logger(__name__)


class TaskStatus(Enum):
    """Lifecycle states a task moves through inside a queue."""

    PENDING = "pending"      # created, not yet placed in a queue
    QUEUED = "queued"        # waiting in the pending queue
    RUNNING = "running"      # picked up by a worker
    SUCCESS = "success"      # finished without error
    FAILED = "failed"        # failed permanently (retry budget exhausted)
    CANCELLED = "cancelled"  # withdrawn before or during execution
    RETRY = "retry"          # failed, waiting to be re-queued


class TaskPriority(Enum):
    """Relative scheduling priority; a higher value runs first."""

    LOW = 1
    NORMAL = 2
    HIGH = 3
    URGENT = 4


@dataclass
class Task:
    """A unit of work tracked through its whole queue lifecycle.

    Bundles the callable and its scheduling parameters together with
    the mutable runtime state (status, timestamps, retry bookkeeping,
    result/progress) that TaskQueue updates as the task advances.
    """
    id: str                     # unique id; auto-filled with a uuid4 when falsy
    name: str                   # human-readable name (normally func.__name__)
    func: Callable              # sync or async callable to execute
    args: tuple = field(default_factory=tuple)
    kwargs: dict = field(default_factory=dict)
    priority: TaskPriority = TaskPriority.NORMAL
    queue_name: str = "default"
    timeout: Optional[float] = None   # per-execution timeout in seconds
    max_retries: int = 3
    retry_delay: float = 60.0         # seconds to wait before a retry
    
    # Runtime information (managed by the queue; naive-UTC timestamps)
    status: TaskStatus = TaskStatus.PENDING
    created_at: datetime = field(default_factory=datetime.utcnow)
    queued_at: Optional[datetime] = None
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    
    # Retry bookkeeping
    retry_count: int = 0
    last_error: Optional[str] = None
    last_traceback: Optional[str] = None
    
    # Result information
    result: Any = None
    progress: float = 0.0       # fraction complete, 0.0 .. 1.0
    metadata: Dict[str, Any] = field(default_factory=dict)
    
    def __post_init__(self):
        # Guarantee every task has an id even when the caller passes "".
        if not self.id:
            self.id = str(uuid.uuid4())


@dataclass
class QueueConfig:
    """Static configuration for one TaskQueue.

    Per-task settings (task_timeout, retry_delay, max_retries) act as
    defaults that TaskQueue.enqueue applies when the caller does not
    override them.
    """
    name: str                                # queue identifier; must be unique
    max_workers: int = 4                     # concurrent worker coroutines
    max_queue_size: int = 1000               # pending-queue capacity
    worker_timeout: float = 300.0            # worker timeout, seconds (not read in this module — TODO confirm intended use)
    task_timeout: float = 3600.0             # default per-task timeout, seconds
    retry_delay: float = 60.0                # default delay between retries, seconds
    max_retries: int = 3                     # default retry budget per task
    # Fix: annotation was plain `str` with a None default.
    dead_letter_queue: Optional[str] = None  # target queue name for dead letters (not read in this module — TODO confirm)


class TaskQueue:
    """Priority-ordered asyncio task queue.

    Tasks are executed by a fixed pool of worker coroutines.  Each task
    moves through pending -> running -> completed/failed storage; every
    status transition fires the callbacks registered for that status.
    All timestamps are naive UTC (datetime.utcnow), matching the Task
    dataclass defaults.
    """

    def __init__(self, config: QueueConfig):
        self.config = config

        # Queue storage.  pending_queue is kept ordered by priority
        # (highest first) via _add_to_queue; the dicts index tasks by id.
        self.pending_queue = deque()
        self.running_tasks: Dict[str, Task] = {}
        self.completed_tasks: Dict[str, Task] = {}
        self.failed_tasks: Dict[str, Task] = {}

        # Worker coroutines created by start().
        self.workers: List[asyncio.Task] = []
        self.is_running = False

        # Aggregate statistics; refreshed during enqueue/execute and in
        # get_statistics().
        self.stats = {
            "total_tasks": 0,
            "completed_tasks": 0,
            "failed_tasks": 0,
            "retried_tasks": 0,
            "average_execution_time": 0.0,
            "queue_size": 0,
            "active_workers": 0
        }

        # Per-status callback lists, invoked on every status change.
        self.task_callbacks: Dict[TaskStatus, List[Callable]] = {
            status: [] for status in TaskStatus
        }

        logger.info(f"TaskQueue '{config.name}' initialized")

    async def start(self):
        """Spawn the worker pool; no-op if already running."""
        if self.is_running:
            return

        self.is_running = True

        for i in range(self.config.max_workers):
            worker = asyncio.create_task(self._worker(f"{self.config.name}-worker-{i}"))
            self.workers.append(worker)

        logger.info(f"TaskQueue '{self.config.name}' started with {len(self.workers)} workers")

    async def stop(self):
        """Cancel all workers and wait for them to exit; no-op if stopped."""
        if not self.is_running:
            return

        self.is_running = False

        for worker in self.workers:
            worker.cancel()

        # return_exceptions swallows the CancelledError from each worker.
        await asyncio.gather(*self.workers, return_exceptions=True)

        self.workers.clear()

        logger.info(f"TaskQueue '{self.config.name}' stopped")

    async def enqueue(
        self,
        func: Callable,
        args: tuple = (),
        kwargs: dict = None,
        priority: TaskPriority = TaskPriority.NORMAL,
        timeout: Optional[float] = None,
        max_retries: Optional[int] = None,
        retry_delay: Optional[float] = None,
        task_id: Optional[str] = None,
        metadata: Optional[Dict[str, Any]] = None
    ) -> Task:
        """Create a Task for *func* and insert it by priority.

        Raises ValidationError when the queue holds max_queue_size
        pending tasks.  Returns the Task handle, which callers can use
        to poll status/result.
        """
        if len(self.pending_queue) >= self.config.max_queue_size:
            raise ValidationError(f"Queue '{self.config.name}' is full")

        # Fall back to the queue defaults only when the caller passed
        # None.  (The previous `x or default` also clobbered legitimate
        # falsy values such as max_retries=0 or timeout=0.)
        task = Task(
            id=task_id or str(uuid.uuid4()),
            name=func.__name__,
            func=func,
            args=args,
            kwargs=kwargs or {},
            priority=priority,
            queue_name=self.config.name,
            timeout=self.config.task_timeout if timeout is None else timeout,
            max_retries=self.config.max_retries if max_retries is None else max_retries,
            retry_delay=self.config.retry_delay if retry_delay is None else retry_delay,
            metadata=metadata or {}
        )

        self._add_to_queue(task)

        self.stats["total_tasks"] += 1
        self.stats["queue_size"] = len(self.pending_queue)

        logger.info(f"Task {task.id} enqueued to queue '{self.config.name}'")
        return task

    async def get_task(self, task_id: str) -> Optional[Task]:
        """Look up a task by id in every state, including still-queued."""
        # Fix: the pending queue was not searched before, so tasks in
        # the QUEUED state could never be found through this method.
        for task in self.pending_queue:
            if task.id == task_id:
                return task

        for task_dict in [self.running_tasks, self.completed_tasks, self.failed_tasks]:
            if task_id in task_dict:
                return task_dict[task_id]

        return None

    async def cancel_task(self, task_id: str) -> bool:
        """Cancel a pending task, or flag a running one for cancellation.

        A running task is not interrupted; it is only marked CANCELLED
        and finalized as cancelled when it finishes (see _execute_task).
        Returns True when a matching task was found.
        """
        # Iterate a snapshot so removal doesn't disturb the scan.
        for task in list(self.pending_queue):
            if task.id == task_id:
                task.status = TaskStatus.CANCELLED
                self.pending_queue.remove(task)
                self._notify_status_change(task)
                logger.info(f"Task {task_id} cancelled")
                return True

        if task_id in self.running_tasks:
            task = self.running_tasks[task_id]
            task.status = TaskStatus.CANCELLED
            self._notify_status_change(task)
            logger.info(f"Task {task_id} marked for cancellation")
            return True

        return False

    async def retry_task(self, task_id: str) -> bool:
        """Manually re-queue a FAILED task that has retry budget left."""
        task = await self.get_task(task_id)
        if not task:
            return False

        if task.status != TaskStatus.FAILED:
            return False

        if task.retry_count >= task.max_retries:
            return False

        # Reset runtime state and consume one retry.
        task.status = TaskStatus.PENDING
        task.retry_count += 1
        task.last_error = None
        task.last_traceback = None
        task.progress = 0.0

        # Fix: also drop the stale entry from the failed index, so the
        # task isn't simultaneously "failed" and "queued".
        self.failed_tasks.pop(task_id, None)

        self._add_to_queue(task)

        logger.info(f"Task {task_id} queued for retry ({task.retry_count}/{task.max_retries})")
        return True

    def _add_to_queue(self, task: Task):
        """Insert *task* into pending_queue, keeping higher priority first."""
        task.status = TaskStatus.QUEUED
        task.queued_at = datetime.utcnow()

        # Linear scan: insert before the first strictly-lower-priority
        # task so equal priorities keep FIFO order.
        inserted = False
        for i, queued_task in enumerate(self.pending_queue):
            if task.priority.value > queued_task.priority.value:
                self.pending_queue.insert(i, task)
                inserted = True
                break

        if not inserted:
            self.pending_queue.append(task)

        self._notify_status_change(task)

    async def _worker(self, worker_name: str):
        """Worker loop: pull the next task and execute it until stopped."""
        logger.info(f"Worker {worker_name} started")

        while self.is_running:
            try:
                task = await self._get_next_task()
                if not task:
                    # Idle poll keeps shutdown responsive without busy-waiting.
                    await asyncio.sleep(0.1)
                    continue

                await self._execute_task(task, worker_name)

            except asyncio.CancelledError:
                break
            except Exception as e:
                logger.error(f"Worker {worker_name} error: {e}")
                await asyncio.sleep(1.0)

        logger.info(f"Worker {worker_name} stopped")

    async def _get_next_task(self) -> Optional[Task]:
        """Pop the next runnable task, silently dropping cancelled ones."""
        while self.pending_queue and self.is_running:
            # Respect the concurrency budget.
            if len(self.running_tasks) >= self.config.max_workers:
                break

            task = self.pending_queue.popleft()

            # Tasks cancelled while queued are discarded here.
            if task.status == TaskStatus.CANCELLED:
                continue

            return task

        return None

    async def _execute_task(self, task: Task, worker_name: str):
        """Run one task, honouring timeout, cancellation and retries."""
        start_time = time.time()

        task.status = TaskStatus.RUNNING
        task.started_at = datetime.utcnow()
        self.running_tasks[task.id] = task

        self.stats["queue_size"] = len(self.pending_queue)
        self.stats["active_workers"] = len(self.running_tasks)

        self._notify_status_change(task)

        try:
            # Coroutine functions run on the event loop; sync functions
            # are pushed to the default thread pool so they don't block it.
            if asyncio.iscoroutinefunction(task.func):
                result = await asyncio.wait_for(
                    task.func(*task.args, **task.kwargs),
                    timeout=task.timeout
                )
            else:
                result = await asyncio.wait_for(
                    asyncio.to_thread(task.func, *task.args, **task.kwargs),
                    timeout=task.timeout
                )

            if task.status == TaskStatus.CANCELLED:
                # Fix: cancel_task() may have flagged the task while it
                # was running; previously this path overwrote the status
                # with SUCCESS.  Keep CANCELLED, retain the result for
                # inspection, and free the worker slot.
                task.result = result
                task.completed_at = datetime.utcnow()
                self.completed_tasks[task.id] = task
                self.running_tasks.pop(task.id, None)
            else:
                # Normal success path.
                task.result = result
                task.status = TaskStatus.SUCCESS
                task.completed_at = datetime.utcnow()
                task.progress = 1.0

                self.completed_tasks[task.id] = task
                del self.running_tasks[task.id]

                self.stats["completed_tasks"] += 1
                execution_time = time.time() - start_time
                self._update_execution_time(execution_time)

                logger.info(f"Task {task.id} completed successfully in {execution_time:.2f}s")

        except asyncio.TimeoutError:
            task.last_error = f"Task timeout after {task.timeout}s"
            await self._handle_task_failure(task)

        except Exception as e:
            task.last_error = str(e)
            task.last_traceback = self._get_traceback()
            await self._handle_task_failure(task)

        finally:
            self.stats["active_workers"] = len(self.running_tasks)
            self._notify_status_change(task)

    async def _handle_task_failure(self, task: Task):
        """Route a failed task to retry scheduling or permanent failure."""
        # Fix: always release the worker slot.  Previously a task routed
        # to RETRY stayed in running_tasks for the whole retry_delay,
        # blocking _get_next_task's concurrency check.
        self.running_tasks.pop(task.id, None)

        if task.status == TaskStatus.CANCELLED:
            # A task cancelled mid-run that then raised is finalized as
            # cancelled; it must not be retried.
            task.completed_at = datetime.utcnow()
            self.completed_tasks[task.id] = task
            return

        if task.retry_count < task.max_retries:
            task.status = TaskStatus.RETRY
            task.retry_count += 1

            # Fire-and-forget; _schedule_retry re-queues after the delay.
            asyncio.create_task(self._schedule_retry(task))

            self.stats["retried_tasks"] += 1
            logger.info(f"Task {task.id} failed, scheduling retry ({task.retry_count}/{task.max_retries})")

        else:
            task.status = TaskStatus.FAILED
            task.completed_at = datetime.utcnow()

            self.failed_tasks[task.id] = task

            self.stats["failed_tasks"] += 1
            logger.error(f"Task {task.id} failed permanently: {task.last_error}")

    async def _schedule_retry(self, task: Task):
        """Sleep for retry_delay, then re-queue unless cancelled meanwhile."""
        await asyncio.sleep(task.retry_delay)

        # Only re-queue if nothing (e.g. cancellation) changed the status.
        if task.status == TaskStatus.RETRY:
            self._add_to_queue(task)
            logger.info(f"Task {task.id} re-queued for retry")

    def _update_execution_time(self, execution_time: float):
        """Maintain an exponential moving average of execution times."""
        completed = self.stats["completed_tasks"]
        if completed == 1:
            self.stats["average_execution_time"] = execution_time
        else:
            # EMA (alpha=0.1) keeps the figure cheap to maintain.
            alpha = 0.1
            current_avg = self.stats["average_execution_time"]
            self.stats["average_execution_time"] = alpha * execution_time + (1 - alpha) * current_avg

    def _get_traceback(self) -> str:
        """Format the traceback of the exception currently being handled."""
        import traceback
        return traceback.format_exc()

    def _notify_status_change(self, task: Task):
        """Invoke the callbacks registered for the task's current status.

        Async callbacks are scheduled fire-and-forget; callback errors
        are logged and never propagate into the queue machinery.
        """
        callbacks = self.task_callbacks.get(task.status, [])
        for callback in callbacks:
            try:
                if asyncio.iscoroutinefunction(callback):
                    asyncio.create_task(callback(task))
                else:
                    callback(task)
            except Exception as e:
                logger.error(f"Task callback error: {e}")

    def add_status_callback(self, status: TaskStatus, callback: Callable):
        """Register *callback* (sync or async) for a status transition."""
        self.task_callbacks[status].append(callback)

    def get_statistics(self) -> Dict[str, Any]:
        """Return a snapshot copy of the queue's statistics."""
        self.stats.update({
            "queue_size": len(self.pending_queue),
            "active_workers": len(self.running_tasks),
            "completed_count": len(self.completed_tasks),
            "failed_count": len(self.failed_tasks)
        })

        return dict(self.stats)

class QueueManager:
    """Facade that owns multiple named TaskQueues.

    Provides create/start/stop per queue, task submission with a
    lazily-created "default" queue fallback, cross-queue task lookup,
    aggregate statistics and periodic cleanup of finished tasks.
    """

    def __init__(self):
        self.queues: Dict[str, TaskQueue] = {}
        self.default_config = QueueConfig("default")

        # Aggregate numbers refreshed by get_global_statistics().
        self.global_stats = {
            "total_queues": 0,
            "active_queues": 0,
            "total_workers": 0,
            "total_pending_tasks": 0,
            "total_running_tasks": 0,
            "total_completed_tasks": 0,
            "total_failed_tasks": 0
        }

        logger.info("QueueManager initialized")

    async def create_queue(self, config: QueueConfig) -> TaskQueue:
        """Register a new queue; raises ValidationError on duplicate name."""
        if config.name in self.queues:
            raise ValidationError(f"Queue '{config.name}' already exists")

        queue = TaskQueue(config)
        self.queues[config.name] = queue

        logger.info(f"Created queue '{config.name}'")
        return queue

    async def get_queue(self, name: str) -> Optional[TaskQueue]:
        """Return the queue registered under *name*, if any."""
        return self.queues.get(name)

    async def start_queue(self, name: str) -> bool:
        """Start one queue; returns False when the name is unknown."""
        queue = await self.get_queue(name)
        if not queue:
            return False

        await queue.start()
        logger.info(f"Started queue '{name}'")
        return True

    async def stop_queue(self, name: str) -> bool:
        """Stop one queue; returns False when the name is unknown."""
        queue = await self.get_queue(name)
        if not queue:
            return False

        await queue.stop()
        logger.info(f"Stopped queue '{name}'")
        return True

    async def start_all(self):
        """Start every registered queue."""
        for queue in self.queues.values():
            await queue.start()

        logger.info("All queues started")

    async def stop_all(self):
        """Stop every registered queue."""
        for queue in self.queues.values():
            await queue.stop()

        logger.info("All queues stopped")

    async def enqueue_task(
        self,
        func: Callable,
        queue_name: str = "default",
        **kwargs
    ) -> Task:
        """Submit *func* to *queue_name*, falling back to "default".

        The default queue is created and started on first use.  Extra
        keyword arguments are forwarded to TaskQueue.enqueue().
        """
        queue = await self.get_queue(queue_name)
        if not queue:
            if queue_name != "default":
                logger.warning(f"Queue '{queue_name}' not found, using default queue")
            queue = self.queues.get("default")

            if not queue:
                # Lazily create and start the default queue.
                queue = await self.create_queue(self.default_config)
                await queue.start()

        # Fix: TaskQueue.enqueue() has no `queue_name` parameter — the
        # previous call forwarded it and raised TypeError on every
        # submission.  The queue stamps its own name onto the task.
        return await queue.enqueue(func, **kwargs)

    async def get_task(self, task_id: str, queue_name: Optional[str] = None) -> Optional[Task]:
        """Find a task in one queue (when named) or across all queues."""
        if queue_name:
            queue = await self.get_queue(queue_name)
            if queue:
                return await queue.get_task(task_id)
            return None

        for queue in self.queues.values():
            task = await queue.get_task(task_id)
            if task:
                return task

        return None

    async def cancel_task(self, task_id: str, queue_name: Optional[str] = None) -> bool:
        """Cancel a task in one queue (when named) or across all queues."""
        if queue_name:
            queue = await self.get_queue(queue_name)
            if queue:
                return await queue.cancel_task(task_id)
            return False

        for queue in self.queues.values():
            if await queue.cancel_task(task_id):
                return True

        return False

    async def get_queue_statistics(self, queue_name: str) -> Optional[Dict[str, Any]]:
        """Return one queue's statistics snapshot, or None if unknown."""
        queue = await self.get_queue(queue_name)
        if queue:
            return queue.get_statistics()
        return None

    async def get_global_statistics(self) -> Dict[str, Any]:
        """Aggregate statistics across every registered queue."""
        total_pending = 0
        total_running = 0
        total_completed = 0
        total_failed = 0
        total_workers = 0
        active_queues = 0

        for queue in self.queues.values():
            stats = queue.get_statistics()
            total_pending += stats.get("queue_size", 0)
            total_running += stats.get("active_workers", 0)
            total_completed += stats.get("completed_tasks", 0)
            total_failed += stats.get("failed_tasks", 0)
            total_workers += len(queue.workers)

            if queue.is_running:
                active_queues += 1

        self.global_stats.update({
            "total_queues": len(self.queues),
            "active_queues": active_queues,
            "total_workers": total_workers,
            "total_pending_tasks": total_pending,
            "total_running_tasks": total_running,
            "total_completed_tasks": total_completed,
            "total_failed_tasks": total_failed
        })

        return dict(self.global_stats)

    async def cleanup_completed_tasks(self, max_age_hours: int = 24) -> int:
        """Drop completed and failed tasks older than *max_age_hours*."""
        cutoff_time = datetime.utcnow() - timedelta(hours=max_age_hours)
        cleaned_count = 0

        for queue in self.queues.values():
            cleaned_count += self._purge_old_tasks(queue.completed_tasks, cutoff_time)
            cleaned_count += self._purge_old_tasks(queue.failed_tasks, cutoff_time)

        if cleaned_count > 0:
            logger.info(f"Cleaned up {cleaned_count} old tasks")

        return cleaned_count

    @staticmethod
    def _purge_old_tasks(task_index: Dict[str, Task], cutoff_time: datetime) -> int:
        """Remove tasks completed before *cutoff_time*; return the count."""
        stale = [
            task_id for task_id, task in task_index.items()
            if task.completed_at and task.completed_at < cutoff_time
        ]
        for task_id in stale:
            del task_index[task_id]
        return len(stale)


# Global queue-manager singleton shared by the decorator and helpers below.
queue_manager = QueueManager()


# 便捷装饰器
def task(
    queue_name: str = "default",
    priority: TaskPriority = TaskPriority.NORMAL,
    timeout: Optional[float] = None,
    max_retries: Optional[int] = None
):
    """Decorator that turns a function call into a queue submission.

    Calling the decorated function enqueues it on the global
    queue_manager and returns the Task handle instead of running the
    function inline.  The undecorated callable stays reachable via
    ``wrapper.original_func``.
    """
    def decorator(func):
        # Local import, consistent with this module's style (see
        # TaskQueue._get_traceback).
        from functools import wraps

        # Fix: without functools.wraps the wrapper hid the wrapped
        # function's __name__/__doc__ from introspection.
        @wraps(func)
        async def wrapper(*args, **kwargs):
            return await queue_manager.enqueue_task(
                func,
                queue_name=queue_name,
                priority=priority,
                timeout=timeout,
                max_retries=max_retries,
                args=args,
                kwargs=kwargs
            )

        wrapper.original_func = func
        wrapper.task_config = {
            "queue_name": queue_name,
            "priority": priority,
            "timeout": timeout,
            "max_retries": max_retries
        }

        return wrapper

    return decorator


# 便捷函数
async def run_in_background(
    func: Callable,
    *args,
    queue_name: str = "default",
    **kwargs
) -> Task:
    """Submit *func* with the given arguments to a queue.

    Thin convenience wrapper around queue_manager.enqueue_task();
    returns the Task handle for the submitted work.
    """
    submission = {"args": args, "kwargs": kwargs}
    return await queue_manager.enqueue_task(func, queue_name=queue_name, **submission)


async def schedule_task(
    func: Callable,
    delay: float,
    *args,
    queue_name: str = "default",
    **kwargs
) -> Task:
    """Run *func* after *delay* seconds on the given queue.

    Fix: the original wrapper awaited ``func(...)`` unconditionally,
    which raised TypeError for plain (non-coroutine) callables.  Now
    both sync and async callables are supported.
    """
    async def delayed_task():
        await asyncio.sleep(delay)
        result = func(*args, **kwargs)
        # Await only when func actually produced a coroutine.
        if asyncio.iscoroutine(result):
            result = await result
        return result

    return await queue_manager.enqueue_task(
        delayed_task,
        queue_name=queue_name
    )


# 初始化默认队列
async def init_default_queues():
    """Create and start the default queue.

    Intended to be called once at application startup; raises
    ValidationError if the default queue already exists.
    """
    # Start via the returned handle directly; the previous version left
    # an unused local and re-resolved the queue by name.
    default_queue = await queue_manager.create_queue(QueueConfig("default"))
    await default_queue.start()

    logger.info("Default queue initialized")
