"""
Message Queue Utilities
=======================

Message queue implementation for asynchronous task processing.
Supports Redis as the message broker with task routing and retry logic.
"""

import json
import logging
import pickle
import time
import uuid
from datetime import datetime, timedelta
from enum import Enum
from typing import Any, Callable, Dict, List, Optional, Union

import redis
from celery import Celery, shared_task
from celery.result import AsyncResult

# Initialize logger
logger = logging.getLogger(__name__)

class TaskStatus(Enum):
    """Task execution status enumeration."""
    PENDING = "pending"
    PROCESSING = "processing"
    SUCCESS = "success"
    FAILURE = "failure"
    RETRY = "retry"

class MessageQueueManager:
    """
    Message queue manager for asynchronous task processing.
    
    Provides a unified interface for:
    - Task creation and execution
    - Queue management
    - Task status tracking
    - Retry mechanism
    - Result handling
    """
    
    def __init__(self, broker_url: str, result_backend: str, 
                 task_serializer: str = 'json', accept_content: List[str] = None):
        """
        Initialize message queue manager.
        
        Args:
            broker_url: Message broker URL (Redis)
            result_backend: Result backend URL
            task_serializer: Task serialization format
            accept_content: Accepted content types
        """
        self.broker_url = broker_url
        self.result_backend = result_backend
        
        # Initialize Celery app
        self.celery_app = Celery(
            'flask_enterprise',
            broker=broker_url,
            backend=result_backend,
            include=['app.utils.message_queue']
        )
        
        # Configure Celery
        self.celery_app.conf.update(
            task_serializer=task_serializer,
            accept_content=accept_content or ['json'],
            result_serializer='json',
            timezone='UTC',
            enable_utc=True,
            task_track_started=True,
            task_time_limit=30 * 60,  # 30 minutes
            task_soft_time_limit=25 * 60,  # 25 minutes
            worker_prefetch_multiplier=1,
            worker_max_tasks_per_child=1000,
            result_expires=3600,  # 1 hour
            beat_schedule={
                'cleanup-task-results': {
                    'task': 'app.utils.message_queue.cleanup_task_results',
                    'schedule': timedelta(hours=1),
                },
            }
        )
        
        logger.info(f"Message queue initialized with broker: {broker_url}")
    
    def create_task(self, func: Callable, *args, **kwargs) -> str:
        """
        Create a new asynchronous task.
        
        Args:
            func: Function to execute
            *args: Function arguments
            **kwargs: Function keyword arguments
        
        Returns:
            str: Task ID
        """
        try:
            # Create task with Celery
            task = self.celery_app.send_task(
                f"{func.__module__}.{func.__name__}",
                args=args,
                kwargs=kwargs
            )
            
            logger.info(f"Created task {task.id} for function {func.__name__}")
            return task.id
        except Exception as e:
            logger.error(f"Error creating task: {str(e)}")
            raise
    
    def get_task_status(self, task_id: str) -> TaskStatus:
        """
        Get the status of a task.
        
        Args:
            task_id: Task ID
        
        Returns:
            TaskStatus: Current task status
        """
        try:
            result = AsyncResult(task_id, app=self.celery_app)
            
            if result.state == 'PENDING':
                return TaskStatus.PENDING
            elif result.state == 'STARTED':
                return TaskStatus.PROCESSING
            elif result.state == 'SUCCESS':
                return TaskStatus.SUCCESS
            elif result.state == 'FAILURE':
                return TaskStatus.FAILURE
            elif result.state == 'RETRY':
                return TaskStatus.RETRY
            else:
                return TaskStatus.PENDING
        except Exception as e:
            logger.error(f"Error getting task status for {task_id}: {str(e)}")
            return TaskStatus.FAILURE
    
    def get_task_result(self, task_id: str) -> Any:
        """
        Get the result of a completed task.
        
        Args:
            task_id: Task ID
        
        Returns:
            Task result or None if not available
        """
        try:
            result = AsyncResult(task_id, app=self.celery_app)
            
            if result.ready():
                if result.successful():
                    return result.result
                else:
                    return result.traceback
            return None
        except Exception as e:
            logger.error(f"Error getting task result for {task_id}: {str(e)}")
            return None
    
    def revoke_task(self, task_id: str, terminate: bool = False) -> bool:
        """
        Revoke/cancel a task.
        
        Args:
            task_id: Task ID
            terminate: Whether to terminate if running
        
        Returns:
            bool: True if successful
        """
        try:
            self.celery_app.control.revoke(task_id, terminate=terminate)
            logger.info(f"Revoked task {task_id}")
            return True
        except Exception as e:
            logger.error(f"Error revoking task {task_id}: {str(e)}")
            return False
    
    def get_active_tasks(self) -> List[Dict[str, Any]]:
        """
        Get all active tasks.
        
        Returns:
            List of active task information
        """
        try:
            inspect = self.celery_app.control.inspect()
            active_tasks = inspect.active()
            
            tasks = []
            if active_tasks:
                for worker, task_list in active_tasks.items():
                    for task in task_list:
                        tasks.append({
                            'id': task['id'],
                            'name': task['name'],
                            'worker': worker,
                            'args': task.get('args', []),
                            'kwargs': task.get('kwargs', {})
                        })
            
            return tasks
        except Exception as e:
            logger.error(f"Error getting active tasks: {str(e)}")
            return []
    
    def get_queue_stats(self) -> Dict[str, Any]:
        """
        Get queue statistics.
        
        Returns:
            Dictionary with queue statistics
        """
        try:
            inspect = self.celery_app.control.inspect()
            
            stats = {
                'active_tasks': len(self.get_active_tasks()),
                'scheduled_tasks': 0,
                'reserved_tasks': 0,
                'workers': 0
            }
            
            # Get scheduled tasks
            scheduled = inspect.scheduled()
            if scheduled:
                for worker, tasks in scheduled.items():
                    stats['scheduled_tasks'] += len(tasks)
            
            # Get reserved tasks
            reserved = inspect.reserved()
            if reserved:
                for worker, tasks in reserved.items():
                    stats['reserved_tasks'] += len(tasks)
            
            # Get worker count
            workers = inspect.ping()
            if workers:
                stats['workers'] = len(workers)
            
            return stats
        except Exception as e:
            logger.error(f"Error getting queue stats: {str(e)}")
            return {}

class TaskRouter:
    """
    Task router for routing tasks to appropriate queues.
    """
    
    def __init__(self):
        self.routes = {
            'email': 'email_queue',
            'notification': 'notification_queue',
            'data_processing': 'data_queue',
            'image_processing': 'media_queue',
            'default': 'default_queue'
        }
    
    def route_task(self, task_name: str, args: tuple, kwargs: dict) -> str:
        """
        Determine which queue a task should be routed to.
        
        Args:
            task_name: Name of the task
            args: Task arguments
            kwargs: Task keyword arguments
        
        Returns:
            str: Queue name
        """
        # Route based on task name patterns
        for pattern, queue in self.routes.items():
            if pattern in task_name.lower():
                return queue
        
        return self.routes['default']

class RetryHandler:
    """
    Task retry handler with exponential backoff.
    """
    
    def __init__(self, max_retries: int = 3, base_delay: int = 1):
        """
        Initialize retry handler.
        
        Args:
            max_retries: Maximum number of retries
            base_delay: Base delay in seconds
        """
        self.max_retries = max_retries
        self.base_delay = base_delay
    
    def calculate_delay(self, retry_count: int) -> int:
        """
        Calculate delay for retry using exponential backoff.
        
        Args:
            retry_count: Current retry count
        
        Returns:
            int: Delay in seconds
        """
        return self.base_delay * (2 ** retry_count)
    
    def should_retry(self, retry_count: int, exception: Exception) -> bool:
        """
        Determine if a task should be retried.
        
        Args:
            retry_count: Current retry count
            exception: Exception that occurred
        
        Returns:
            bool: True if should retry
        """
        return retry_count < self.max_retries and not isinstance(exception, (ValueError, TypeError))

# Celery task decorators and functions
@celery_app.task(bind=True, max_retries=3)
def send_email_task(self, to_email: str, subject: str, body: str, 
                   email_type: str = 'general') -> Dict[str, Any]:
    """
    Send email task with retry logic.
    
    Args:
        to_email: Recipient email address
        subject: Email subject
        body: Email body
        email_type: Type of email
    
    Returns:
        Dict with task result
    """
    try:
        # Simulate email sending (replace with actual email service)
        logger.info(f"Sending {email_type} email to {to_email}")
        
        # Add your email sending logic here
        # For example: mail.send(to_email, subject, body)
        
        return {
            'status': 'success',
            'message': f'Email sent to {to_email}',
            'timestamp': datetime.utcnow().isoformat()
        }
    
    except Exception as e:
        logger.error(f"Error sending email: {str(e)}")
        
        # Retry with exponential backoff
        retry_count = self.request.retries
        delay = 60 * (2 ** retry_count)  # 1, 2, 4 minutes
        
        if retry_count < 3:
            raise self.retry(exc=e, countdown=delay)
        else:
            return {
                'status': 'failed',
                'error': str(e),
                'timestamp': datetime.utcnow().isoformat()
            }

@celery_app.task(bind=True, max_retries=2)
def process_data_task(self, data: Dict[str, Any], operation: str) -> Dict[str, Any]:
    """
    Process data task.
    
    Args:
        data: Data to process
        operation: Processing operation type
    
    Returns:
        Dict with processing result
    """
    try:
        logger.info(f"Processing data with operation: {operation}")
        
        # Simulate data processing
        if operation == 'transform':
            processed_data = {k.upper(): v for k, v in data.items()}
        elif operation == 'filter':
            processed_data = {k: v for k, v in data.items() if v is not None}
        else:
            processed_data = data
        
        return {
            'status': 'success',
            'processed_data': processed_data,
            'timestamp': datetime.utcnow().isoformat()
        }
    
    except Exception as e:
        logger.error(f"Error processing data: {str(e)}")
        
        if self.request.retries < 2:
            raise self.retry(exc=e, countdown=30)
        else:
            return {
                'status': 'failed',
                'error': str(e),
                'timestamp': datetime.utcnow().isoformat()
            }

@celery_app.task(bind=True)
def cleanup_task_results(self):
    """
    Periodic task to clean up old task results.
    """
    try:
        from celery.result import ResultSet
        
        # Clean up results older than 24 hours
        cutoff_time = datetime.utcnow() - timedelta(hours=24)
        
        # This is a simplified cleanup - in production, you might want
        # to use a more sophisticated cleanup strategy
        logger.info("Cleaning up old task results")
        
        return {
            'status': 'success',
            'message': 'Task results cleanup completed',
            'timestamp': datetime.utcnow().isoformat()
        }
    
    except Exception as e:
        logger.error(f"Error during cleanup: {str(e)}")
        return {
            'status': 'failed',
            'error': str(e),
            'timestamp': datetime.utcnow().isoformat()
        }

# Convenience functions for common tasks
def send_email_async(to_email: str, subject: str, body: str, 
                    email_type: str = 'general') -> str:
    """
    Send email asynchronously.
    
    Args:
        to_email: Recipient email
        subject: Email subject
        body: Email body
        email_type: Email type
    
    Returns:
        str: Task ID
    """
    return send_email_task.delay(to_email, subject, body, email_type).id

def process_data_async(data: Dict[str, Any], operation: str) -> str:
    """
    Process data asynchronously.
    
    Args:
        data: Data to process
        operation: Processing operation
    
    Returns:
        str: Task ID
    """
    return process_data_task.delay(data, operation).id