"""
Enhanced logging configuration for InCloud MCP Server
"""
import functools
import logging
import logging.handlers
import os
import sys
from datetime import datetime
from typing import Optional


class ColoredFormatter(logging.Formatter):
    """Formatter that wraps the level name in ANSI colors for console output."""

    # ANSI color codes, keyed by level name
    COLORS = {
        'DEBUG': '\033[36m',    # Cyan
        'INFO': '\033[32m',     # Green
        'WARNING': '\033[33m',  # Yellow
        'ERROR': '\033[31m',    # Red
        'CRITICAL': '\033[35m', # Magenta
        'RESET': '\033[0m'      # Reset
    }

    def format(self, record: logging.LogRecord) -> str:
        """Format *record*, coloring the level name for known levels.

        The LogRecord is shared by every handler on the logger, so the
        levelname is restored after formatting — otherwise a plain
        (non-colored) handler formatting the same record later would emit
        raw ANSI escape codes, and repeated formatting would nest them.
        """
        original_levelname = record.levelname
        if original_levelname in self.COLORS:
            record.levelname = (
                f"{self.COLORS[original_levelname]}"
                f"{original_levelname}{self.COLORS['RESET']}"
            )
        try:
            return super().format(record)
        finally:
            record.levelname = original_levelname


def setup_logging(
    log_level: str = "INFO",
    log_file: Optional[str] = None,
    log_dir: str = "logs",
    max_file_size: int = 10 * 1024 * 1024,  # 10MB
    backup_count: int = 5,
    console_output: bool = True
) -> str:
    """
    Setup enhanced logging configuration on the root logger.

    Args:
        log_level: Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
        log_file: Log file name (if None, auto-generate based on timestamp)
        log_dir: Directory to store log files
        max_file_size: Maximum size of each log file in bytes
        backup_count: Number of backup files to keep
        console_output: Whether to output logs to console

    Returns:
        Full path of the log file handlers were attached to.

    Raises:
        ValueError: If log_level is not a recognized logging level name.
    """
    # Resolve the level name once; a bare getattr on a bad name would raise
    # an opaque AttributeError deep inside logging setup.
    numeric_level = getattr(logging, log_level.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError(f"Invalid log level: {log_level}")

    # exist_ok avoids the check-then-create race between processes
    os.makedirs(log_dir, exist_ok=True)

    # Generate log file name if not provided
    if log_file is None:
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        log_file = f"incloud_mcp_server_{timestamp}.log"

    log_file_path = os.path.join(log_dir, log_file)

    # Configure root logger; clear handlers from any previous setup call so
    # repeated setup_logging() invocations don't duplicate output.
    root_logger = logging.getLogger()
    root_logger.setLevel(numeric_level)
    root_logger.handlers.clear()

    # Enhanced format with file and line number (for the log file)
    detailed_format = (
        '%(asctime)s - %(name)s - %(levelname)s - '
        '[%(filename)s:%(lineno)d] - %(funcName)s() - %(message)s'
    )

    # Shorter format for the console
    simple_format = (
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    )

    # File handler with size-based rotation
    file_handler = logging.handlers.RotatingFileHandler(
        log_file_path,
        maxBytes=max_file_size,
        backupCount=backup_count,
        encoding='utf-8'
    )
    file_handler.setLevel(numeric_level)
    file_handler.setFormatter(logging.Formatter(detailed_format))
    root_logger.addHandler(file_handler)

    # Console handler (optional), with colored level names
    if console_output:
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setLevel(numeric_level)
        console_handler.setFormatter(ColoredFormatter(simple_format))
        root_logger.addHandler(console_handler)

    # Log the setup
    logger = logging.getLogger(__name__)
    logger.info(f"Logging configured - Level: {log_level}, File: {log_file_path}")
    logger.info(f"Log rotation: {max_file_size // (1024*1024)}MB per file, {backup_count} backups")

    return log_file_path


def get_logger(name: str) -> logging.Logger:
    """
    Fetch the logger registered under *name* (usually ``__name__``).

    Args:
        name: Logger name to look up or create.

    Returns:
        The logging.Logger instance for that name.
    """
    # Thin pass-through kept so callers obtain loggers from one place.
    logger = logging.getLogger(name)
    return logger


def log_function_call(func):
    """
    Decorator to log function calls with parameters and return values.

    Entry/exit are logged at DEBUG on the decorated function's module
    logger; exceptions are logged at ERROR and re-raised unchanged.

    Usage:
        @log_function_call
        def my_function(param1, param2):
            return result
    """
    # functools.wraps preserves __name__, __doc__, and the wrapped
    # signature so introspection and stacked decorators keep working.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        logger = logging.getLogger(func.__module__)

        # Render positional and keyword arguments for the entry message
        args_str = ', '.join(str(arg) for arg in args)
        kwargs_str = ', '.join(f"{k}={v}" for k, v in kwargs.items())
        params_str = ', '.join(filter(None, [args_str, kwargs_str]))

        logger.debug(f"Entering {func.__name__}({params_str})")

        try:
            result = func(*args, **kwargs)
            logger.debug(f"Exiting {func.__name__}() -> {result}")
            return result
        except Exception as e:
            logger.error(f"Exception in {func.__name__}(): {e}")
            raise

    return wrapper


def log_api_request(url: str, method: str, status_code: Optional[int] = None, 
                   response_time: Optional[float] = None, error: Optional[str] = None):
    """
    Log API request details to the "api_requests" logger.

    Args:
        url: Request URL
        method: HTTP method
        status_code: Response status code
        response_time: Response time in seconds
        error: Error message if request failed
    """
    logger = logging.getLogger("api_requests")
    
    if error:
        logger.error(f"{method} {url} - ERROR: {error}")
    elif status_code:
        # 2xx/3xx responses at INFO, client/server errors at WARNING
        level = logging.INFO if 200 <= status_code < 400 else logging.WARNING
        # Compare against None so a measured 0.0s is still reported
        # (a bare truthiness check would silently drop it)
        time_str = f" ({response_time:.3f}s)" if response_time is not None else ""
        logger.log(level, f"{method} {url} - {status_code}{time_str}")
    else:
        logger.info(f"{method} {url} - Request started")


def log_swagger_discovery(service_name: str, urls_tried: list, success_url: Optional[str] = None, 
                         error: Optional[str] = None):
    """
    Record the outcome of a Swagger-docs discovery run.

    Args:
        service_name: Name of the service being probed
        urls_tried: Candidate URLs, in the order they were attempted
        success_url: The URL that worked (None if none did)
        error: Failure description when every attempt failed
    """
    logger = logging.getLogger("swagger_discovery")

    logger.info(f"Discovering Swagger docs for service: {service_name}")

    # One line per attempt: the winner at INFO, the rest only at DEBUG
    position = 0
    for url in urls_tried:
        position += 1
        if url != success_url:
            logger.debug(f"  {position}. {url} - ❌ Failed")
        else:
            logger.info(f"  {position}. {url} - ✅ SUCCESS")

    # Summary line: overall success or full failure report
    if not success_url:
        logger.error(f"Failed to find Swagger docs for {service_name}: {error}")
        logger.error(f"Tried {len(urls_tried)} URLs: {urls_tried}")
    else:
        logger.info(f"Successfully found Swagger docs for {service_name} at: {success_url}")


# Configure some specific loggers to reduce noise
def configure_third_party_loggers():
    """Configure third-party library loggers to reduce noise"""

    # These libraries emit chatty DEBUG/INFO records; cap each at WARNING.
    noisy_loggers = (
        "urllib3.connectionpool",      # urllib3 connection-pool chatter
        "requests.packages.urllib3",   # requests' vendored urllib3
        "asyncio",                     # asyncio event-loop diagnostics
    )
    for logger_name in noisy_loggers:
        logging.getLogger(logger_name).setLevel(logging.WARNING)