"""
Logging Configuration for ArXiv Scraper Service

Comprehensive logging setup with multiple handlers, formatters, and monitoring integration.
"""

import functools
import json
import logging
import logging.handlers
import os
import sys
import time
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional, Dict, Any


class JSONFormatter(logging.Formatter):
    """JSON formatter for structured logging.

    Emits each record as a single-line JSON document with a fixed set of
    standard fields plus any caller-supplied ``extra={...}`` fields.
    """

    # Standard LogRecord attributes that must NOT be copied into the JSON
    # payload as "extra" fields.  Built once as a frozenset (O(1) lookup on
    # the hot path instead of an O(n) list rebuilt per record).  Includes
    # 'asctime' (set by Formatter.format) and 'taskName' (added to
    # LogRecord in Python 3.12), which the original list omitted.
    _RESERVED_ATTRS = frozenset({
        'name', 'msg', 'args', 'levelname', 'levelno', 'pathname',
        'filename', 'module', 'exc_info', 'exc_text', 'stack_info',
        'lineno', 'funcName', 'created', 'msecs', 'relativeCreated',
        'thread', 'threadName', 'processName', 'process', 'message',
        'asctime', 'taskName',
    })

    def format(self, record):
        """Format *record* as a JSON string.

        Args:
            record: The ``logging.LogRecord`` to serialize.

        Returns:
            A JSON string with timestamp (UTC ISO-8601), level, logger
            name, rendered message, source location, optional exception
            text, and any extra fields attached to the record.
        """
        log_entry = {
            'timestamp': datetime.fromtimestamp(record.created, timezone.utc).isoformat(),
            'level': record.levelname,
            'logger': record.name,
            'message': record.getMessage(),
            'module': record.module,
            'function': record.funcName,
            'line': record.lineno
        }

        # Add exception information if present
        if record.exc_info:
            log_entry['exception'] = self.formatException(record.exc_info)

        # Copy caller-supplied extra fields (anything not a standard attr).
        for key, value in record.__dict__.items():
            if key not in self._RESERVED_ATTRS:
                log_entry[key] = value

        # default=str so non-serializable extras degrade to their repr
        # instead of raising inside the logging call.
        return json.dumps(log_entry, default=str)


class ContextFilter(logging.Filter):
    """Filter that injects fixed key/value context into every record.

    Attached to a handler or logger, it copies each context entry onto the
    record as an attribute so formatters (e.g. JSONFormatter) can emit it.
    """

    def __init__(self, context: Dict[str, Any]):
        """Store the context mapping to apply to passing records."""
        super().__init__()
        self.context = context

    def filter(self, record):
        """Attach the context entries to *record*; never drop a record."""
        record.__dict__.update(self.context)
        return True


def setup_logging(level: str = 'INFO',
                 log_file: Optional[str] = None,
                 json_logging: bool = False,
                 max_file_size: int = 10485760,  # 10MB
                 backup_count: int = 5,
                 context: Optional[Dict[str, Any]] = None) -> logging.Logger:
    """
    Setup comprehensive logging configuration.

    Configures the root logger with a stdout handler, an optional rotating
    file handler, and a stderr handler for ERROR+ records.

    Args:
        level: Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
        log_file: Optional log file path
        json_logging: Whether to use JSON formatting
        max_file_size: Maximum log file size in bytes
        backup_count: Number of backup files to keep
        context: Additional context to include in logs

    Returns:
        Configured root logger

    Raises:
        AttributeError: If *level* is not a valid logging level name.
    """
    # Resolve the level name once instead of re-evaluating per handler.
    log_level = getattr(logging, level.upper())

    # Get root logger and drop any previously installed handlers so
    # repeated calls don't stack duplicate outputs.
    logger = logging.getLogger()
    logger.setLevel(log_level)
    logger.handlers.clear()

    # Create the formatter shared by all handlers.
    if json_logging:
        formatter = JSONFormatter()
    else:
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )

    # Build the context filter once; the same instance is shared by every
    # handler (ContextFilter is stateless apart from its mapping).
    context_filter = ContextFilter(context) if context else None

    def _attach(handler: logging.Handler, handler_level: int) -> None:
        # Common configuration for every handler added below.
        handler.setLevel(handler_level)
        handler.setFormatter(formatter)
        if context_filter is not None:
            handler.addFilter(context_filter)
        logger.addHandler(handler)

    # Console handler (stdout).
    _attach(logging.StreamHandler(sys.stdout), log_level)

    # Rotating file handler if a log file was specified.
    if log_file:
        log_path = Path(log_file)
        log_path.parent.mkdir(parents=True, exist_ok=True)
        _attach(
            logging.handlers.RotatingFileHandler(
                log_file,
                maxBytes=max_file_size,
                backupCount=backup_count,
                encoding='utf-8'
            ),
            log_level,
        )

    # Dedicated stderr handler for ERROR and above.
    # NOTE(review): ERROR+ records are also emitted by the stdout handler,
    # so they appear on both streams — preserved as existing behavior.
    _attach(logging.StreamHandler(sys.stderr), logging.ERROR)

    # Reduce noise from external libraries.
    logging.getLogger('urllib3').setLevel(logging.WARNING)
    logging.getLogger('requests').setLevel(logging.WARNING)
    logging.getLogger('sickle').setLevel(logging.INFO)
    logging.getLogger('schedule').setLevel(logging.INFO)

    return logger


def get_logger(name: str, context: Optional[Dict[str, Any]] = None) -> logging.Logger:
    """
    Get a logger with optional context.

    Args:
        name: Logger name
        context: Additional context to include in logs

    Returns:
        Configured logger
    """
    logger = logging.getLogger(name)

    # Falsy context (None or empty mapping) means no filter is attached.
    if not context:
        return logger

    # Attach a context filter only once per logger.
    already_filtered = any(
        isinstance(existing, ContextFilter) for existing in logger.filters
    )
    if not already_filtered:
        logger.addFilter(ContextFilter(context))

    return logger


class LogMetrics:
    """Collect and track logging metrics.

    Keeps per-level counters and the collection start time so that rates
    and runtime can be reported.
    """

    # Level names tracked by the counters; other names are ignored.
    _LEVELS = ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL')

    def __init__(self):
        """Start with zeroed counters and the current UTC time."""
        self.metrics = dict.fromkeys(self._LEVELS, 0)
        self.start_time = datetime.now(timezone.utc)

    def record_log(self, level: str):
        """Count one entry for *level*; unknown level names are ignored."""
        if level in self.metrics:
            self.metrics[level] += 1

    def get_metrics(self) -> Dict[str, Any]:
        """Return a snapshot: per-level counts, total, error rate, runtime."""
        total = sum(self.metrics.values())
        elapsed = datetime.now(timezone.utc) - self.start_time

        return {
            'log_counts': dict(self.metrics),
            'total_logs': total,
            # max(total, 1) guards against division by zero before any logs.
            'error_rate': self.metrics['ERROR'] / max(total, 1),
            'runtime_seconds': elapsed.total_seconds(),
            'start_time': self.start_time.isoformat()
        }

    def reset(self):
        """Zero all counters in place and restart the clock."""
        # update() keeps the same dict object alive for external holders.
        self.metrics.update(dict.fromkeys(self.metrics, 0))
        self.start_time = datetime.now(timezone.utc)


class MetricsHandler(logging.Handler):
    """Logging handler that feeds records into a LogMetrics collector.

    Produces no output of its own; it exists purely to count records
    by level as they flow through the logging system.
    """

    def __init__(self, metrics_collector: LogMetrics):
        """Keep a reference to the shared metrics collector."""
        super().__init__()
        self.metrics = metrics_collector

    def emit(self, record):
        """Record the level name of *record* in the collector."""
        level_name = record.levelname
        self.metrics.record_log(level_name)


def setup_file_logger(name: str, log_file: str, level: str = 'INFO',
                     max_file_size: int = 10485760, backup_count: int = 5) -> logging.Logger:
    """
    Setup a dedicated file logger.

    Args:
        name: Logger name
        log_file: Log file path
        level: Logging level
        max_file_size: Maximum file size
        backup_count: Number of backup files

    Returns:
        Configured file logger
    """
    file_logger = logging.getLogger(name)
    file_logger.setLevel(getattr(logging, level.upper()))
    # Drop any handlers from a previous call so output isn't duplicated.
    file_logger.handlers.clear()

    # Make sure the directory for the log file exists.
    Path(log_file).parent.mkdir(parents=True, exist_ok=True)

    rotating = logging.handlers.RotatingFileHandler(
        log_file,
        maxBytes=max_file_size,
        backupCount=backup_count,
        encoding='utf-8'
    )
    rotating.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    ))
    file_logger.addHandler(rotating)

    # Keep records out of the root logger's handlers.
    file_logger.propagate = False

    return file_logger


def configure_library_logging():
    """Configure logging for external libraries."""
    # Suppress noisy urllib3/requests internals.
    noisy = (
        'urllib3.connectionpool',
        'requests.packages.urllib3',
        'urllib3.util.retry',
    )
    for noisy_name in noisy:
        logging.getLogger(noisy_name).setLevel(logging.WARNING)

    # Set appropriate levels for important libraries.
    logging.getLogger('sickle').setLevel(logging.INFO)
    logging.getLogger('psycopg2').setLevel(logging.WARNING)
    logging.getLogger('schedule').setLevel(logging.INFO)


def log_function_call(func):
    """Decorator that logs entry/exit of *func* at DEBUG and errors at ERROR.

    Uses functools.wraps so the wrapped function keeps its __name__,
    __doc__, and other metadata (the original wrapper discarded them,
    which breaks introspection and stacked decorators).
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        logger = logging.getLogger(func.__module__)

        # Lazy %-style args: the message is only formatted if the
        # record is actually emitted at this level.
        logger.debug("Entering %s", func.__name__)

        try:
            result = func(*args, **kwargs)
            logger.debug("Exiting %s", func.__name__)
            return result
        except Exception as e:
            logger.error("Error in %s: %s", func.__name__, e)
            raise  # preserve the original traceback for callers

    return wrapper


def log_performance(func):
    """Decorator that logs *func*'s duration at INFO (ERROR on failure).

    Uses time.perf_counter() — a monotonic, high-resolution clock — instead
    of wall-clock datetime.now(), so measured durations cannot be skewed by
    system clock adjustments.  functools.wraps preserves the wrapped
    function's metadata.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        logger = logging.getLogger(func.__module__)
        start = time.perf_counter()

        try:
            result = func(*args, **kwargs)
        except Exception as e:
            duration = time.perf_counter() - start
            logger.error("%s failed after %.3fs: %s", func.__name__, duration, e)
            raise  # preserve the original traceback for callers

        duration = time.perf_counter() - start
        logger.info("%s completed in %.3fs", func.__name__, duration)
        return result

    return wrapper