"""
Logging infrastructure for document analysis module.

Provides centralized logging configuration with file rotation,
structured logging, and performance monitoring capabilities.
"""

import logging
import logging.handlers
import json
import sys
import time
import traceback
from datetime import datetime
from pathlib import Path
from typing import Dict, Any, Optional, Union
from contextlib import contextmanager
import threading
from functools import wraps


class StructuredFormatter(logging.Formatter):
    """Custom formatter for structured JSON logging."""

    # LogRecord attributes that are already represented in the base entry or
    # are internal bookkeeping; anything NOT in this set is treated as a
    # caller-supplied extra={...} field and copied into the JSON output.
    # 'message' / 'asctime' are written onto the record by any plain
    # Formatter.format() that handled the record first; 'taskName' was added
    # to LogRecord in Python 3.12 — all three must be excluded to avoid
    # duplicate/noise keys.
    _RESERVED_ATTRS = {
        'name', 'msg', 'args', 'levelname', 'levelno', 'pathname',
        'filename', 'module', 'lineno', 'funcName', 'created',
        'msecs', 'relativeCreated', 'thread', 'threadName',
        'processName', 'process', 'getMessage', 'exc_info',
        'exc_text', 'stack_info', 'message', 'asctime', 'taskName'
    }

    def format(self, record: logging.LogRecord) -> str:
        """Format log record as structured JSON.

        Args:
            record: The log record to serialize.

        Returns:
            A single-line JSON string with the standard fields, exception
            details when present, and any user-supplied extra attributes.
        """
        log_entry: Dict[str, Any] = {
            "timestamp": datetime.fromtimestamp(record.created).isoformat(),
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
            "module": record.module,
            "function": record.funcName,
            "line": record.lineno
        }

        # Add exception info if present
        if record.exc_info:
            exc_type, exc_value, _ = record.exc_info
            log_entry["exception"] = {
                # exc_type can be None if a bare (None, None, None) tuple
                # was passed; avoid AttributeError in that edge case.
                "type": exc_type.__name__ if exc_type is not None else None,
                "message": str(exc_value),
                "traceback": traceback.format_exception(*record.exc_info)
            }

        # Add extra fields supplied via logger.info(..., extra={...})
        for key, value in record.__dict__.items():
            if key not in self._RESERVED_ATTRS:
                log_entry[key] = value

        # default=str keeps non-JSON-serializable extras from crashing the
        # logging pipeline (they are stringified instead).
        return json.dumps(log_entry, ensure_ascii=False, default=str)


class PerformanceLogger:
    """Logger for performance monitoring and metrics.

    Wraps a standard ``logging.Logger`` and additionally accumulates
    per-operation timing statistics (count / total / min / max) in a
    thread-safe way.
    """

    def __init__(self, logger: logging.Logger):
        """Initialize the performance logger.

        Args:
            logger: Destination logger for timing messages.
        """
        self.logger = logger
        # Maps operation name -> running stats dict; guarded by _lock.
        self._metrics: Dict[str, Dict[str, float]] = {}
        self._lock = threading.Lock()

    def log_execution_time(self, operation: str, duration: float, **metadata):
        """Log execution time for an operation.

        Args:
            operation: Name of the operation
            duration: Execution time in seconds
            **metadata: Additional metadata to log
        """
        self.logger.info(
            f"Performance: {operation} completed in {duration:.3f}s",
            extra={
                "operation": operation,
                "duration_seconds": duration,
                "performance_metric": True,
                **metadata
            }
        )

        # Update internal metrics under the lock so concurrent callers
        # cannot interleave read-modify-write on the same stats dict.
        with self._lock:
            stats = self._metrics.setdefault(operation, {
                "count": 0,
                "total_duration": 0.0,
                "min_duration": float('inf'),
                "max_duration": 0.0
            })
            stats["count"] += 1
            stats["total_duration"] += duration
            stats["min_duration"] = min(stats["min_duration"], duration)
            stats["max_duration"] = max(stats["max_duration"], duration)

    def get_metrics_summary(self) -> Dict[str, Any]:
        """Get summary of collected performance metrics.

        Returns:
            Dictionary mapping operation name to count, total, average,
            min and max durations. Operations with zero samples are
            omitted (also avoids division by zero for the average).
        """
        with self._lock:
            summary = {}
            for operation, stats in self._metrics.items():
                if stats["count"] > 0:
                    summary[operation] = {
                        "count": stats["count"],
                        "total_duration": stats["total_duration"],
                        "avg_duration": stats["total_duration"] / stats["count"],
                        "min_duration": stats["min_duration"],
                        "max_duration": stats["max_duration"]
                    }
            return summary

    def reset_metrics(self):
        """Reset all collected metrics."""
        with self._lock:
            self._metrics.clear()

    @contextmanager
    def measure_time(self, operation: str, **metadata):
        """Context manager to measure execution time.

        Args:
            operation: Name of the operation
            **metadata: Additional metadata to log
        """
        # perf_counter() is monotonic and high-resolution; time.time() is
        # wall-clock and can jump backwards (e.g. NTP adjustment), which
        # would record negative or wildly wrong durations.
        start_time = time.perf_counter()
        try:
            yield
        finally:
            # Record the duration even if the wrapped code raised.
            duration = time.perf_counter() - start_time
            self.log_execution_time(operation, duration, **metadata)


def performance_timer(operation: str = None, **metadata):
    """Decorator factory that times each call of the wrapped function.

    Timing is delegated to the ``_performance_logger`` attached to the
    logger of the function's module; if none is attached, the function
    runs completely untimed.

    Args:
        operation: Name of operation (defaults to function name)
        **metadata: Additional metadata to log
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Build the label from the wrapped function when no explicit
            # operation name was supplied.
            label = operation if operation else f"{func.__module__}.{func.__name__}"
            perf = getattr(logging.getLogger(func.__module__), '_performance_logger', None)
            if perf is None:
                # No performance logger attached: call through untimed.
                return func(*args, **kwargs)
            with perf.measure_time(label, **metadata):
                return func(*args, **kwargs)
        return wrapper
    return decorator


def async_performance_timer(operation: str = None, **metadata):
    """Decorator factory to measure async function execution time.

    Bug fix: this factory was previously declared ``async def``, so
    ``@async_performance_timer(...)`` produced a coroutine object instead
    of a decorator and the wrapped function was never decorated. Only the
    inner ``wrapper`` must be async; the factory and ``decorator`` are
    plain functions, mirroring ``performance_timer``.

    Args:
        operation: Name of operation (defaults to function name)
        **metadata: Additional metadata to log
    """
    def decorator(func):
        @wraps(func)
        async def wrapper(*args, **kwargs):
            op_name = operation or f"{func.__module__}.{func.__name__}"
            logger = logging.getLogger(func.__module__)
            # Performance logger is attached dynamically by the logging
            # setup; absent it, run the coroutine untimed.
            perf_logger = getattr(logger, '_performance_logger', None)

            if perf_logger:
                with perf_logger.measure_time(op_name, **metadata):
                    return await func(*args, **kwargs)
            else:
                return await func(*args, **kwargs)
        return wrapper
    return decorator


class DocumentAnalysisLogger:
    """Centralized logger configuration for document analysis module.

    Builds a logger hierarchy rooted at "modules.document_analyzer": the
    root logger gets a console handler, a rotating plain-text file handler,
    and a rotating JSON (structured) file handler; each known sub-module
    gets its own named logger. Every configured logger also carries a
    PerformanceLogger on a private ``_performance_logger`` attribute, which
    the performance_timer decorators look up at call time.
    """

    def __init__(self, config: Dict[str, Any]):
        """Initialize logger with configuration.

        Args:
            config: Logging configuration dictionary. Keys read during
                setup: "level", "file", "max_size_mb", "backup_count",
                "format" — all optional with defaults.
        """
        self.config = config
        # Maps "root" or a module path (e.g. "core.analyzer") to its logger.
        self.loggers: Dict[str, logging.Logger] = {}
        self._setup_root_logger()
        self._setup_module_loggers()

    def _setup_root_logger(self):
        """Setup root logger configuration.

        Creates the log directory if needed, then attaches console,
        rotating text-file, and rotating JSON-file handlers to the
        package root logger.
        """
        # Get configuration values
        # NOTE(review): an invalid "level" string raises AttributeError here.
        level = getattr(logging, self.config.get("level", "INFO").upper())
        log_file = self.config.get("file", "logs/document_analyzer.log")
        max_size_mb = self.config.get("max_size_mb", 10)
        backup_count = self.config.get("backup_count", 5)
        format_string = self.config.get(
            "format",
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
        )

        # Create logs directory
        log_path = Path(log_file)
        log_path.parent.mkdir(parents=True, exist_ok=True)

        # Configure root logger
        root_logger = logging.getLogger("modules.document_analyzer")
        root_logger.setLevel(level)

        # Clear existing handlers so re-initialization does not stack
        # duplicate handlers (which would duplicate every emitted line)
        root_logger.handlers.clear()

        # Console handler
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setLevel(level)
        console_formatter = logging.Formatter(format_string)
        console_handler.setFormatter(console_formatter)
        root_logger.addHandler(console_handler)

        # File handler with rotation
        file_handler = logging.handlers.RotatingFileHandler(
            log_file,
            maxBytes=max_size_mb * 1024 * 1024,
            backupCount=backup_count,
            encoding='utf-8'
        )
        file_handler.setLevel(level)
        file_formatter = logging.Formatter(format_string)
        file_handler.setFormatter(file_formatter)
        root_logger.addHandler(file_handler)

        # JSON structured log handler for detailed logging; written next to
        # the main log as "<stem>_structured<suffix>" and fixed at DEBUG,
        # independent of the configured console/file level
        json_log_file = log_path.parent / f"{log_path.stem}_structured{log_path.suffix}"
        json_handler = logging.handlers.RotatingFileHandler(
            json_log_file,
            maxBytes=max_size_mb * 1024 * 1024,
            backupCount=backup_count,
            encoding='utf-8'
        )
        json_handler.setLevel(logging.DEBUG)
        json_formatter = StructuredFormatter()
        json_handler.setFormatter(json_formatter)
        root_logger.addHandler(json_handler)

        # Add performance logger to root logger as a dynamic attribute;
        # performance_timer/async_performance_timer read it via getattr
        root_logger._performance_logger = PerformanceLogger(root_logger)

        # Prevent propagation to avoid duplicate logs
        root_logger.propagate = False

        self.loggers["root"] = root_logger

    def _setup_module_loggers(self):
        """Setup specialized loggers for different modules.

        Child loggers get no handlers of their own; their records
        propagate up to the package root configured above.
        """
        modules = [
            "core.analyzer",
            "core.evaluator",
            "services.analysis_service",
            "services.storage_service",
            "utils.text_parser",
            "utils.metrics_calculator"
        ]

        for module in modules:
            logger_name = f"modules.document_analyzer.{module}"
            logger = logging.getLogger(logger_name)
            # Per-module performance logger, same dynamic-attribute
            # convention as the root logger
            logger._performance_logger = PerformanceLogger(logger)
            self.loggers[module] = logger

    def get_logger(self, module: Optional[str] = None) -> logging.Logger:
        """Get logger for specific module.

        Args:
            module: Module name (e.g., "core.analyzer"); unknown or None
                falls back to the package root logger

        Returns:
            Logger instance
        """
        if module and module in self.loggers:
            return self.loggers[module]
        return self.loggers["root"]

    def get_performance_logger(self, module: Optional[str] = None) -> Optional[PerformanceLogger]:
        """Get performance logger for specific module.

        Args:
            module: Module name

        Returns:
            PerformanceLogger instance, or None if the resolved logger
            has no ``_performance_logger`` attached
        """
        logger = self.get_logger(module)
        return getattr(logger, '_performance_logger', None)

    def set_level(self, level: Union[str, int], module: Optional[str] = None):
        """Set logging level for specific module or all modules.

        Args:
            level: Logging level (e.g., "INFO", logging.DEBUG)
            module: Module name (None for all modules)
        """
        if isinstance(level, str):
            level = getattr(logging, level.upper())

        if module and module in self.loggers:
            self.loggers[module].setLevel(level)
        else:
            # Unknown module names also fall through to "all loggers"
            for logger in self.loggers.values():
                logger.setLevel(level)

    def add_file_handler(self, file_path: str, level: str = "INFO", module: Optional[str] = None):
        """Add additional file handler.

        Args:
            file_path: Path to log file (parent directories are created)
            level: Logging level for this handler
            module: Module name (None for all modules)
        """
        log_path = Path(file_path)
        log_path.parent.mkdir(parents=True, exist_ok=True)

        handler = logging.FileHandler(log_path, encoding='utf-8')
        handler.setLevel(getattr(logging, level.upper()))
        formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
        )
        handler.setFormatter(formatter)

        if module and module in self.loggers:
            self.loggers[module].addHandler(handler)
        else:
            # NOTE: the same handler object is shared by every logger here
            for logger in self.loggers.values():
                logger.addHandler(handler)

    def log_system_info(self):
        """Log system information for debugging."""
        logger = self.get_logger()
        logger.info("Document Analysis Logger initialized")
        logger.info(f"Python version: {sys.version}")
        logger.info(f"Log level: {self.config.get('level', 'INFO')}")
        logger.info(f"Log file: {self.config.get('file')}")
        # NOTE(review): constant message — always reports True
        logger.info(f"Structured logging enabled: True")

    def get_log_stats(self) -> Dict[str, Any]:
        """Get logging statistics.

        Returns:
            Dictionary with per-logger level, handler count, disabled
            flag, and (when available) performance metrics summaries
        """
        stats = {
            "configured_loggers": len(self.loggers),
            "loggers": {}
        }

        for name, logger in self.loggers.items():
            stats["loggers"][name] = {
                "level": logging.getLevelName(logger.level),
                "handlers": len(logger.handlers),
                "disabled": logger.disabled
            }

            # Add performance metrics if available
            perf_logger = getattr(logger, '_performance_logger', None)
            if perf_logger:
                stats["loggers"][name]["performance_metrics"] = perf_logger.get_metrics_summary()

        return stats


# Module-level singleton set by initialize_logging(); stays None until the
# application initializes logging, and the accessor functions below fall
# back accordingly.
_global_logger: Optional[DocumentAnalysisLogger] = None


def initialize_logging(config: Dict[str, Any]) -> DocumentAnalysisLogger:
    """Initialize global logging configuration.

    Creates the module-wide DocumentAnalysisLogger singleton, emits the
    startup system-information messages, and returns the instance.

    Args:
        config: Logging configuration

    Returns:
        DocumentAnalysisLogger instance
    """
    global _global_logger
    # Publish the singleton first, then log startup info through it.
    _global_logger = DocumentAnalysisLogger(config)
    instance = _global_logger
    instance.log_system_info()
    return instance


def get_logger(module: str = None) -> logging.Logger:
    """Get logger instance.

    Args:
        module: Module name

    Returns:
        Logger instance
    """
    if _global_logger is not None:
        return _global_logger.get_logger(module)
    # Logging not yet initialized: hand out a bare stdlib logger under the
    # package hierarchy so early log calls are not lost.
    name = "modules.document_analyzer"
    if module:
        name = f"{name}.{module}"
    return logging.getLogger(name)


def get_performance_logger(module: str = None) -> Optional[PerformanceLogger]:
    """Get performance logger instance.

    Args:
        module: Module name

    Returns:
        PerformanceLogger instance, or None when logging has not been
        initialized yet
    """
    manager = _global_logger
    return manager.get_performance_logger(module) if manager is not None else None