"""
Processing pipeline for executing preprocessing combinations.

This module provides the ProcessingPipeline class that orchestrates the execution
of preprocessing method combinations on data.
"""

import logging
import time
from dataclasses import dataclass
from threading import Event
from typing import Any, Callable, Dict, List, Optional, Tuple

import pandas as pd

from preprocessors import PreprocessorRegistry, PreprocessorInterface

from .exceptions import (
    ProcessingError, PreprocessorError, DataValidationError,
    ResourceError, UserCancellationError
)
from .validation import InputValidator
from .error_recovery import ErrorRecoveryManager, handle_data_operations


@dataclass
class ProcessingResult:
    """Result of processing a single combination.

    Produced by ProcessingPipeline.execute_combination. Failures are encoded
    via success=False plus error_message rather than raised exceptions.
    """
    # Identifier built from the combination, e.g. "1_2_3" ("empty" for []).
    combination_id: str
    # The preprocessor IDs that were applied, in order.
    combination: List[int]
    # True when every layer completed and post-step validation passed.
    success: bool
    # The fully processed DataFrame on success; None on failure.
    processed_data: Optional[pd.DataFrame]
    # Human-readable failure description; None on success.
    error_message: Optional[str]
    # Wall-clock duration of the attempt, in seconds.
    processing_time: float

@dataclass
class BatchProcessingResult:
    """Result of batch processing multiple combinations.

    Aggregates the per-combination ProcessingResult objects produced by
    ProcessingPipeline.batch_process and
    ProcessingPipeline.batch_process_with_memory_management.
    """
    # Number of combinations requested (including any skipped by cancellation).
    total_combinations: int
    # Count of combinations that completed successfully.
    successful_combinations: int
    # Count of combinations that failed.
    failed_combinations: int
    # Per-combination results keyed by combination_id.
    results: Dict[str, ProcessingResult]
    # Wall-clock duration of the whole batch, in seconds.
    total_processing_time: float
    # successful / (successful + failed) * 100.0; 0.0 when nothing processed.
    success_rate: float
    # True when a cancellation event stopped the batch early.
    cancelled: bool = False

class ProcessingPipelineError(Exception):
    """Raised for internal pipeline failures, e.g. invalid intermediate data."""


class ProcessingPipeline:
    """
    Pipeline for executing preprocessing method combinations.

    This class orchestrates the execution of preprocessing combinations,
    applying methods in the correct layer order and handling errors
    gracefully: a failing combination is reported as a failed
    ProcessingResult rather than aborting the whole run.
    """
    
    def __init__(self, preprocessor_registry: PreprocessorRegistry, 
                 error_recovery_manager: Optional[ErrorRecoveryManager] = None):
        """
        Initialize the processing pipeline.
        
        Args:
            preprocessor_registry (PreprocessorRegistry): Registry containing available preprocessors
            error_recovery_manager: Optional error recovery manager for handling errors;
                a default ErrorRecoveryManager is created when omitted
            
        Raises:
            ProcessingError: If initialization fails
        """
        try:
            if not isinstance(preprocessor_registry, PreprocessorRegistry):
                raise ProcessingError(
                    "Invalid preprocessor registry",
                    details=f"Expected PreprocessorRegistry, got {type(preprocessor_registry).__name__}"
                )
            
            self.registry = preprocessor_registry
            self.logger = logging.getLogger(__name__)
            self.error_recovery_manager = error_recovery_manager or ErrorRecoveryManager(self.logger)
            
            # Attach a default handler only when the application has not
            # already configured this logger.
            if not self.logger.handlers:
                handler = logging.StreamHandler()
                handler.setFormatter(logging.Formatter(
                    '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
                ))
                self.logger.addHandler(handler)
                self.logger.setLevel(logging.INFO)
                
            self.logger.info("ProcessingPipeline initialized successfully")
            
        except ProcessingError:
            raise
        except Exception as e:
            raise ProcessingError(
                "Failed to initialize processing pipeline",
                details=str(e)
            )
    
    @staticmethod
    def _failed_result(combination_id: str, combination: List[int],
                       error_message: str, processing_time: float) -> ProcessingResult:
        """Build a ProcessingResult describing a failed combination.
        
        Shared by execute_combination and both batch methods so the failure
        shape is constructed in exactly one place.
        """
        return ProcessingResult(
            combination_id=combination_id,
            # Defensive copy: never alias the caller's mutable list.
            combination=list(combination),
            success=False,
            processed_data=None,
            error_message=error_message,
            processing_time=processing_time
        )
    
    @handle_data_operations()
    def execute_combination(self, data: pd.DataFrame, combination: List[int]) -> ProcessingResult:
        """
        Execute a single preprocessing combination with comprehensive error handling.
        
        Args:
            data (pd.DataFrame): Input data to process (a copy is processed,
                the caller's DataFrame is never mutated)
            combination (List[int]): List of preprocessor IDs to apply in order
            
        Returns:
            ProcessingResult: Result of the processing operation. Failures are
            reported via success=False / error_message instead of raising.
        """
        start_time = time.time()
        combination_id = self.create_combination_id(combination)
        
        try:
            # Validate inputs using InputValidator
            validator = InputValidator()
            validator.validate_dataframe(data, allow_empty=False)
            
            if not combination:
                raise ProcessingError("Combination cannot be empty")
            
            # Fail fast if any requested method is unknown to the registry.
            available_methods = self.registry.get_available_methods()
            for method_id in combination:
                if method_id not in available_methods:
                    raise ProcessingError(f"Method ID {method_id} not available in registry")
            
            # Start with a copy so the caller's data is never mutated.
            current_data = data.copy()
            original_shape = current_data.shape
            
            self.logger.info("Processing combination %s: %s", combination_id, combination)
            
            # Apply each preprocessing method in sequence.
            for layer_index, processor_id in enumerate(combination):
                try:
                    processor = self.registry.get_processor(processor_id)
                    if processor is None:
                        raise PreprocessorError(
                            f"Preprocessor with ID {processor_id} not found in registry",
                            processor_id=processor_id,
                            layer_index=layer_index
                        )
                    
                    self.logger.debug(
                        "Applying layer %s: %s (ID: %s)",
                        layer_index + 1, processor.get_name(), processor_id
                    )
                    
                    # Sanity-check the data before and after each step so a
                    # misbehaving processor is caught at the layer it occurs.
                    self._validate_data_between_steps(current_data, layer_index)
                    processed_data = processor.process(current_data)
                    self._validate_processed_data(processed_data, current_data, processor)
                    
                    # Feed the result into the next layer.
                    current_data = processed_data
                    
                except Exception as e:
                    error_msg = (
                        f"Error in layer {layer_index + 1} "
                        f"(processor ID {processor_id}): {str(e)}"
                    )
                    self.logger.error("Combination %s failed: %s", combination_id, error_msg)
                    return self._failed_result(
                        combination_id, combination, error_msg, time.time() - start_time
                    )
            
            # A changed column count is legal (processors may add/drop
            # columns) but worth surfacing in the log.
            if current_data.shape[1] != original_shape[1]:
                self.logger.warning(
                    "Column count changed during processing: %s -> %s",
                    original_shape[1], current_data.shape[1]
                )
            
            processing_time = time.time() - start_time
            self.logger.info(
                "Combination %s completed successfully in %.3fs",
                combination_id, processing_time
            )
            
            return ProcessingResult(
                combination_id=combination_id,
                # Defensive copy: never alias the caller's mutable list.
                combination=list(combination),
                success=True,
                processed_data=current_data,
                error_message=None,
                processing_time=processing_time
            )
            
        except Exception as e:
            error_msg = f"Unexpected error processing combination {combination_id}: {str(e)}"
            self.logger.error(error_msg)
            return self._failed_result(
                combination_id, combination, error_msg, time.time() - start_time
            )
    
    def create_combination_id(self, combination: List[int]) -> str:
        """
        Create a unique identifier for a combination.
        
        Args:
            combination (List[int]): List of preprocessor IDs
            
        Returns:
            str: Underscore-joined IDs (e.g. "1_2_3"), or "empty" for an
            empty combination
        """
        if not combination:
            return "empty"
        
        return "_".join(map(str, combination))
    
    def get_combination_description(self, combination: List[int]) -> str:
        """
        Get a human-readable description of a combination.
        
        Args:
            combination (List[int]): List of preprocessor IDs
            
        Returns:
            str: "Layer N: <name>" segments joined by " -> "; unknown IDs are
            described rather than raising
        """
        if not combination:
            return "No processing"
        
        descriptions = []
        for i, processor_id in enumerate(combination):
            processor = self.registry.get_processor(processor_id)
            if processor:
                descriptions.append(f"Layer {i+1}: {processor.get_name()}")
            else:
                descriptions.append(f"Layer {i+1}: Unknown (ID: {processor_id})")
        
        return " -> ".join(descriptions)
    
    def _validate_data_between_steps(self, data: pd.DataFrame, layer_index: int) -> None:
        """
        Validate data between processing steps.
        
        Args:
            data (pd.DataFrame): Data to validate
            layer_index (int): Current layer index for error reporting
            
        Raises:
            ProcessingPipelineError: If the data is None, not a DataFrame,
                or empty
        """
        if data is None:
            raise ProcessingPipelineError(f"Data is None before layer {layer_index + 1}")
        
        if not isinstance(data, pd.DataFrame):
            raise ProcessingPipelineError(
                f"Data is not a DataFrame before layer {layer_index + 1}: {type(data)}"
            )
        
        if data.empty:
            raise ProcessingPipelineError(f"Data is empty before layer {layer_index + 1}")
    
    def _validate_processed_data(
        self, 
        processed_data: pd.DataFrame, 
        original_data: pd.DataFrame, 
        processor: PreprocessorInterface
    ) -> None:
        """
        Validate data after a processing step.
        
        Args:
            processed_data (pd.DataFrame): Data after processing
            original_data (pd.DataFrame): Data before processing
            processor (PreprocessorInterface): The processor that was applied
            
        Raises:
            ProcessingPipelineError: If the processor returned None, a
                non-DataFrame, or emptied a non-empty input
        """
        if processed_data is None:
            raise ProcessingPipelineError(
                f"Processor '{processor.get_name()}' returned None"
            )
        
        if not isinstance(processed_data, pd.DataFrame):
            raise ProcessingPipelineError(
                f"Processor '{processor.get_name()}' returned non-DataFrame: {type(processed_data)}"
            )
        
        if processed_data.empty and not original_data.empty:
            raise ProcessingPipelineError(
                f"Processor '{processor.get_name()}' returned empty DataFrame from non-empty input"
            )
    
    def validate_combination(self, combination: List[int]) -> Tuple[bool, Optional[str]]:
        """
        Validate that a combination can be executed.
        
        Args:
            combination (List[int]): List of preprocessor IDs to validate
            
        Returns:
            Tuple[bool, Optional[str]]: (is_valid, error_message); the
            message is None when the combination is valid
        """
        # Check the type before emptiness so a non-list container gets the
        # accurate error message.
        if not isinstance(combination, list):
            return False, "Combination must be a list"
        
        if not combination:
            return False, "Combination is empty"
        
        for i, processor_id in enumerate(combination):
            # bool is a subclass of int, but True/False are not valid IDs.
            if not isinstance(processor_id, int) or isinstance(processor_id, bool):
                return False, f"Processor ID at position {i} is not an integer: {processor_id}"
            
            processor = self.registry.get_processor(processor_id)
            if processor is None:
                return False, f"Processor with ID {processor_id} not found in registry"
        
        return True, None
    
    def batch_process(
        self, 
        data: pd.DataFrame, 
        combinations: List[List[int]],
        progress_callback: Optional[Callable[[int, int, str], None]] = None,
        cancellation_event: Optional[Event] = None
    ) -> BatchProcessingResult:
        """
        Process multiple combinations with progress tracking.
        
        Args:
            data (pd.DataFrame): Input data to process
            combinations (List[List[int]]): List of combinations to process
            progress_callback (Optional[Callable]): Called as
                callback(current, total, combination_id) before each combination
            cancellation_event (Optional[Event]): Set this event to stop
                processing before the next combination starts
            
        Returns:
            BatchProcessingResult: Results of batch processing; contains
            partial results when cancelled or after a critical error
        """
        start_time = time.time()
        results: Dict[str, ProcessingResult] = {}
        successful = 0
        failed = 0
        cancelled = False
        
        self.logger.info("Starting batch processing of %s combinations", len(combinations))
        
        try:
            for i, combination in enumerate(combinations):
                # Stop cleanly if the caller requested cancellation.
                if cancellation_event and cancellation_event.is_set():
                    self.logger.info(
                        "Batch processing cancelled at combination %s/%s",
                        i + 1, len(combinations)
                    )
                    cancelled = True
                    break
                
                combination_id = self.create_combination_id(combination)
                if progress_callback:
                    progress_callback(i + 1, len(combinations), combination_id)
                
                try:
                    result = self.execute_combination(data, combination)
                    results[combination_id] = result
                    
                    if result.success:
                        successful += 1
                        self.logger.debug("Combination %s succeeded", combination_id)
                    else:
                        failed += 1
                        self.logger.warning(
                            "Combination %s failed: %s", combination_id, result.error_message
                        )
                
                except Exception as e:
                    # execute_combination reports its own failures; this
                    # guards against truly unexpected errors.
                    error_msg = f"Unexpected error processing combination {combination_id}: {str(e)}"
                    self.logger.error(error_msg)
                    results[combination_id] = self._failed_result(
                        combination_id, combination, error_msg, 0.0
                    )
                    failed += 1
        
        except Exception as e:
            self.logger.error("Critical error during batch processing: %s", str(e))
            # Continue with partial results
        
        total_time = time.time() - start_time
        total_processed = successful + failed
        success_rate = (successful / total_processed * 100.0) if total_processed > 0 else 0.0
        
        self.logger.info(
            "Batch processing completed: %s successful, %s failed, "
            "%.1f%% success rate in %.2fs",
            successful, failed, success_rate, total_time
        )
        
        return BatchProcessingResult(
            total_combinations=len(combinations),
            successful_combinations=successful,
            failed_combinations=failed,
            results=results,
            total_processing_time=total_time,
            success_rate=success_rate,
            cancelled=cancelled
        )
    
    def batch_process_with_memory_management(
        self,
        data: pd.DataFrame,
        combinations: List[List[int]],
        max_memory_mb: int = 500,
        progress_callback: Optional[Callable[[int, int, str], None]] = None,
        cancellation_event: Optional[Event] = None
    ) -> BatchProcessingResult:
        """
        Process combinations with memory-efficient processing for large datasets.
        
        Behaves like batch_process, but additionally monitors process memory
        (when psutil is installed) and triggers garbage collection when usage
        exceeds max_memory_mb.
        
        Args:
            data (pd.DataFrame): Input data to process
            combinations (List[List[int]]): List of combinations to process
            max_memory_mb (int): Maximum memory usage in MB before cleanup
            progress_callback (Optional[Callable]): Callback for progress updates
            cancellation_event (Optional[Event]): Event to signal cancellation
            
        Returns:
            BatchProcessingResult: Results of batch processing
        """
        import gc
        
        # psutil is optional; without it we fall back to periodic GC only.
        try:
            import psutil
            memory_monitoring_available = True
        except ImportError:
            self.logger.warning("psutil not available, memory monitoring disabled")
            memory_monitoring_available = False
        
        start_time = time.time()
        results: Dict[str, ProcessingResult] = {}
        successful = 0
        failed = 0
        cancelled = False
        
        self.logger.info(
            "Starting memory-efficient batch processing of %s combinations "
            "(max memory: %sMB, monitoring: %s)",
            len(combinations), max_memory_mb, memory_monitoring_available
        )
        
        try:
            for i, combination in enumerate(combinations):
                # Stop cleanly if the caller requested cancellation.
                if cancellation_event and cancellation_event.is_set():
                    self.logger.info(
                        "Batch processing cancelled at combination %s/%s",
                        i + 1, len(combinations)
                    )
                    cancelled = True
                    break
                
                if memory_monitoring_available:
                    try:
                        # RSS in MB; collect garbage once when over the limit
                        # and warn if that was not enough.
                        memory_usage = psutil.Process().memory_info().rss / 1024 / 1024
                        if memory_usage > max_memory_mb:
                            self.logger.info(
                                "Memory usage (%.1fMB) exceeds limit, performing cleanup",
                                memory_usage
                            )
                            gc.collect()
                            
                            memory_usage = psutil.Process().memory_info().rss / 1024 / 1024
                            if memory_usage > max_memory_mb:
                                self.logger.warning(
                                    "Memory usage still high (%.1fMB) after cleanup",
                                    memory_usage
                                )
                    except Exception as e:
                        self.logger.warning("Memory monitoring failed: %s", e)
                
                # Periodic garbage collection even without memory monitoring.
                if i % 10 == 0:  # Every 10 combinations
                    gc.collect()
                
                combination_id = self.create_combination_id(combination)
                if progress_callback:
                    progress_callback(i + 1, len(combinations), combination_id)
                
                try:
                    result = self.execute_combination(data, combination)
                    # Failed results already carry processed_data=None, so the
                    # result can be stored as-is without an extra copy.
                    results[combination_id] = result
                    
                    if result.success:
                        successful += 1
                    else:
                        failed += 1
                
                except Exception as e:
                    error_msg = f"Unexpected error processing combination {combination_id}: {str(e)}"
                    self.logger.error(error_msg)
                    results[combination_id] = self._failed_result(
                        combination_id, combination, error_msg, 0.0
                    )
                    failed += 1
        
        except Exception as e:
            self.logger.error("Critical error during memory-efficient batch processing: %s", str(e))
        
        # Final cleanup
        gc.collect()
        
        total_time = time.time() - start_time
        total_processed = successful + failed
        success_rate = (successful / total_processed * 100.0) if total_processed > 0 else 0.0
        
        self.logger.info(
            "Memory-efficient batch processing completed: %s successful, %s failed, "
            "%.1f%% success rate in %.2fs",
            successful, failed, success_rate, total_time
        )
        
        return BatchProcessingResult(
            total_combinations=len(combinations),
            successful_combinations=successful,
            failed_combinations=failed,
            results=results,
            total_processing_time=total_time,
            success_rate=success_rate,
            cancelled=cancelled
        )
    
    def get_processing_summary(self, batch_result: BatchProcessingResult) -> str:
        """
        Generate a summary report of batch processing results.
        
        Args:
            batch_result (BatchProcessingResult): Results from batch processing
            
        Returns:
            str: Multi-line human-readable summary, including per-combination
            error messages for any failures
        """
        summary_lines = [
            "=== Batch Processing Summary ===",
            f"Total combinations: {batch_result.total_combinations}",
            f"Successful: {batch_result.successful_combinations}",
            f"Failed: {batch_result.failed_combinations}",
            f"Success rate: {batch_result.success_rate:.1f}%",
            f"Total processing time: {batch_result.total_processing_time:.2f}s",
        ]
        
        summary_lines.append(
            "Status: CANCELLED" if batch_result.cancelled else "Status: COMPLETED"
        )
        
        if batch_result.total_combinations > 0:
            avg_time = batch_result.total_processing_time / batch_result.total_combinations
            summary_lines.append(f"Average time per combination: {avg_time:.3f}s")
        
        # List the error message for every failed combination.
        if batch_result.failed_combinations > 0:
            summary_lines.append("\n=== Failed Combinations ===")
            for combo_id, result in batch_result.results.items():
                if not result.success:
                    summary_lines.append(f"- {combo_id}: {result.error_message}")
        
        return "\n".join(summary_lines)
    
    def get_registry_info(self) -> Dict[str, Any]:
        """
        Get information about the preprocessor registry.
        
        Returns:
            Dict[str, Any]: Processor count, available method IDs, and method
            names as reported by the registry
        """
        return {
            'processor_count': self.registry.get_processor_count(),
            'available_methods': self.registry.get_available_methods(),
            'method_names': self.registry.get_method_names()
        }