"""Cleaning pipeline for chaining multiple cleaners"""

from typing import List, Dict, Any, Optional
import json
import logging
from datetime import datetime, timezone
from pathlib import Path

import yaml

from .base_cleaner import BaseCleaner

# Module-level logger; handler/level configuration is left to the application
logger = logging.getLogger(__name__)


class CleaningPipeline:
    """Pipeline for executing multiple cleaning operations in sequence.

    Cleaners run in the order they were added; each receives the output of
    the previous one. Supports configuration loading from YAML/JSON files.
    """

    def __init__(self, config_path: Optional[str] = None):
        """Initialize the cleaning pipeline.

        Args:
            config_path: Optional path to configuration file (YAML or JSON)
        """
        self.cleaners: List[BaseCleaner] = []
        self.config: Dict[str, Any] = {}
        # 'start_time' is set lazily on the first call to process()
        self.stats: Dict[str, Any] = {
            'processed': 0,
            'errors': 0,
            'start_time': None,
            'cleaners': []
        }

        if config_path:
            self.load_config(config_path)

    def load_config(self, config_path: str):
        """Load configuration from file and build the pipeline from it.

        Best-effort: a missing file, unsupported extension or parse error is
        logged and leaves the current configuration/pipeline unchanged.

        Args:
            config_path: Path to configuration file (YAML or JSON)
        """
        path = Path(config_path)

        if not path.exists():
            logger.warning(f"Config file not found: {config_path}")
            return

        try:
            with open(path, 'r', encoding='utf-8') as f:
                if path.suffix in ('.yaml', '.yml'):
                    # safe_load returns None for an empty document; normalize
                    # to {} so later self.config.get(...) calls don't crash
                    self.config = yaml.safe_load(f) or {}
                elif path.suffix == '.json':
                    self.config = json.load(f)
                else:
                    logger.error(f"Unsupported config format: {path.suffix}")
                    return

            logger.info(f"Loaded configuration from {config_path}")
            self._build_pipeline_from_config()

        except Exception as e:
            # Boundary guard: configuration problems must not crash the caller
            logger.error(f"Failed to load config: {e}")

    def _build_pipeline_from_config(self):
        """Build the pipeline based on loaded configuration.

        Each entry of self.config['cleaners'] is a dict whose 'type' names a
        cleaner class in the src.cleaning package; the module name is assumed
        to be the lowercased class name. Entries that fail to import are
        logged and skipped.
        """
        # Hoisted out of the per-cleaner loop (was re-imported per entry)
        from importlib import import_module

        if 'cleaners' not in self.config:
            logger.warning("No cleaners defined in configuration")
            return

        for cleaner_config in self.config['cleaners']:
            cleaner_type = cleaner_config.get('type')
            if not cleaner_type:
                continue
            try:
                # Module expected at src.cleaning.<lowercased class name>
                module = import_module(f".{cleaner_type.lower()}",
                                       package='src.cleaning')
                cleaner_class = getattr(module, cleaner_type)
                # Each cleaner instance receives its own config section
                self.add_cleaner(cleaner_class(cleaner_config))
            except (ImportError, AttributeError) as e:
                logger.error(f"Failed to load cleaner {cleaner_type}: {e}")

    def add_cleaner(self, cleaner: BaseCleaner):
        """Add a cleaner to the end of the pipeline.

        Args:
            cleaner: BaseCleaner instance to add

        Raises:
            TypeError: If cleaner is not a BaseCleaner instance
        """
        if not isinstance(cleaner, BaseCleaner):
            raise TypeError(f"Expected BaseCleaner, got {type(cleaner)}")

        self.cleaners.append(cleaner)
        logger.info(f"Added {cleaner.name} to pipeline")

    def remove_cleaner(self, cleaner_name: str):
        """Remove all cleaners with the given name from the pipeline.

        Args:
            cleaner_name: Name of the cleaner(s) to remove
        """
        self.cleaners = [c for c in self.cleaners if c.name != cleaner_name]
        logger.info(f"Removed {cleaner_name} from pipeline")

    def process(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """Process data through all cleaners in the pipeline.

        A failing cleaner is logged and skipped; subsequent cleaners run on
        the last successful intermediate result. A '_cleaning_metadata' key
        describing the run is added to the returned dict.

        Args:
            data: Input data dictionary

        Returns:
            Cleaned data dictionary with added metadata. The input dict is
            shallow-copied, so the caller's top-level mapping is not mutated.
        """
        if not self.cleaners:
            logger.warning("No cleaners in pipeline")
            return data

        # Start timing on the first processed item
        if self.stats['start_time'] is None:
            self.stats['start_time'] = datetime.now(timezone.utc)

        # Shallow copy to preserve the caller's top-level dict
        cleaned_data = data.copy()

        # Per-cleaner audit trail included in the output metadata
        cleaning_log = []

        for cleaner in self.cleaners:
            try:
                cleaned_data = cleaner.process(cleaned_data)

                cleaning_log.append({
                    'cleaner': cleaner.name,
                    'status': 'success',
                    'timestamp': datetime.now(timezone.utc).isoformat()
                })

                logger.debug(f"{cleaner.name} completed")

            except Exception as e:
                # Keep going: one bad cleaner shouldn't lose the whole record
                logger.error(f"Error in {cleaner.name}: {e}")
                cleaning_log.append({
                    'cleaner': cleaner.name,
                    'status': 'error',
                    'error': str(e),
                    'timestamp': datetime.now(timezone.utc).isoformat()
                })
                self.stats['errors'] += 1

        # Add metadata about cleaning process
        cleaned_data['_cleaning_metadata'] = {
            'pipeline_version': self.config.get('version', '1.0'),
            'processed_at': datetime.now(timezone.utc).isoformat(),
            'cleaners_applied': [c.name for c in self.cleaners],
            'cleaning_log': cleaning_log
        }

        self.stats['processed'] += 1

        return cleaned_data

    def get_stats(self) -> Dict[str, Any]:
        """Get pipeline statistics.

        Returns:
            Dictionary with counters, per-cleaner stats and, once at least
            one item has been processed, timing information
        """
        stats = self.stats.copy()
        stats['cleaners'] = [c.get_stats() for c in self.cleaners]

        if self.stats['start_time']:
            elapsed = (datetime.now(timezone.utc)
                       - self.stats['start_time']).total_seconds()
            stats['elapsed_seconds'] = elapsed
            if self.stats['processed'] > 0:
                stats['avg_processing_time'] = elapsed / self.stats['processed']

        return stats

    def reset_stats(self):
        """Reset pipeline statistics (the cleaner list itself is untouched)"""
        self.stats = {
            'processed': 0,
            'errors': 0,
            'start_time': None,
            'cleaners': []
        }
        logger.info("Pipeline statistics reset")

    def __len__(self) -> int:
        """Get the number of cleaners in the pipeline"""
        return len(self.cleaners)

    def __repr__(self) -> str:
        """String representation of the pipeline"""
        cleaner_names = [c.name for c in self.cleaners]
        return f"CleaningPipeline({' -> '.join(cleaner_names)})"