from typing import Any, Dict, List, Optional, Set

from .runtime_info import RuntimeInfo
from .task_assigner import TaskAssigner

class GarbageCollector:
    """Manages memory resources and implements garbage collection strategies.
    
    This class is responsible for tracking memory usage and freeing unnecessary
    intermediate results to prevent memory overflow in the distributed system.
    """
    
    def __init__(self, runtime_info: RuntimeInfo, task_assigner: TaskAssigner,
                 config: Dict[str, Any]):
        """Initialize the collector.
        
        Args:
            runtime_info: Runtime state provider (worker loads, data locations)
            task_assigner: Task assigner kept for scheduling integration
            config: Configuration dict; reads config['memory']['gc_threshold']
                and config['memory']['cache_size']
        """
        self.runtime_info = runtime_info
        self.task_assigner = task_assigner
        self.gc_threshold = config['memory']['gc_threshold']
        self.cache_size = config['memory']['cache_size']
        
        # Track data references
        self.data_refs: Dict[str, Set[str]] = {}  # data_id -> set of task_ids
        self.cached_data: Dict[str, float] = {}   # data_id -> size in MB
    
    def register_data_reference(self, data_id: str, task_id: str,
                              size: float) -> None:
        """Register a new data reference.
        
        Args:
            data_id: Identifier for the data
            task_id: Task that needs this data
            size: Size of the data in MB (recorded only on first registration;
                later calls do not overwrite the original size)
        """
        self.data_refs.setdefault(data_id, set()).add(task_id)
        self.cached_data.setdefault(data_id, size)
    
    def remove_data_reference(self, data_id: str, task_id: str) -> None:
        """Remove a data reference when a task no longer needs it.
        
        Args:
            data_id: Identifier for the data
            task_id: Task that no longer needs this data
        """
        refs = self.data_refs.get(data_id)
        if refs is not None and task_id in refs:
            refs.remove(task_id)
            
            # If no more references, consider for garbage collection
            if not refs:
                self._collect_garbage(data_id)
    
    def _collect_garbage(self, data_id: str) -> None:
        """Free memory for data that is no longer needed.
        
        Safe to call for data that was cached but never registered via
        register_data_reference (e.g. from _handle_memory_pressure).
        
        Args:
            data_id: Identifier for the data to be collected
        """
        # Remove from cache and free memory; pop() tolerates missing entries.
        data_size = self.cached_data.pop(data_id, None)
        if data_size is None:
            return
        self.data_refs.pop(data_id, None)
        
        # Notify workers to free the data. The size is passed explicitly
        # because the cache entry has already been removed above.
        workers = self.runtime_info.get_data_locations(data_id)
        for worker_id in workers:
            self._notify_worker_free_data(worker_id, data_id, data_size)
    
    def _notify_worker_free_data(self, worker_id: str, data_id: str,
                                 size: Optional[float] = None) -> None:
        """Notify a worker to free specific data and update its usage tracking.
        
        Args:
            worker_id: Worker identifier
            data_id: Identifier of data to free
            size: Size in MB of the freed data; falls back to the cache entry
                when not provided (backward-compatible default)
        """
        if size is None:
            size = self.cached_data.get(data_id, 0)
        # Update worker's memory usage tracking; clamp at zero so the
        # bookkeeping never goes negative.
        current_usage = self.runtime_info.get_worker_load(worker_id)
        self.runtime_info.update_resource_usage(
            worker_id=worker_id,
            memory_usage=max(0, current_usage - size)
        )
    
    def check_memory_pressure(self) -> None:
        """Check system memory pressure and trigger garbage collection if needed."""
        for worker_id, memory_usage in self.runtime_info.memory_usage.items():
            if memory_usage > self.cache_size * self.gc_threshold:
                self._handle_memory_pressure(worker_id)
    
    def _handle_memory_pressure(self, worker_id: str) -> None:
        """Handle memory pressure on a specific worker.
        
        Frees unreferenced data on the worker, smallest reference count first
        and largest size first, until usage drops 10% below the GC threshold.
        
        Args:
            worker_id: Worker experiencing memory pressure
        """
        # Get all data stored on this worker
        worker_data = [
            data_id for data_id, locations in self.runtime_info.data_locations.items()
            if worker_id in locations
        ]
        
        # Score data by reference count and size
        data_scores = [
            (data_id,
             len(self.data_refs.get(data_id, set())),
             self.cached_data.get(data_id, 0))
            for data_id in worker_data
        ]
        
        # Sort by reference count (ascending) and size (descending) so the
        # least-needed, largest data is freed first
        data_scores.sort(key=lambda x: (x[1], -x[2]))
        
        # Free data until memory usage is below threshold
        current_usage = self.runtime_info.get_worker_load(worker_id)
        target_usage = self.cache_size * (self.gc_threshold - 0.1)  # 10% below threshold
        
        for data_id, ref_count, size in data_scores:
            if current_usage <= target_usage:
                break
                
            if ref_count == 0:  # Only free unreferenced data
                self._collect_garbage(data_id)
                current_usage -= size
    
    def get_memory_stats(self) -> Dict[str, Any]:
        """Get current memory usage statistics.
        
        Returns:
            Dict[str, Any]: Totals plus per-worker usage and pressure flags
        """
        total_cached = sum(self.cached_data.values())
        worker_stats = {
            worker_id: {
                'usage': usage,
                'pressure': usage > self.cache_size * self.gc_threshold
            }
            for worker_id, usage in self.runtime_info.memory_usage.items()
        }
        
        return {
            'total_cached_mb': total_cached,
            'cache_size_mb': self.cache_size,
            'gc_threshold': self.gc_threshold,
            'worker_stats': worker_stats
        }