from typing import Dict, Optional, Any
import os
import json
import numpy as np
from pathlib import Path

class DataStorage:
    """Manages local data storage for intermediate results on worker nodes.

    This class handles caching and retrieval of computation results,
    implementing efficient storage strategies for tensor data. Tensors are
    persisted as ``.npy`` files under ``cache_dir`` and tracked via a JSON
    index file (``index.json``) so the cache survives process restarts.
    When the cache is full, the oldest entries (by file mtime) are evicted.

    Note: this class is not thread-safe; callers must serialize access.
    """

    def __init__(self, cache_dir: str, max_cache_size: int):
        """Initialize the data storage system.

        Args:
            cache_dir: Directory for storing cached data (created if missing)
            max_cache_size: Maximum cache size in MB
        """
        self.cache_dir = Path(cache_dir)
        self.max_cache_size = max_cache_size
        self.cache_dir.mkdir(parents=True, exist_ok=True)

        # Maps data_id -> {'size', 'shape', 'dtype', 'timestamp', 'metadata'}.
        self.data_index: Dict[str, Dict[str, Any]] = {}
        # Running total of stored tensor sizes in MB; must stay in sync
        # with data_index (every index mutation adjusts this too).
        self.current_cache_size: float = 0

        # Load existing cache index if available
        self._load_index()

    def _load_index(self) -> None:
        """Load the cache index from disk.

        A missing index is normal (fresh cache); a corrupt/unreadable index
        is not fatal — the cache simply starts empty.
        """
        index_path = self.cache_dir / 'index.json'
        if index_path.exists():
            try:
                with open(index_path, 'r') as f:
                    self.data_index = json.load(f)
                # Recompute the running size total from the persisted entries.
                self.current_cache_size = sum(
                    info['size'] for info in self.data_index.values()
                )
            except Exception as e:
                print(f"Error loading cache index: {str(e)}")
                self.data_index = {}
                self.current_cache_size = 0

    def _save_index(self) -> None:
        """Save the cache index to disk (best-effort; errors are logged)."""
        index_path = self.cache_dir / 'index.json'
        try:
            with open(index_path, 'w') as f:
                json.dump(self.data_index, f)
        except Exception as e:
            print(f"Error saving cache index: {str(e)}")

    def store_tensor(self, data_id: str, tensor: np.ndarray,
                     metadata: Optional[Dict[str, Any]] = None) -> bool:
        """Store a tensor in the cache, evicting old entries if needed.

        Storing under an existing ``data_id`` replaces the previous tensor.

        Args:
            data_id: Unique identifier for the tensor
            tensor: NumPy array to store
            metadata: Additional information about the tensor

        Returns:
            bool: True if storage was successful (False if the tensor alone
                exceeds the cache limit, or on any I/O error)
        """
        try:
            # Calculate data size in MB
            data_size = tensor.nbytes / (1024 * 1024)

            # A tensor larger than the whole cache can never fit.
            if data_size > self.max_cache_size:
                return False

            # BUG FIX: when overwriting an existing entry, release its
            # accounted size first — otherwise current_cache_size drifts
            # upward on every overwrite. The old file is simply overwritten
            # by np.save below.
            if data_id in self.data_index:
                self.current_cache_size -= self.data_index[data_id]['size']
                del self.data_index[data_id]

            # Ensure we have enough space by removing old entries if necessary
            while (self.current_cache_size + data_size > self.max_cache_size and
                   self.data_index):
                self._remove_oldest_entry()

            # Save tensor to file
            file_path = self.cache_dir / f"{data_id}.npy"
            np.save(file_path, tensor)

            # Update index. Shape is stored as a list so in-memory entries
            # match what the JSON round-trip in _load_index produces.
            self.data_index[data_id] = {
                'size': data_size,
                'shape': list(tensor.shape),
                'dtype': str(tensor.dtype),
                # File mtime doubles as the eviction ordering key.
                'timestamp': os.path.getmtime(file_path),
                'metadata': metadata or {}
            }

            self.current_cache_size += data_size
            self._save_index()

            return True

        except Exception as e:
            print(f"Error storing tensor {data_id}: {str(e)}")
            return False

    def load_tensor(self, data_id: str) -> Optional[np.ndarray]:
        """Load a tensor from the cache.

        If the index references a file that no longer exists, the stale
        entry is dropped and its size is released from the accounting.

        Args:
            data_id: Identifier of the tensor to load

        Returns:
            Optional[np.ndarray]: The loaded tensor or None if not found
        """
        if data_id not in self.data_index:
            return None

        try:
            file_path = self.cache_dir / f"{data_id}.npy"
            if not file_path.exists():
                # BUG FIX: also release the accounted size, otherwise stale
                # entries permanently shrink the effective cache capacity.
                self.current_cache_size -= self.data_index[data_id]['size']
                del self.data_index[data_id]
                self._save_index()
                return None

            return np.load(file_path)

        except Exception as e:
            print(f"Error loading tensor {data_id}: {str(e)}")
            return None

    def get_metadata(self, data_id: str) -> Optional[Dict[str, Any]]:
        """Get metadata for a stored tensor.

        Args:
            data_id: Identifier of the tensor

        Returns:
            Optional[Dict[str, Any]]: Metadata dictionary or None if not found
        """
        return self.data_index.get(data_id, {}).get('metadata')

    def remove_tensor(self, data_id: str) -> bool:
        """Remove a tensor from the cache.

        Args:
            data_id: Identifier of the tensor to remove

        Returns:
            bool: True if removal was successful (False if unknown id or
                on I/O error)
        """
        if data_id not in self.data_index:
            return False

        try:
            # Remove file (it may already be gone; that is fine).
            file_path = self.cache_dir / f"{data_id}.npy"
            if file_path.exists():
                file_path.unlink()

            # Update cache size and index
            self.current_cache_size -= self.data_index[data_id]['size']
            del self.data_index[data_id]
            self._save_index()

            return True

        except Exception as e:
            print(f"Error removing tensor {data_id}: {str(e)}")
            return False

    def _remove_oldest_entry(self) -> None:
        """Evict the entry with the oldest timestamp from the cache."""
        if not self.data_index:
            return

        # Find oldest entry by stored file mtime.
        oldest_id = min(self.data_index,
                        key=lambda k: self.data_index[k]['timestamp'])
        self.remove_tensor(oldest_id)

    def get_cache_stats(self) -> Dict[str, Any]:
        """Get statistics about the cache usage.

        Returns:
            Dict[str, Any]: Cache statistics — current/max size in MB,
                usage percentage, tensor count, and per-tensor summaries
        """
        return {
            'current_size_mb': self.current_cache_size,
            'max_size_mb': self.max_cache_size,
            'usage_percent': (self.current_cache_size / self.max_cache_size) * 100
            if self.max_cache_size > 0 else 0,
            'num_tensors': len(self.data_index),
            'tensors': {
                data_id: {
                    'size_mb': info['size'],
                    'shape': info['shape'],
                    'dtype': info['dtype']
                }
                for data_id, info in self.data_index.items()
            }
        }