"""
SDMEngine: Core Sparse Distributed Memory implementation.

This module implements the foundational SDM architecture with:
- High-dimensional binary address space
- Hamming distance-based activation
- Storage and retrieval operations
- Content-addressable memory with <100ms retrieval
"""

import logging
import threading
import time
from concurrent.futures import ThreadPoolExecutor
from typing import Dict, List, Optional, Tuple, Union

import numpy as np

logger = logging.getLogger(__name__)


class SDMEngine:
    """
    Core Sparse Distributed Memory engine implementing Kanerva's SDM model.

    Features:
    - High-dimensional binary address space
    - Hamming distance-based activation: activate(x, hi) = 1 when dH(x, hi) ≤ H
    - Storage operations: Ci ← Ci + Σ Aij · transform(wj)
    - Retrieval operations: z = threshold(Σ Ai · Ci)
    - Content-addressable storage with <100ms retrieval target

    Thread safety: all mutations of the counter matrix and the statistics
    fields are serialized with an internal lock, so batch_store/batch_retrieve
    can safely run store/retrieve concurrently in worker threads.
    """

    def __init__(
        self,
        dimension: int = 10000,
        num_locations: int = 1000000,
        activation_radius: int = 451,
        data_dimension: int = 1000,
        threshold_factor: float = 0.5,
        max_workers: int = 8
    ):
        """
        Initialize SDM engine.

        Args:
            dimension: Dimensionality of address space (typically 10,000)
            num_locations: Number of hard locations in memory
            activation_radius: Hamming distance threshold H for activation
            data_dimension: Dimensionality of stored data vectors
            threshold_factor: Threshold factor for retrieval decisions
            max_workers: Number of threads for parallel operations
        """
        self.dimension = dimension
        self.num_locations = num_locations
        self.activation_radius = activation_radius
        self.data_dimension = data_dimension
        self.threshold_factor = threshold_factor
        self.max_workers = max_workers

        # Guards self.counters and the statistics fields. NumPy's
        # fancy-indexed "+=" is a read-modify-write and is NOT atomic,
        # so concurrent batch operations would otherwise lose updates.
        self._lock = threading.Lock()

        # Initialize hard locations (random binary vectors)
        self.hard_locations = self._generate_hard_locations()

        # Signed accumulator per (location, data-bit) pair
        self.counters = np.zeros((num_locations, data_dimension), dtype=np.int32)

        # Statistics tracking
        self.storage_count = 0
        self.retrieval_count = 0
        self.activation_stats = {'min': float('inf'), 'max': 0, 'avg': 0}

        logger.info(f"SDMEngine initialized with {num_locations} locations, "
                   f"dimension {dimension}, activation radius {activation_radius}")

    def _generate_hard_locations(self) -> np.ndarray:
        """Generate random binary hard locations.

        Uses a fixed seed so the address space is reproducible across runs
        (required for load_state compatibility between engine instances).
        """
        rng = np.random.RandomState(42)
        return rng.randint(0, 2, size=(self.num_locations, self.dimension), dtype=np.uint8)

    def _hamming_distance(self, vec1: np.ndarray, vec2: np.ndarray) -> int:
        """Compute Hamming distance between two binary vectors."""
        # int() so callers get a plain Python int, as annotated.
        return int(np.sum(vec1 != vec2))

    def _hamming_distance_batch(self, address: np.ndarray) -> np.ndarray:
        """Compute Hamming distances from address to all hard locations efficiently."""
        # XOR leaves a 1 exactly where bits differ; row sums are the distances.
        xor_result = np.bitwise_xor(address, self.hard_locations)
        return np.sum(xor_result, axis=1)

    def _find_activated_locations(self, address: np.ndarray) -> np.ndarray:
        """Return indices of hard locations within activation radius of address."""
        distances = self._hamming_distance_batch(address)
        return np.where(distances <= self.activation_radius)[0]

    def _transform_data(self, data: np.ndarray) -> np.ndarray:
        """
        Transform data for storage (bipolar encoding: 0 -> -1, 1 -> 1).

        The input is widened to a signed dtype first: for unsigned inputs
        (e.g. uint8) the expression ``2 * data - 1`` keeps the unsigned dtype
        and wraps 0 to 255 instead of -1, which would corrupt every stored
        zero bit when accumulated into the counters.

        Args:
            data: Binary data vector to transform

        Returns:
            int32 vector of +1/-1 values
        """
        return 2 * np.asarray(data, dtype=np.int32) - 1

    def store(self, address: np.ndarray, data: np.ndarray) -> bool:
        """
        Store data at given address using SDM storage rule.

        Storage operation: Ci ← Ci + Σ Aij · transform(wj)

        Args:
            address: Binary address vector of length self.dimension
            data: Binary data vector of length self.data_dimension

        Returns:
            True if storage was successful
        """
        # perf_counter: monotonic, high resolution; time.time can jump.
        start_time = time.perf_counter()

        try:
            # Validate inputs
            if len(address) != self.dimension:
                raise ValueError(f"Address dimension {len(address)} != {self.dimension}")
            if len(data) != self.data_dimension:
                raise ValueError(f"Data dimension {len(data)} != {self.data_dimension}")

            # Find activated locations
            activated_locations = self._find_activated_locations(address)

            if len(activated_locations) == 0:
                logger.warning("No locations activated for storage")
                return False

            # Transform data to bipolar (+1/-1) form for accumulation
            transformed_data = self._transform_data(data)

            # Counter and statistics updates must be atomic w.r.t. other
            # threads (batch_store runs this method concurrently).
            with self._lock:
                self.counters[activated_locations] += transformed_data
                self.storage_count += 1
                self._update_activation_stats(len(activated_locations))

            storage_time = (time.perf_counter() - start_time) * 1000
            logger.debug(f"Storage completed in {storage_time:.2f}ms, "
                        f"activated {len(activated_locations)} locations")

            return True

        except Exception as e:
            # Best-effort API: callers get False rather than an exception.
            logger.error(f"Storage failed: {e}")
            return False

    def retrieve(self, address: np.ndarray) -> Optional[np.ndarray]:
        """
        Retrieve data from given address using SDM retrieval rule.

        Retrieval operation: z = threshold(Σ Ai · Ci)

        Args:
            address: Binary address vector of length self.dimension

        Returns:
            Retrieved binary data vector or None if retrieval fails
        """
        start_time = time.perf_counter()

        try:
            # Validate input
            if len(address) != self.dimension:
                raise ValueError(f"Address dimension {len(address)} != {self.dimension}")

            # Find activated locations
            activated_locations = self._find_activated_locations(address)

            if len(activated_locations) == 0:
                logger.warning("No locations activated for retrieval")
                return None

            # Snapshot the counter sum under the lock so a concurrent store
            # cannot interleave mid-read; update statistics while held.
            with self._lock:
                summed_counters = np.sum(self.counters[activated_locations], axis=0)
                self.retrieval_count += 1
                self._update_activation_stats(len(activated_locations))

            # Apply threshold to get binary output
            threshold = len(activated_locations) * self.threshold_factor
            retrieved_data = (summed_counters >= threshold).astype(np.uint8)

            retrieval_time = (time.perf_counter() - start_time) * 1000
            logger.debug(f"Retrieval completed in {retrieval_time:.2f}ms, "
                        f"activated {len(activated_locations)} locations")

            # Verify retrieval time constraint
            if retrieval_time > 100:
                logger.warning(f"Retrieval time {retrieval_time:.2f}ms exceeds 100ms target")

            return retrieved_data

        except Exception as e:
            logger.error(f"Retrieval failed: {e}")
            return None

    def batch_store(self, addresses: List[np.ndarray], data_list: List[np.ndarray]) -> List[bool]:
        """
        Store multiple address-data pairs in parallel.

        Args:
            addresses: List of binary address vectors
            data_list: List of binary data vectors

        Returns:
            List of success flags for each storage operation

        Raises:
            ValueError: If the two lists differ in length.
        """
        if len(addresses) != len(data_list):
            raise ValueError("Addresses and data lists must have same length")

        # store() serializes its counter updates internally, so concurrent
        # execution here is safe; the distance computation still parallelizes.
        with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
            futures = [executor.submit(self.store, addr, data)
                      for addr, data in zip(addresses, data_list)]
            results = [future.result() for future in futures]

        return results

    def batch_retrieve(self, addresses: List[np.ndarray]) -> List[Optional[np.ndarray]]:
        """
        Retrieve data from multiple addresses in parallel.

        Args:
            addresses: List of binary address vectors

        Returns:
            List of retrieved data vectors (None for failed retrievals)
        """
        with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
            futures = [executor.submit(self.retrieve, addr) for addr in addresses]
            results = [future.result() for future in futures]

        return results

    def _update_activation_stats(self, num_activated: int):
        """Update running min/max/avg activation counts.

        Caller must hold self._lock and must have already incremented the
        relevant operation counter (the incremental mean relies on it).
        """
        self.activation_stats['min'] = min(self.activation_stats['min'], num_activated)
        self.activation_stats['max'] = max(self.activation_stats['max'], num_activated)
        total_ops = self.storage_count + self.retrieval_count
        if total_ops > 0:
            self.activation_stats['avg'] = (
                (self.activation_stats['avg'] * (total_ops - 1) + num_activated) / total_ops
            )

    def get_stats(self) -> Dict:
        """Get engine statistics and performance metrics."""
        # Lock so the snapshot is internally consistent under concurrency.
        with self._lock:
            return {
                'storage_count': self.storage_count,
                'retrieval_count': self.retrieval_count,
                'activation_stats': self.activation_stats.copy(),
                'dimension': self.dimension,
                'num_locations': self.num_locations,
                'activation_radius': self.activation_radius,
                'data_dimension': self.data_dimension,
                'memory_usage_mb': self._estimate_memory_usage()
            }

    def _estimate_memory_usage(self) -> float:
        """Estimate memory usage in megabytes."""
        hard_locations_size = self.hard_locations.nbytes
        counters_size = self.counters.nbytes
        total_bytes = hard_locations_size + counters_size
        return total_bytes / (1024 * 1024)  # Convert to MB

    def clear_memory(self):
        """Clear all stored data (reset counters to zero)."""
        with self._lock:
            self.counters.fill(0)
            self.storage_count = 0
            self.retrieval_count = 0
            self.activation_stats = {'min': float('inf'), 'max': 0, 'avg': 0}
        logger.info("Memory cleared")

    def save_state(self, filepath: str):
        """Save SDM state to file.

        Note: np.savez_compressed appends '.npz' to *filepath* if it is
        missing; load_state compensates for this.
        """
        state = {
            'counters': self.counters,
            'hard_locations': self.hard_locations,
            'config': {
                'dimension': self.dimension,
                'num_locations': self.num_locations,
                'activation_radius': self.activation_radius,
                'data_dimension': self.data_dimension,
                'threshold_factor': self.threshold_factor
            },
            'stats': self.get_stats()
        }
        np.savez_compressed(filepath, **state)
        logger.info(f"SDM state saved to {filepath}")

    def load_state(self, filepath: str):
        """Load SDM state from file.

        Accepts either the exact '.npz' path on disk or the path originally
        passed to save_state: np.savez_compressed appends '.npz' itself, so
        without this the round-trip save_state(p)/load_state(p) would raise
        FileNotFoundError.
        """
        if not filepath.endswith('.npz'):
            filepath += '.npz'
        # allow_pickle is required because 'config'/'stats' are dict payloads;
        # only load files produced by save_state (pickle is not safe on
        # untrusted input).
        data = np.load(filepath, allow_pickle=True)
        self.counters = data['counters']
        self.hard_locations = data['hard_locations']

        # Update configuration
        config = data['config'].item()
        self.dimension = config['dimension']
        self.num_locations = config['num_locations']
        self.activation_radius = config['activation_radius']
        self.data_dimension = config['data_dimension']
        self.threshold_factor = config['threshold_factor']

        logger.info(f"SDM state loaded from {filepath}")