#!/usr/bin/env python3
"""
Performance Optimization and Caching for GSM Extension System

Provides intelligent caching mechanisms to optimize extension discovery,
framework detection, and dependency validation operations.
"""

import functools
import hashlib
import json
import logging
import pickle
import sqlite3
import time
from collections import defaultdict
from contextlib import contextmanager
from dataclasses import dataclass, field
from pathlib import Path
from threading import Lock, RLock
from typing import Any, Dict, List, Optional, Set, Tuple, Union


@dataclass
class CacheStats:
    """Aggregate counters describing cache performance."""
    hits: int = 0           # lookups served from the memory or disk tier
    misses: int = 0         # lookups that found no usable entry
    invalidations: int = 0  # entries removed explicitly or by dependency
    memory_usage: int = 0   # number of entries currently held in memory
    disk_usage: int = 0     # on-disk cache size, in MB

    @property
    def hit_rate(self) -> float:
        """Fraction of lookups served from cache; 0.0 before any lookup."""
        total = self.hits + self.misses
        if total == 0:
            return 0.0
        return self.hits / total


@dataclass
class CacheEntry:
    """A single cached value together with its bookkeeping metadata."""
    key: str
    value: Any
    timestamp: float                   # creation time, epoch seconds
    access_count: int = 0              # hits served by this entry
    last_access: float = field(default_factory=time.time)
    ttl: Optional[float] = None        # lifetime in seconds; None = no expiry
    dependencies: Set[str] = field(default_factory=set)

    @property
    def is_expired(self) -> bool:
        """Whether a TTL is set and the entry has outlived it."""
        return self.ttl is not None and self.age > self.ttl

    @property
    def age(self) -> float:
        """Seconds elapsed since the entry was created."""
        return time.time() - self.timestamp


class PerformanceCache:
    """High-performance two-tier (memory + disk) cache with LRU eviction.

    Values live in an in-process dict for fast lookups and are mirrored to
    pickle files on disk (with SQLite-backed metadata) so they survive
    restarts. Disk hits are promoted back into the memory tier.

    SECURITY NOTE(review): the disk tier deserializes with ``pickle`` —
    ``cache_dir`` must not be writable by untrusted parties, since loading
    an attacker-controlled pickle file executes arbitrary code.
    """

    # Sentinel distinguishing "no disk entry" from a legitimately cached None.
    _MISSING = object()

    def __init__(self,
                 max_memory_entries: int = 1000,
                 max_disk_size_mb: int = 100,
                 cache_dir: Optional[Path] = None,
                 default_ttl: Optional[float] = 3600):  # 1 hour default TTL
        """
        Args:
            max_memory_entries: Maximum entries held in the memory tier.
            max_disk_size_mb: Advisory disk budget in MB (reported in stats).
            cache_dir: Directory for pickle files and the metadata database;
                defaults to ``~/.gsm/cache``.
            default_ttl: TTL in seconds applied by :meth:`set` when the caller
                does not pass one; ``None`` disables expiry.
        """
        self.max_memory_entries = max_memory_entries
        self.max_disk_size_mb = max_disk_size_mb
        self.default_ttl = default_ttl
        self.logger = logging.getLogger(__name__)

        # Memory tier.
        self._memory_cache: Dict[str, CacheEntry] = {}
        self._memory_lock = RLock()

        # Disk tier.
        self.cache_dir = cache_dir or Path.home() / ".gsm" / "cache"
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self._disk_lock = Lock()

        # Statistics.
        self.stats = CacheStats()
        self._stats_lock = Lock()

        # Initialize database for metadata.
        self._init_cache_db()

    @contextmanager
    def _db(self):
        """Yield a metadata-DB connection, committing and closing on exit.

        ``with sqlite3.connect(...)`` alone only manages the transaction; it
        never closes the connection, which leaks file handles over time.
        This helper handles both commit/rollback and close.
        """
        conn = sqlite3.connect(str(self.cache_dir / "cache_metadata.db"))
        try:
            with conn:
                yield conn
        finally:
            conn.close()

    def _init_cache_db(self):
        """Create the metadata table and its indexes if they do not exist."""
        with self._db() as conn:
            conn.execute("""
                CREATE TABLE IF NOT EXISTS cache_entries (
                    key TEXT PRIMARY KEY,
                    timestamp REAL,
                    access_count INTEGER,
                    last_access REAL,
                    ttl REAL,
                    dependencies TEXT,
                    file_path TEXT,
                    file_size INTEGER
                )
            """)
            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_last_access ON cache_entries(last_access)
            """)
            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_timestamp ON cache_entries(timestamp)
            """)

    def get(self, key: str, default: Any = None) -> Any:
        """Return the cached value for *key*, or *default* on a miss.

        Checks the memory tier first, then disk (promoting disk hits back
        into memory with their stored TTL). Cached falsy values, including
        ``None``, are honored as hits.
        """
        with self._memory_lock:
            entry = self._memory_cache.get(key)
            if entry is not None:
                if not entry.is_expired:
                    entry.access_count += 1
                    entry.last_access = time.time()
                    self._update_stats(hit=True)
                    return entry.value
                # Drop the expired entry; fall through to try the disk tier.
                del self._memory_cache[key]

        hit = self._get_from_disk(key)
        if hit is not self._MISSING:
            value, ttl = hit
            # Promote to memory; the TTL clock restarts at promotion time.
            self._set_memory(key, value, ttl)
            self._update_stats(hit=True)
            return value

        self._update_stats(miss=True)
        return default

    def set(self, key: str, value: Any, ttl: Optional[float] = None,
            dependencies: Optional[Set[str]] = None):
        """Store *value* in both tiers.

        Args:
            key: Cache key.
            value: Any picklable value.
            ttl: TTL in seconds; ``None`` means "use the cache default".
                An explicit ``ttl=0`` is honored (expires immediately)
                rather than being coerced to the default.
            dependencies: Tags usable with :meth:`invalidate_by_dependency`.
        """
        if ttl is None:  # distinguish "unset" from an explicit ttl of 0
            ttl = self.default_ttl
        dependencies = dependencies or set()

        # Memory tier for speed, disk tier for persistence.
        self._set_memory(key, value, ttl, dependencies)
        self._set_disk(key, value, ttl, dependencies)

    def _set_memory(self, key: str, value: Any, ttl: Optional[float] = None,
                   dependencies: Optional[Set[str]] = None):
        """Insert into the memory tier, evicting LRU entries past capacity."""
        with self._memory_lock:
            self._memory_cache[key] = CacheEntry(
                key=key,
                value=value,
                timestamp=time.time(),
                ttl=ttl,
                dependencies=dependencies or set()
            )
            # Evict until within budget (normally at most one iteration).
            while len(self._memory_cache) > self.max_memory_entries:
                self._evict_memory()

    def _set_disk(self, key: str, value: Any, ttl: Optional[float] = None,
                 dependencies: Optional[Set[str]] = None):
        """Persist *value* as a pickle file and record its metadata row."""
        with self._disk_lock:
            try:
                file_path = self._get_cache_file_path(key)

                with open(file_path, 'wb') as f:
                    pickle.dump(value, f)

                self._update_disk_metadata(key, file_path, ttl, dependencies)

            except Exception as e:
                # Disk caching is best-effort; memory tier still has the value.
                self.logger.warning(f"Failed to cache to disk: {e}")

    def _get_from_disk(self, key: str):
        """Return ``(value, ttl)`` for *key*, or ``_MISSING`` if absent/expired."""
        with self._disk_lock:
            try:
                ttl = self._disk_entry_ttl(key)
                if ttl is self._MISSING:
                    return self._MISSING

                file_path = self._get_cache_file_path(key)
                if not file_path.exists():
                    return self._MISSING

                with open(file_path, 'rb') as f:
                    value = pickle.load(f)  # see class-level security note

                self._update_disk_access(key)
                return value, ttl

            except Exception as e:
                self.logger.warning(f"Failed to read from disk cache: {e}")
                return self._MISSING

    def _get_cache_file_path(self, key: str) -> Path:
        """Map a key to its pickle file; SHA-256 keeps names filesystem-safe."""
        key_hash = hashlib.sha256(key.encode()).hexdigest()
        return self.cache_dir / f"{key_hash}.pkl"

    def _update_disk_metadata(self, key: str, file_path: Path,
                             ttl: Optional[float], dependencies: Optional[Set[str]]):
        """Upsert the metadata row for a freshly written cache file."""
        with self._db() as conn:
            file_size = file_path.stat().st_size if file_path.exists() else 0
            deps_json = json.dumps(list(dependencies or []))
            now = time.time()

            conn.execute("""
                INSERT OR REPLACE INTO cache_entries 
                (key, timestamp, access_count, last_access, ttl, dependencies, file_path, file_size)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
            """, (key, now, 1, now, ttl, deps_json, str(file_path), file_size))

    def _disk_entry_ttl(self, key: str):
        """Return the stored TTL (may be ``None``) of a live disk entry.

        Returns ``_MISSING`` when no row exists or the entry has expired;
        expired rows are removed as a side effect.
        """
        with self._db() as conn:
            row = conn.execute(
                "SELECT timestamp, ttl FROM cache_entries WHERE key = ?", (key,)
            ).fetchone()

        if not row:
            return self._MISSING

        timestamp, ttl = row
        if ttl and time.time() - timestamp > ttl:
            # Clean up the expired entry eagerly.
            self._remove_disk_entry(key)
            return self._MISSING

        return ttl

    def _is_disk_entry_valid(self, key: str) -> bool:
        """Check whether a non-expired disk entry exists for *key*."""
        return self._disk_entry_ttl(key) is not self._MISSING

    def _update_disk_access(self, key: str):
        """Bump the access count and last-access time in the metadata DB."""
        with self._db() as conn:
            conn.execute("""
                UPDATE cache_entries 
                SET access_count = access_count + 1, last_access = ?
                WHERE key = ?
            """, (time.time(), key))

    def _evict_memory(self):
        """Evict the least-recently-used memory entry (caller holds the lock)."""
        if not self._memory_cache:
            return

        lru_key = min(self._memory_cache,
                      key=lambda k: self._memory_cache[k].last_access)
        del self._memory_cache[lru_key]

    def _remove_disk_entry(self, key: str):
        """Delete a key's pickle file and metadata row (best-effort)."""
        try:
            file_path = self._get_cache_file_path(key)
            if file_path.exists():
                file_path.unlink()

            with self._db() as conn:
                conn.execute("DELETE FROM cache_entries WHERE key = ?", (key,))
        except Exception as e:
            self.logger.warning(f"Failed to remove disk entry {key}: {e}")

    def invalidate(self, key: str):
        """Remove *key* from both tiers and count the invalidation."""
        with self._memory_lock:
            self._memory_cache.pop(key, None)

        self._remove_disk_entry(key)
        self._update_stats(invalidation=True)

    def invalidate_by_dependency(self, dependency: str):
        """Invalidate every entry tagged with *dependency* in either tier."""
        # Memory tier.
        with self._memory_lock:
            stale = [key for key, entry in self._memory_cache.items()
                     if dependency in entry.dependencies]
            for key in stale:
                del self._memory_cache[key]
                self._update_stats(invalidation=True)

        # Disk tier: fetch all rows first, then delete — avoids opening a
        # second connection while iterating an open cursor.
        with self._db() as conn:
            rows = conn.execute(
                "SELECT key, dependencies FROM cache_entries"
            ).fetchall()

        for key, deps_json in rows:
            try:
                deps = set(json.loads(deps_json))
            except (json.JSONDecodeError, TypeError):
                continue
            if dependency in deps:
                self._remove_disk_entry(key)
                self._update_stats(invalidation=True)

    def clear(self):
        """Remove every entry from both tiers."""
        with self._memory_lock:
            self._memory_cache.clear()

        with self._disk_lock:
            for file in self.cache_dir.glob("*.pkl"):
                try:
                    file.unlink()
                except OSError:
                    pass  # best-effort: file may already be gone

            with self._db() as conn:
                conn.execute("DELETE FROM cache_entries")

    def cleanup_expired(self):
        """Drop expired entries from both tiers."""
        current_time = time.time()

        # Memory tier.
        with self._memory_lock:
            expired = [key for key, entry in self._memory_cache.items()
                       if entry.is_expired]
            for key in expired:
                del self._memory_cache[key]

        # Disk tier: collect first, delete after the connection is closed.
        with self._db() as conn:
            rows = conn.execute(
                "SELECT key, timestamp, ttl FROM cache_entries"
            ).fetchall()

        for key, timestamp, ttl in rows:
            if ttl and current_time - timestamp > ttl:
                self._remove_disk_entry(key)

    def get_stats(self) -> CacheStats:
        """Return statistics, refreshing the memory/disk usage figures first."""
        with self._stats_lock:
            self.stats.memory_usage = len(self._memory_cache)

            try:
                disk_size = sum(f.stat().st_size for f in self.cache_dir.glob("*.pkl"))
                self.stats.disk_usage = disk_size // (1024 * 1024)  # bytes -> MB
            except Exception:
                pass  # usage figures are advisory; keep the last known value

            return self.stats

    def _update_stats(self, hit: bool = False, miss: bool = False, invalidation: bool = False):
        """Thread-safely increment the requested counters."""
        with self._stats_lock:
            if hit:
                self.stats.hits += 1
            if miss:
                self.stats.misses += 1
            if invalidation:
                self.stats.invalidations += 1

    @contextmanager
    def batch_operations(self):
        """Context manager hook for batch cache operations (currently a no-op)."""
        # Placeholder: deferred writes / coalesced metadata updates could go here.
        yield self


class CacheManager:
    """Global registry of named ``PerformanceCache`` instances."""

    def __init__(self):
        # Mapping of cache name -> cache instance, guarded by one lock.
        self.caches: Dict[str, PerformanceCache] = {}
        self._lock = Lock()

    def get_cache(self, name: str, **kwargs) -> PerformanceCache:
        """Return the cache registered under *name*, creating it on first use."""
        with self._lock:
            cache = self.caches.get(name)
            if cache is None:
                cache = PerformanceCache(**kwargs)
                self.caches[name] = cache
            return cache

    def clear_all(self):
        """Empty every managed cache."""
        with self._lock:
            for cache in self.caches.values():
                cache.clear()

    def cleanup_all_expired(self):
        """Drop expired entries from every managed cache."""
        with self._lock:
            for cache in self.caches.values():
                cache.cleanup_expired()

    def get_global_stats(self) -> Dict[str, CacheStats]:
        """Return per-cache performance statistics keyed by cache name."""
        with self._lock:
            stats = {}
            for name, cache in self.caches.items():
                stats[name] = cache.get_stats()
            return stats


# Module-level singleton; the ``cached`` decorator resolves caches through it.
cache_manager = CacheManager()


def cached(cache_name: str = "default", ttl: Optional[float] = None,
          dependencies: Optional[Set[str]] = None):
    """Decorator caching function results in a named managed cache.

    Args:
        cache_name: Name of the cache obtained from the global ``cache_manager``.
        ttl: Optional time-to-live for cached results, in seconds.
        dependencies: Optional dependency tags for targeted invalidation.

    Returns:
        A decorator that wraps the function with transparent result caching.
    """
    def decorator(func):
        # Sentinel so that legitimately cached None/falsy results count as
        # hits instead of being recomputed on every call.
        missing = object()

        @functools.wraps(func)  # preserve __name__/__doc__ of the wrapped func
        def wrapper(*args, **kwargs):
            cache = cache_manager.get_cache(cache_name)

            # Deterministic key from the fully-qualified name and arguments.
            key_data = {
                'func': f"{func.__module__}.{func.__name__}",
                'args': args,
                'kwargs': sorted(kwargs.items())
            }
            key = hashlib.sha256(json.dumps(key_data, sort_keys=True, default=str).encode()).hexdigest()

            result = cache.get(key, missing)
            if result is not missing:
                return result

            # Compute and cache the result.
            result = func(*args, **kwargs)
            cache.set(key, result, ttl=ttl, dependencies=dependencies)

            return result
        return wrapper
    return decorator


def cache_key(*args, **kwargs) -> str:
    """Build a deterministic SHA-256 key from positional and keyword args."""
    payload = json.dumps(
        {'args': args, 'kwargs': sorted(kwargs.items())},
        sort_keys=True,
        default=str,
    )
    return hashlib.sha256(payload.encode()).hexdigest()


def file_hash_key(file_path: Union[str, Path]) -> str:
    """Build a cache key for a file from its path, mtime, and size.

    Deliberately cheaper than hashing file contents: any normal edit changes
    mtime and/or size, which is enough for quick cache invalidation. Missing
    files get a distinct ``missing:<path>`` key.
    """
    path = Path(file_path)
    if not path.exists():
        return f"missing:{path}"

    stat = path.stat()
    fingerprint = {
        'path': str(path),
        'mtime': stat.st_mtime,
        'size': stat.st_size,
    }
    return hashlib.sha256(json.dumps(fingerprint, sort_keys=True).encode()).hexdigest()