"""
Caching layer for CodeMCP framework.

Provides multi-tier caching with support for memory, Redis, and database-backed caching.
Includes cache invalidation, TTL management, and performance optimization features.
"""

import asyncio
import json
import pickle
import hashlib
import time
from abc import ABC, abstractmethod
from typing import Dict, Any, List, Optional, Union, Set, Tuple
from datetime import datetime, timedelta
from collections import OrderedDict
import threading
import weakref

try:
    import redis.asyncio as redis
    REDIS_AVAILABLE = True
except ImportError:
    REDIS_AVAILABLE = False
    redis = None

from .database import DatabaseManager
from .models import CacheEntryModel, AnalysisResultModel
from ..core.config import Config, CacheConfig
from ..core.error_handler import CodeMCPError, log_info, log_warning, log_debug, handle_error


class CacheError(CodeMCPError):
    """Error raised by cache backends and the cache manager.

    A truthy ``cache_type`` is recorded in the error's ``details``
    mapping so callers can tell which backend failed.
    """

    def __init__(self, message: str, cache_type: str = None, **kwargs):
        super().__init__(message, "CACHE_ERROR", **kwargs)
        if not cache_type:
            return
        self.details["cache_type"] = cache_type


class CacheBackend(ABC):
    """Interface every cache backend must implement.

    All operations are coroutines so that network- and database-backed
    implementations share one contract with the in-memory backend.
    """

    @abstractmethod
    async def get(self, key: str) -> Optional[Any]:
        """Return the cached value for *key*, or ``None`` on a miss."""
        ...

    @abstractmethod
    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        """Store *value* under *key*; ``ttl`` is in seconds. Return success."""
        ...

    @abstractmethod
    async def delete(self, key: str) -> bool:
        """Remove *key*; return ``True`` if an entry was deleted."""
        ...

    @abstractmethod
    async def exists(self, key: str) -> bool:
        """Return ``True`` if *key* is present (and not expired)."""
        ...

    @abstractmethod
    async def clear(self) -> bool:
        """Remove every entry from the backend; return success."""
        ...

    @abstractmethod
    async def get_stats(self) -> Dict[str, Any]:
        """Return backend-specific statistics (hits, misses, size, ...)."""
        ...


class MemoryCacheBackend(CacheBackend):
    """In-memory cache backend with LRU eviction and per-key TTLs.

    The OrderedDict's iteration order tracks recency: the front holds the
    least-recently-used key, the back the most-recently-used. Every
    operation takes an RLock, and no method awaits while holding it, so
    the backend is safe to share across threads and tasks.
    """

    def __init__(self, max_size: int = 1000, default_ttl: int = 3600):
        """
        Args:
            max_size: Maximum number of entries before LRU eviction.
                Values <= 0 effectively cap the cache at one entry.
            default_ttl: TTL in seconds used when ``set`` receives no ttl.
        """
        self.max_size = max_size
        self.default_ttl = default_ttl
        self._cache = OrderedDict()      # key -> value, LRU-ordered
        self._expiry = {}                # key -> absolute expiry (epoch seconds)
        self._lock = threading.RLock()

        # Operation counters exposed through get_stats().
        self.stats = {
            "hits": 0,
            "misses": 0,
            "sets": 0,
            "deletes": 0,
            "evictions": 0,
            "size": 0
        }

    def _remove_entry(self, key: str) -> None:
        """Drop *key* from both maps and refresh the size counter (caller holds the lock)."""
        del self._cache[key]
        del self._expiry[key]
        self.stats["size"] = len(self._cache)

    def _is_expired(self, key: str) -> bool:
        """Return True if *key* has an expiry timestamp in the past."""
        return key in self._expiry and time.time() > self._expiry[key]

    async def get(self, key: str) -> Optional[Any]:
        """Return the value for *key*, or None on a miss or expired entry."""
        with self._lock:
            if key in self._cache:
                if self._is_expired(key):
                    # Expired entries are purged lazily on access.
                    self._remove_entry(key)
                    self.stats["misses"] += 1
                    return None

                # Promote to the most-recently-used position.
                self._cache.move_to_end(key)
                self.stats["hits"] += 1
                return self._cache[key]

            self.stats["misses"] += 1
            return None

    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        """Store *value* under *key* with a TTL, evicting LRU entries if full."""
        with self._lock:
            if ttl is None:
                ttl = self.default_ttl
            expiry_time = time.time() + ttl

            # Remove any existing entry so reinsertion lands at the MRU end.
            if key in self._cache:
                self._remove_entry(key)

            # Evict from the LRU end until there is room. The extra
            # `self._cache` guard prevents popitem() raising on an empty
            # dict when max_size <= 0 (the previous loop could raise
            # StopIteration inside a coroutine, surfacing as RuntimeError).
            while len(self._cache) >= self.max_size and self._cache:
                oldest_key, _ = self._cache.popitem(last=False)
                del self._expiry[oldest_key]
                self.stats["evictions"] += 1

            self._cache[key] = value
            self._expiry[key] = expiry_time

            self.stats["sets"] += 1
            self.stats["size"] = len(self._cache)
            return True

    async def delete(self, key: str) -> bool:
        """Remove *key*; return True if it was present."""
        with self._lock:
            if key not in self._cache:
                return False
            self._remove_entry(key)
            self.stats["deletes"] += 1
            return True

    async def exists(self, key: str) -> bool:
        """Return True if *key* is present and not expired (purges stale keys)."""
        with self._lock:
            if key not in self._cache:
                return False
            if self._is_expired(key):
                self._remove_entry(key)
                return False
            return True

    async def clear(self) -> bool:
        """Drop every entry; always returns True."""
        with self._lock:
            self._cache.clear()
            self._expiry.clear()
            self.stats["size"] = 0
            return True

    async def get_stats(self) -> Dict[str, Any]:
        """Return the counters plus derived metrics (hit ratio, current size)."""
        with self._lock:
            self.stats["size"] = len(self._cache)
            return {
                "backend_type": "memory",
                "max_size": self.max_size,
                "current_size": len(self._cache),
                "hit_ratio": self.stats["hits"] / max(self.stats["hits"] + self.stats["misses"], 1),
                **self.stats
            }

    async def cleanup_expired(self) -> int:
        """Remove all expired entries and return how many were removed."""
        with self._lock:
            now = time.time()
            expired_keys = [
                key for key, deadline in self._expiry.items()
                if now > deadline
            ]
            for key in expired_keys:
                self._remove_entry(key)
        return len(expired_keys)


class RedisCacheBackend(CacheBackend):
    """Redis cache backend.

    Values are serialized with pickle, so arbitrary Python objects can be
    cached. Security note: ``pickle.loads`` executes arbitrary code, so
    this backend must only ever point at a trusted Redis instance.

    On any operation failure the connection is marked stale
    (``is_connected = False``) so the next call transparently reconnects
    instead of reusing a dead client forever.
    """

    def __init__(self, redis_url: str, default_ttl: int = 3600):
        """
        Args:
            redis_url: Redis connection URL (e.g. ``redis://host:6379/0``).
            default_ttl: TTL in seconds used when ``set`` receives no ttl.

        Raises:
            CacheError: If the ``redis`` package is not importable.
        """
        if not REDIS_AVAILABLE:
            raise CacheError("Redis not available - install redis package")

        self.redis_url = redis_url
        self.default_ttl = default_ttl
        self.redis_client = None
        self.is_connected = False

        # Operation counters exposed through get_stats().
        self.stats = {
            "hits": 0,
            "misses": 0,
            "sets": 0,
            "deletes": 0,
            "connection_errors": 0
        }

    async def connect(self):
        """Connect to Redis and verify the link with a PING.

        Raises:
            CacheError: If the connection or PING fails.
        """
        try:
            # decode_responses=False: values are raw pickle bytes, not text.
            self.redis_client = redis.from_url(self.redis_url, decode_responses=False)
            # Test connection
            await self.redis_client.ping()
            self.is_connected = True
            log_info("Connected to Redis cache")
        except Exception as e:
            self.is_connected = False  # ensure _ensure_connected retries later
            self.stats["connection_errors"] += 1
            error_response = handle_error(e, {"operation": "redis_connect"})
            raise CacheError(f"Failed to connect to Redis: {error_response['message']}")

    async def disconnect(self):
        """Close the Redis connection if one is open."""
        if self.redis_client:
            await self.redis_client.close()
            self.is_connected = False
            log_info("Disconnected from Redis cache")

    async def _ensure_connected(self):
        """(Re)connect if the client is missing or marked stale."""
        if not self.is_connected or not self.redis_client:
            await self.connect()

    async def get(self, key: str) -> Optional[Any]:
        """Get value from Redis cache; None on a miss.

        Raises:
            CacheError: On connection or deserialization failure.
        """
        try:
            await self._ensure_connected()
            data = await self.redis_client.get(key)

            if data is not None:
                value = pickle.loads(data)
                self.stats["hits"] += 1
                return value

            self.stats["misses"] += 1
            return None

        except Exception as e:
            # Mark stale so the next call reconnects (fixes permanently
            # broken client after a dropped connection).
            self.is_connected = False
            self.stats["connection_errors"] += 1
            error_response = handle_error(e, {"operation": "redis_get", "key": key})
            raise CacheError(f"Failed to get from Redis: {error_response['message']}")

    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        """Set value in Redis with an expiry (SETEX).

        Raises:
            CacheError: On connection or serialization failure.
        """
        try:
            await self._ensure_connected()

            if ttl is None:
                ttl = self.default_ttl

            data = pickle.dumps(value)
            result = await self.redis_client.setex(key, ttl, data)

            self.stats["sets"] += 1
            return bool(result)

        except Exception as e:
            self.is_connected = False  # stale connection: reconnect next call
            self.stats["connection_errors"] += 1
            error_response = handle_error(e, {"operation": "redis_set", "key": key})
            raise CacheError(f"Failed to set in Redis: {error_response['message']}")

    async def delete(self, key: str) -> bool:
        """Delete *key*; True if a key was removed.

        Raises:
            CacheError: On connection failure.
        """
        try:
            await self._ensure_connected()
            result = await self.redis_client.delete(key)

            self.stats["deletes"] += 1
            return bool(result)

        except Exception as e:
            self.is_connected = False  # stale connection: reconnect next call
            self.stats["connection_errors"] += 1
            error_response = handle_error(e, {"operation": "redis_delete", "key": key})
            raise CacheError(f"Failed to delete from Redis: {error_response['message']}")

    async def exists(self, key: str) -> bool:
        """Return True if *key* exists in Redis.

        Raises:
            CacheError: On connection failure.
        """
        try:
            await self._ensure_connected()
            result = await self.redis_client.exists(key)
            return bool(result)

        except Exception as e:
            self.is_connected = False  # stale connection: reconnect next call
            self.stats["connection_errors"] += 1
            error_response = handle_error(e, {"operation": "redis_exists", "key": key})
            raise CacheError(f"Failed to check existence in Redis: {error_response['message']}")

    async def clear(self) -> bool:
        """Flush the entire current Redis database (FLUSHDB).

        Raises:
            CacheError: On connection failure.
        """
        try:
            await self._ensure_connected()
            result = await self.redis_client.flushdb()
            return bool(result)

        except Exception as e:
            self.is_connected = False  # stale connection: reconnect next call
            self.stats["connection_errors"] += 1
            error_response = handle_error(e, {"operation": "redis_clear"})
            raise CacheError(f"Failed to clear Redis: {error_response['message']}")

    async def get_stats(self) -> Dict[str, Any]:
        """Return server INFO fields merged with local counters.

        Never raises: on failure a degraded stats dict with an "error"
        field is returned instead.
        """
        try:
            await self._ensure_connected()
            info = await self.redis_client.info()

            return {
                "backend_type": "redis",
                "connected": self.is_connected,
                "redis_version": info.get("redis_version"),
                "used_memory": info.get("used_memory"),
                "used_memory_human": info.get("used_memory_human"),
                "connected_clients": info.get("connected_clients"),
                "total_commands_processed": info.get("total_commands_processed"),
                "hit_ratio": self.stats["hits"] / max(self.stats["hits"] + self.stats["misses"], 1),
                **self.stats
            }

        except Exception as e:
            self.is_connected = False  # stale connection: reconnect next call
            self.stats["connection_errors"] += 1
            return {
                "backend_type": "redis",
                "connected": False,
                "error": str(e),
                **self.stats
            }


class DatabaseCacheBackend(CacheBackend):
    """Cache backend persisted through the framework's database layer.

    Each entry is a CacheEntryModel row. JSON-serializable values are
    stored as JSON text; everything else falls back to pickle, carried
    through the text column via a lossless latin-1 round-trip.
    """

    def __init__(self, db_manager: DatabaseManager, default_ttl: int = 3600):
        """
        Args:
            db_manager: Database access layer used for all persistence.
            default_ttl: TTL in seconds used when ``set`` receives no ttl.
        """
        self.db_manager = db_manager
        self.default_ttl = default_ttl

        # Operation counters exposed through get_stats().
        self.stats = {
            "hits": 0,
            "misses": 0,
            "sets": 0,
            "deletes": 0,
            "database_errors": 0
        }

    @staticmethod
    def _encode(value: Any) -> Tuple[str, str]:
        """Serialize *value*, preferring JSON; return (payload, data_type)."""
        try:
            return json.dumps(value), "json"
        except (TypeError, ValueError):
            # latin-1 maps bytes 0-255 one-to-one, so pickle output
            # survives the str round-trip without loss.
            return pickle.dumps(value).decode('latin-1'), "pickle"

    @staticmethod
    def _decode(payload: str, data_type: str) -> Any:
        """Inverse of :meth:`_encode`."""
        if data_type == "pickle":
            return pickle.loads(payload.encode('latin-1'))
        return json.loads(payload)

    async def get(self, key: str) -> Optional[Any]:
        """Fetch *key*, expiring stale rows and updating access metadata.

        Raises:
            CacheError: On any database failure.
        """
        try:
            entry = await self.db_manager.get_by_id(CacheEntryModel, key)

            if not entry:
                self.stats["misses"] += 1
                return None

            if entry.expires_at and datetime.utcnow() > entry.expires_at:
                # Stale row: delete it and report a miss.
                await self.db_manager.delete(entry)
                self.stats["misses"] += 1
                return None

            # Bookkeeping for cache-usage analytics.
            await self.db_manager.update(entry,
                                         access_count=entry.access_count + 1,
                                         last_accessed=datetime.utcnow())

            value = self._decode(entry.cache_data, entry.data_type)
            self.stats["hits"] += 1
            return value

        except Exception as e:
            self.stats["database_errors"] += 1
            error_response = handle_error(e, {"operation": "db_cache_get", "key": key})
            raise CacheError(f"Failed to get from database cache: {error_response['message']}")

    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        """Insert or update the row for *key*.

        Raises:
            CacheError: On any database failure.
        """
        try:
            effective_ttl = self.default_ttl if ttl is None else ttl
            expires_at = datetime.utcnow() + timedelta(seconds=effective_ttl)

            cache_data, data_type = self._encode(value)

            existing_entry = await self.db_manager.get_by_id(CacheEntryModel, key)
            if existing_entry:
                await self.db_manager.update(existing_entry,
                                             cache_data=cache_data,
                                             data_type=data_type,
                                             expires_at=expires_at,
                                             data_size_bytes=len(cache_data),
                                             last_accessed=datetime.utcnow())
            else:
                await self.db_manager.create(CacheEntryModel(
                    cache_key=key,
                    cache_data=cache_data,
                    data_type=data_type,
                    expires_at=expires_at,
                    data_size_bytes=len(cache_data)
                ))

            self.stats["sets"] += 1
            return True

        except Exception as e:
            self.stats["database_errors"] += 1
            error_response = handle_error(e, {"operation": "db_cache_set", "key": key})
            raise CacheError(f"Failed to set in database cache: {error_response['message']}")

    async def delete(self, key: str) -> bool:
        """Delete the row for *key*; return the db layer's result.

        Raises:
            CacheError: On any database failure.
        """
        try:
            result = await self.db_manager.delete_by_id(CacheEntryModel, key)
            self.stats["deletes"] += 1
            return result

        except Exception as e:
            self.stats["database_errors"] += 1
            error_response = handle_error(e, {"operation": "db_cache_delete", "key": key})
            raise CacheError(f"Failed to delete from database cache: {error_response['message']}")

    async def exists(self, key: str) -> bool:
        """Return True if a non-expired row exists (purging stale rows).

        Raises:
            CacheError: On any database failure.
        """
        try:
            entry = await self.db_manager.get_by_id(CacheEntryModel, key)

            if not entry:
                return False

            if entry.expires_at and datetime.utcnow() > entry.expires_at:
                await self.db_manager.delete(entry)
                return False

            return True

        except Exception as e:
            self.stats["database_errors"] += 1
            error_response = handle_error(e, {"operation": "db_cache_exists", "key": key})
            raise CacheError(f"Failed to check existence in database cache: {error_response['message']}")

    async def clear(self) -> bool:
        """Delete every cache row.

        Raises:
            CacheError: On any database failure.
        """
        try:
            await self.db_manager.execute_query("DELETE FROM cache_entries")
            return True

        except Exception as e:
            self.stats["database_errors"] += 1
            error_response = handle_error(e, {"operation": "db_cache_clear"})
            raise CacheError(f"Failed to clear database cache: {error_response['message']}")

    async def get_stats(self) -> Dict[str, Any]:
        """Return row counts and size aggregates plus local counters.

        Never raises: on failure a degraded stats dict with an "error"
        field is returned instead.
        """
        try:
            total_entries = await self.db_manager.count(CacheEntryModel)

            # Aggregate payload sizes in a single query.
            result = await self.db_manager.execute_query(
                "SELECT SUM(data_size_bytes) as total_size, AVG(data_size_bytes) as avg_size FROM cache_entries"
            )
            row = result.fetchone()
            total_size = row[0] if row and row[0] else 0
            avg_size = row[1] if row and row[1] else 0

            return {
                "backend_type": "database",
                "total_entries": total_entries,
                "total_size_bytes": total_size,
                "avg_size_bytes": avg_size,
                "hit_ratio": self.stats["hits"] / max(self.stats["hits"] + self.stats["misses"], 1),
                **self.stats
            }

        except Exception as e:
            self.stats["database_errors"] += 1
            return {
                "backend_type": "database",
                "error": str(e),
                **self.stats
            }


class CacheManager:
    """
    Multi-tier cache manager with support for multiple backends.

    Provides automatic failover between cache backends and intelligent
    cache invalidation based on file changes and dependencies.

    Reads walk ``self.backends`` in order and return the first hit;
    writes and deletes are fanned out to every backend. Tag bookkeeping
    for invalidation lives only in this object's dictionaries, so tags
    are lost on restart even when a persistent backend is configured.
    """
    
    def __init__(self, config: Config = None, db_manager: DatabaseManager = None):
        """
        Args:
            config: Framework configuration; a default ``Config()`` is
                built when omitted.
            db_manager: Optional database layer; when provided, a
                database-backed tier is added during ``initialize()``.
        """
        self.config = config or Config()
        self.db_manager = db_manager
        
        # Cache backends (in order of preference)
        self.backends: List[CacheBackend] = []
        self.primary_backend: Optional[CacheBackend] = None
        
        # Cache invalidation (in-memory only; not persisted)
        self.invalidation_tags: Dict[str, Set[str]] = {}  # tag -> set of cache keys
        self.key_tags: Dict[str, Set[str]] = {}  # cache key -> set of tags
        
        # Statistics
        self.stats = {
            "total_requests": 0,
            "cache_hits": 0,
            "cache_misses": 0,
            "invalidations": 0,
            "backend_failures": 0
        }
        
        # Background cleanup task (asyncio.Task created by _start_cleanup_task)
        self._cleanup_task = None
        self._is_running = False  # gates the cleanup loop; cleared in close()
    
    async def initialize(self):
        """Initialize cache backends based on configuration.

        Backend order (and therefore read preference): Redis (when
        configured and importable), then in-memory, then database.
        No-op when caching is disabled in the configuration.

        Raises:
            CacheError: If backend setup fails.
        """
        cache_config = self.config.cache
        
        if not cache_config.enabled:
            log_warning("Caching is disabled in configuration")
            return
        
        try:
            # Initialize primary backend based on type
            if cache_config.type == "redis" and REDIS_AVAILABLE:
                if cache_config.redis_url:
                    backend = RedisCacheBackend(cache_config.redis_url, cache_config.ttl_seconds)
                    await backend.connect()
                    self.backends.append(backend)
                    self.primary_backend = backend
                    log_info("Initialized Redis cache backend")
                else:
                    log_warning("Redis cache type specified but no redis_url provided")
            
            # Always add memory cache as fallback
            # NOTE(review): max_size is an entry COUNT derived from a
            # memory budget in MB (~100 entries per MB heuristic) —
            # confirm this sizing is intended.
            memory_backend = MemoryCacheBackend(
                max_size=cache_config.max_memory_mb * 100,  # Rough estimate
                default_ttl=cache_config.ttl_seconds
            )
            self.backends.append(memory_backend)
            
            if not self.primary_backend:
                self.primary_backend = memory_backend
                log_info("Initialized memory cache backend as primary")
            
            # Add database cache if available
            if self.db_manager:
                db_backend = DatabaseCacheBackend(self.db_manager, cache_config.ttl_seconds)
                self.backends.append(db_backend)
                log_info("Initialized database cache backend")
            
            # Start background cleanup
            # NOTE(review): _is_running is set AFTER the task is created;
            # this works because create_task() does not run the coroutine
            # until control returns to the event loop, but setting the
            # flag first would be less fragile.
            await self._start_cleanup_task()
            self._is_running = True
            
            log_info(f"Cache manager initialized with {len(self.backends)} backends")
            
        except Exception as e:
            error_response = handle_error(e, {"operation": "cache_manager_init"})
            raise CacheError(f"Failed to initialize cache manager: {error_response['message']}")
    
    async def close(self):
        """Close cache manager and cleanup resources.

        Stops the background cleanup task and disconnects any Redis
        backends; memory/database backends need no explicit teardown.
        """
        self._is_running = False
        
        if self._cleanup_task:
            self._cleanup_task.cancel()
            try:
                await self._cleanup_task
            except asyncio.CancelledError:
                pass  # expected on cancellation
        
        # Close Redis connections
        for backend in self.backends:
            if isinstance(backend, RedisCacheBackend):
                await backend.disconnect()
        
        log_info("Cache manager closed")
    
    def _generate_cache_key(self, namespace: str, key: str) -> str:
        """Generate a cache key with namespace ("namespace:key")."""
        return f"{namespace}:{key}"
    
    async def get(self, key: str, namespace: str = "default") -> Optional[Any]:
        """Get value from cache with namespace support.

        Tries each backend in order and returns the first non-None hit.
        NOTE: a stored value of None is indistinguishable from a miss,
        and hits from later tiers are NOT promoted into earlier tiers.
        """
        cache_key = self._generate_cache_key(namespace, key)
        self.stats["total_requests"] += 1
        
        # Try each backend in order
        for backend in self.backends:
            try:
                value = await backend.get(cache_key)
                if value is not None:
                    self.stats["cache_hits"] += 1
                    log_debug(f"Cache hit for key: {cache_key} (backend: {type(backend).__name__})")
                    return value
            except Exception as e:
                # A failing backend is skipped, not fatal — the next
                # tier gets a chance to serve the key.
                self.stats["backend_failures"] += 1
                log_warning(f"Cache backend {type(backend).__name__} failed: {e}")
                continue
        
        self.stats["cache_misses"] += 1
        log_debug(f"Cache miss for key: {cache_key}")
        return None
    
    async def set(self, key: str, value: Any, ttl: Optional[int] = None, 
                 namespace: str = "default", tags: Optional[List[str]] = None) -> bool:
        """Set value in cache with optional TTL and tags.

        The value is written to EVERY backend; success means at least
        one backend accepted the write. Tags are recorded in memory for
        later invalidate_by_tags() calls.
        """
        cache_key = self._generate_cache_key(namespace, key)
        
        # Store in all available backends
        success = False
        for backend in self.backends:
            try:
                if await backend.set(cache_key, value, ttl):
                    success = True
            except Exception as e:
                self.stats["backend_failures"] += 1
                log_warning(f"Cache backend {type(backend).__name__} failed: {e}")
                continue
        
        # Store tags for invalidation
        if tags and success:
            self.key_tags[cache_key] = set(tags)
            for tag in tags:
                if tag not in self.invalidation_tags:
                    self.invalidation_tags[tag] = set()
                self.invalidation_tags[tag].add(cache_key)
        
        if success:
            log_debug(f"Cache set for key: {cache_key} (tags: {tags})")
        
        return success
    
    async def delete(self, key: str, namespace: str = "default") -> bool:
        """Delete key from cache.

        Removes the key from every backend and drops its tag tracking;
        returns True if at least one backend deleted an entry.
        """
        cache_key = self._generate_cache_key(namespace, key)
        
        # Remove from all backends
        success = False
        for backend in self.backends:
            try:
                if await backend.delete(cache_key):
                    success = True
            except Exception as e:
                self.stats["backend_failures"] += 1
                log_warning(f"Cache backend {type(backend).__name__} failed: {e}")
                continue
        
        # Clean up tag tracking (both directions of the mapping)
        if cache_key in self.key_tags:
            for tag in self.key_tags[cache_key]:
                if tag in self.invalidation_tags:
                    self.invalidation_tags[tag].discard(cache_key)
                    if not self.invalidation_tags[tag]:
                        del self.invalidation_tags[tag]
            del self.key_tags[cache_key]
        
        if success:
            log_debug(f"Cache deleted for key: {cache_key}")
        
        return success
    
    async def invalidate_by_tags(self, tags: List[str]) -> int:
        """Invalidate all cache entries with the given tags.

        Returns the number of entries actually deleted. Iterates over a
        copy of each tag's key set because delete() mutates the set.
        """
        invalidated_count = 0
        
        for tag in tags:
            if tag in self.invalidation_tags:
                cache_keys = self.invalidation_tags[tag].copy()
                for cache_key in cache_keys:
                    # Extract namespace and key (split on the first ":"
                    # to mirror _generate_cache_key's format)
                    if ":" in cache_key:
                        namespace, key = cache_key.split(":", 1)
                    else:
                        namespace, key = "default", cache_key
                    
                    if await self.delete(key, namespace):
                        invalidated_count += 1
        
        self.stats["invalidations"] += invalidated_count
        log_debug(f"Invalidated {invalidated_count} cache entries for tags: {tags}")
        return invalidated_count
    
    async def invalidate_by_pattern(self, pattern: str, namespace: str = "default") -> int:
        """Invalidate cache entries matching a pattern (Redis only).

        Other backends are left untouched; tag tracking for the deleted
        keys is NOT cleaned up here.
        """
        cache_pattern = self._generate_cache_key(namespace, pattern)
        invalidated_count = 0
        
        for backend in self.backends:
            if isinstance(backend, RedisCacheBackend):
                try:
                    await backend._ensure_connected()
                    # KEYS scans the whole keyspace; acceptable here but
                    # potentially slow on very large Redis databases.
                    keys = await backend.redis_client.keys(cache_pattern)
                    if keys:
                        deleted = await backend.redis_client.delete(*keys)
                        invalidated_count += deleted
                        log_debug(f"Invalidated {deleted} cache entries matching pattern: {cache_pattern}")
                except Exception as e:
                    log_warning(f"Pattern invalidation failed for Redis backend: {e}")
        
        self.stats["invalidations"] += invalidated_count
        return invalidated_count
    
    async def clear_namespace(self, namespace: str = "default") -> bool:
        """Clear all entries in a namespace.

        NOTE(review): "default" clears EVERYTHING across all backends,
        while any other namespace only performs a Redis pattern delete —
        memory/database entries in that namespace are left behind.
        """
        if namespace == "default":
            # Clear everything
            success = False
            for backend in self.backends:
                try:
                    if await backend.clear():
                        success = True
                except Exception as e:
                    log_warning(f"Clear failed for backend {type(backend).__name__}: {e}")
            
            # Clear tag tracking
            self.invalidation_tags.clear()
            self.key_tags.clear()
            
            return success
        else:
            # Pattern-based clear for specific namespace
            return await self.invalidate_by_pattern("*", namespace) > 0
    
    async def get_cache_stats(self) -> Dict[str, Any]:
        """Get comprehensive cache statistics.

        Returns the manager-level counters (plus derived hit ratio) and
        per-backend stats; a failing backend contributes an error dict
        instead of raising.
        """
        backend_stats = []
        for backend in self.backends:
            try:
                stats = await backend.get_stats()
                backend_stats.append(stats)
            except Exception as e:
                backend_stats.append({
                    "backend_type": type(backend).__name__,
                    "error": str(e)
                })
        
        total_hit_ratio = (
            self.stats["cache_hits"] / max(self.stats["total_requests"], 1)
        )
        
        return {
            "manager_stats": {
                **self.stats,
                "hit_ratio": total_hit_ratio,
                "backend_count": len(self.backends),
                "tag_count": len(self.invalidation_tags),
                "tracked_keys": len(self.key_tags)
            },
            "backend_stats": backend_stats
        }
    
    async def _start_cleanup_task(self):
        """Start background cleanup task.

        The loop purges expired entries from the memory backend(s) and,
        when a db_manager exists, from the database cache, then sleeps
        5 minutes between passes (1 minute after an error).
        """
        async def cleanup_loop():
            while self._is_running:
                try:
                    # Cleanup expired entries in memory cache
                    for backend in self.backends:
                        if isinstance(backend, MemoryCacheBackend):
                            removed = await backend.cleanup_expired()
                            if removed > 0:
                                log_debug(f"Removed {removed} expired entries from memory cache")
                    
                    # Cleanup expired database cache entries
                    if self.db_manager:
                        removed = await self.db_manager.cleanup_expired_cache()
                        if removed > 0:
                            log_debug(f"Removed {removed} expired entries from database cache")
                    
                    # Wait before next cleanup
                    await asyncio.sleep(300)  # 5 minutes
                    
                except asyncio.CancelledError:
                    break  # cooperative shutdown from close()
                except Exception as e:
                    log_warning(f"Cache cleanup error: {e}")
                    await asyncio.sleep(60)  # Wait 1 minute on error
        
        self._cleanup_task = asyncio.create_task(cleanup_loop())


# Module-level singleton, created lazily by get_cache_manager() and
# torn down by close_cache_manager().
_global_cache_manager = None


async def get_cache_manager(config: Config = None, db_manager: DatabaseManager = None) -> CacheManager:
    """Return the global cache manager instance, creating it on first call.

    Args:
        config: Used only when the singleton is first created; ignored
            on subsequent calls.
        db_manager: Same — only honored on first creation.

    Raises:
        CacheError: If initialization of a new manager fails.
    """
    global _global_cache_manager
    
    if _global_cache_manager is None:
        manager = CacheManager(config, db_manager)
        # Publish the singleton only after successful initialization so
        # a failed initialize() doesn't leave a broken half-initialized
        # manager behind (the next call can retry instead).
        await manager.initialize()
        _global_cache_manager = manager
    
    return _global_cache_manager


async def close_cache_manager():
    """Close and reset the global cache manager singleton, if one exists."""
    global _global_cache_manager
    
    if _global_cache_manager is not None:
        await _global_cache_manager.close()
        _global_cache_manager = None