"""
Redis缓存管理模块
提供多级缓存、缓存策略、性能监控等功能
"""

import asyncio
import functools
import hashlib
import json
import pickle
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from enum import Enum
from typing import Any, Dict, List, Optional, Union, Callable, TypeVar, Generic

import redis.asyncio as redis
from redis.asyncio import Redis, ConnectionPool
import structlog

from .config import SystemConfig
from .exceptions import CacheException, CacheConnectionError, CacheOperationError
from .logging import LogManager

T = TypeVar('T')

class CacheLevel(Enum):
    """Cache tier identifiers for the multi-level cache hierarchy."""
    L1_MEMORY = "l1_memory"      # in-process memory cache
    L2_REDIS = "l2_redis"        # Redis-backed cache
    L3_PERSISTENT = "l3_persistent"  # persistent cache (no implementation visible in this module)

class CacheStrategy(Enum):
    """Cache eviction / write-policy identifiers.

    NOTE(review): only LRU and LFU are consulted by MemoryCache._evict;
    the TTL and write-* members are not referenced in this module.
    """
    LRU = "lru"                  # evict least recently used
    LFU = "lfu"                  # evict least frequently used
    TTL = "ttl"                  # time-based expiry
    WRITE_THROUGH = "write_through"    # write-through
    WRITE_BACK = "write_back"          # write-back
    WRITE_AROUND = "write_around"      # write-around

@dataclass
class CacheConfig:
    """Configuration for a single cache instance."""
    ttl: int = 3600              # default time-to-live in seconds
    max_size: int = 1000         # maximum number of cache entries (MemoryCache only)
    strategy: CacheStrategy = CacheStrategy.LRU  # eviction strategy
    compress: bool = False        # whether to compress values (no compression code visible in this module)
    serialize_method: str = "json"  # serialization method: "json" or "pickle"
    namespace: str = "default"    # namespace prefix prepended to every key

@dataclass
class CacheStats:
    """Aggregated counters for one cache's operations."""
    hits: int = 0        # successful lookups
    misses: int = 0      # failed lookups
    sets: int = 0        # successful writes
    deletes: int = 0     # successful removals
    evictions: int = 0   # capacity-driven removals
    memory_usage: int = 0  # bytes used (populated externally)

    @property
    def hit_rate(self) -> float:
        """Fraction of lookups served from cache; 0.0 before any lookup."""
        lookups = self.hits + self.misses
        if not lookups:
            return 0.0
        return self.hits / lookups

class CacheSerializer:
    """Serialize/deserialize cache values to/from bytes.

    Supported methods:
      - "json": UTF-8 encoded JSON. ``default=str`` silently stringifies
        any non-JSON-serializable value — deliberate best-effort behavior.
      - "pickle": Python pickle. NOTE(security): never deserialize pickle
        data from an untrusted source — pickle.loads can execute arbitrary
        code. Only safe here if cache contents are fully trusted.
    """

    @staticmethod
    def serialize(data: Any, method: str = "json") -> bytes:
        """Serialize *data* to bytes using *method*.

        Raises:
            CacheOperationError: on an unknown method or serialization failure.
        """
        try:
            if method == "json":
                return json.dumps(data, ensure_ascii=False, default=str).encode('utf-8')
            elif method == "pickle":
                return pickle.dumps(data)
            else:
                raise ValueError(f"不支持的序列化方法: {method}")
        except Exception as e:
            # Chain the original exception so the root cause survives in tracebacks.
            raise CacheOperationError(f"序列化失败: {str(e)}") from e

    @staticmethod
    def deserialize(data: bytes, method: str = "json") -> Any:
        """Deserialize *data* (bytes) back into a Python object using *method*.

        Raises:
            CacheOperationError: on an unknown method or deserialization failure.
        """
        try:
            if method == "json":
                return json.loads(data.decode('utf-8'))
            elif method == "pickle":
                return pickle.loads(data)
            else:
                raise ValueError(f"不支持的序列化方法: {method}")
        except Exception as e:
            # Chain the original exception so the root cause survives in tracebacks.
            raise CacheOperationError(f"反序列化失败: {str(e)}") from e

class MemoryCache(Generic[T]):
    """In-process (L1) cache with lazy TTL expiry and pluggable eviction.

    Eviction honours CacheStrategy.LRU (least recently accessed) and
    CacheStrategy.LFU (least frequently accessed); any other strategy falls
    back to evicting the oldest-created entry.

    NOTE(review): there is no locking around the internal dicts — this
    assumes access from a single event loop without concurrent mutation.
    """

    def __init__(self, config: CacheConfig):
        self.config = config
        # namespaced key -> {'value', 'created_at', 'expires_at'}
        self._cache: Dict[str, Dict[str, Any]] = {}
        # namespaced key -> last access time (drives LRU eviction)
        self._access_times: Dict[str, datetime] = {}
        # namespaced key -> access count (drives LFU eviction)
        self._access_counts: Dict[str, int] = {}
        self.stats = CacheStats()

    async def get(self, key: str) -> Optional[T]:
        """Return the value stored under *key*, or None on miss or expiry."""
        full_key = f"{self.config.namespace}:{key}"

        if full_key in self._cache:
            entry = self._cache[full_key]

            # Lazy TTL enforcement: an expired entry is removed on first
            # access and reported as a miss.
            if entry['expires_at'] and datetime.now(timezone.utc) > entry['expires_at']:
                await self.delete(key)
                self.stats.misses += 1
                return None

            # Update access bookkeeping used by LRU/LFU eviction.
            self._access_times[full_key] = datetime.now(timezone.utc)
            self._access_counts[full_key] = self._access_counts.get(full_key, 0) + 1
            self.stats.hits += 1

            return entry['value']

        self.stats.misses += 1
        return None

    async def set(self, key: str, value: T, ttl: Optional[int] = None) -> bool:
        """Store *value* under *key*.

        Args:
            key: cache key (namespaced internally).
            value: value to store.
            ttl: expiry in seconds; None falls back to ``config.ttl``;
                0 means "no expiry".

        BUGFIX: the original used ``ttl or self.config.ttl``, which silently
        replaced an explicit falsy ttl (0) with the default TTL. An explicit
        ttl now always takes precedence.
        """
        full_key = f"{self.config.namespace}:{key}"

        effective_ttl = ttl if ttl is not None else self.config.ttl
        expires_at = None
        if effective_ttl:
            expires_at = datetime.now(timezone.utc) + timedelta(seconds=effective_ttl)

        # Make room before inserting a brand-new key at capacity; overwriting
        # an existing key never triggers eviction.
        if len(self._cache) >= self.config.max_size and full_key not in self._cache:
            await self._evict()

        self._cache[full_key] = {
            'value': value,
            'created_at': datetime.now(timezone.utc),
            'expires_at': expires_at
        }
        self._access_times[full_key] = datetime.now(timezone.utc)
        self._access_counts[full_key] = 1
        self.stats.sets += 1

        return True

    async def delete(self, key: str) -> bool:
        """Remove *key* and its bookkeeping; return True if it was present."""
        full_key = f"{self.config.namespace}:{key}"

        if full_key in self._cache:
            del self._cache[full_key]
            self._access_times.pop(full_key, None)
            self._access_counts.pop(full_key, None)
            self.stats.deletes += 1
            return True

        return False

    async def clear(self) -> bool:
        """Drop every entry and all access bookkeeping."""
        self._cache.clear()
        self._access_times.clear()
        self._access_counts.clear()
        return True

    async def _evict(self):
        """Evict a single entry according to the configured strategy."""
        if not self._cache:
            return

        if self.config.strategy == CacheStrategy.LRU:
            # Evict the least recently accessed key.
            victim = min(self._access_times.keys(), key=lambda k: self._access_times[k])
        elif self.config.strategy == CacheStrategy.LFU:
            # Evict the least frequently accessed key.
            victim = min(self._access_counts.keys(), key=lambda k: self._access_counts[k])
        else:
            # Fallback: evict the oldest-created entry.
            victim = min(self._cache.keys(), key=lambda k: self._cache[k]['created_at'])

        # delete() re-applies the namespace prefix, so strip it here.
        key = victim.split(':', 1)[1] if ':' in victim else victim
        await self.delete(key)
        self.stats.evictions += 1

class RedisCache:
    """Redis-backed (L2) cache; values pass through CacheSerializer.

    Every Redis failure is wrapped in CacheOperationError with the original
    exception chained (``from e``) so the root cause stays visible.
    """

    def __init__(self, config: CacheConfig, redis_client: Redis):
        self.config = config
        self.redis = redis_client
        self.serializer = CacheSerializer()
        self.stats = CacheStats()

    async def get(self, key: str) -> Optional[Any]:
        """Return the deserialized value for *key*, or None on miss."""
        try:
            full_key = f"{self.config.namespace}:{key}"
            data = await self.redis.get(full_key)

            if data:
                self.stats.hits += 1
                return self.serializer.deserialize(data, self.config.serialize_method)

            self.stats.misses += 1
            return None

        except Exception as e:
            raise CacheOperationError(f"Redis获取失败: {str(e)}") from e

    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        """Serialize and store *value*; uses SETEX when a TTL applies."""
        try:
            full_key = f"{self.config.namespace}:{key}"
            data = self.serializer.serialize(value, self.config.serialize_method)

            # NOTE(review): ``ttl or self.config.ttl`` means an explicit ttl=0
            # falls back to the default TTL — confirm that is intended.
            if ttl or self.config.ttl:
                await self.redis.setex(full_key, ttl or self.config.ttl, data)
            else:
                await self.redis.set(full_key, data)

            self.stats.sets += 1
            return True

        except Exception as e:
            raise CacheOperationError(f"Redis设置失败: {str(e)}") from e

    async def delete(self, key: str) -> bool:
        """Delete *key*; return True if Redis removed something."""
        try:
            full_key = f"{self.config.namespace}:{key}"
            result = await self.redis.delete(full_key)

            if result:
                self.stats.deletes += 1

            return bool(result)

        except Exception as e:
            raise CacheOperationError(f"Redis删除失败: {str(e)}") from e

    async def clear(self) -> bool:
        """Delete every key in this namespace.

        BUGFIX: the original used KEYS, which is O(N) over the entire
        keyspace and blocks the Redis server; SCAN iterates incrementally
        and is production-safe. Deletions are batched to bound command size.
        """
        try:
            pattern = f"{self.config.namespace}:*"
            batch: List[Any] = []
            async for found_key in self.redis.scan_iter(match=pattern):
                batch.append(found_key)
                if len(batch) >= 500:
                    await self.redis.delete(*batch)
                    batch = []

            if batch:
                await self.redis.delete(*batch)

            return True

        except Exception as e:
            raise CacheOperationError(f"Redis清空失败: {str(e)}") from e

    async def exists(self, key: str) -> bool:
        """Return True if *key* exists in Redis."""
        try:
            full_key = f"{self.config.namespace}:{key}"
            return bool(await self.redis.exists(full_key))
        except Exception as e:
            raise CacheOperationError(f"Redis检查存在失败: {str(e)}") from e

    async def expire(self, key: str, ttl: int) -> bool:
        """Set a TTL (seconds) on an existing key; True if the key exists."""
        try:
            full_key = f"{self.config.namespace}:{key}"
            return bool(await self.redis.expire(full_key, ttl))
        except Exception as e:
            raise CacheOperationError(f"Redis设置过期时间失败: {str(e)}") from e

class MultiLevelCache:
    """Two-tier cache: an in-memory L1 backed by an optional Redis L2."""

    def __init__(self,
                 l1_config: Optional[CacheConfig] = None,
                 l2_config: Optional[CacheConfig] = None,
                 redis_client: Optional[Redis] = None):
        # L1 always exists; default to a small, short-lived config.
        self.l1_cache = MemoryCache(l1_config or CacheConfig(max_size=500, ttl=300))

        # L2 is active only when both a Redis client and a config were supplied.
        if redis_client and l2_config:
            self.l2_cache = RedisCache(l2_config, redis_client)
        else:
            self.l2_cache = None

        self.logger = structlog.get_logger(__name__)

    async def get(self, key: str) -> Optional[Any]:
        """Look up *key* in L1 first, then L2; promote L2 hits back into L1."""
        hit = await self.l1_cache.get(key)
        if hit is not None:
            await self.logger.adebug("L1缓存命中", key=key)
            return hit

        if self.l2_cache is not None:
            hit = await self.l2_cache.get(key)
            if hit is not None:
                # Backfill L1 (uses L1's default TTL, not the original one).
                await self.l1_cache.set(key, hit)
                await self.logger.adebug("L2缓存命中", key=key)
                return hit

        await self.logger.adebug("缓存未命中", key=key)
        return None

    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        """Write to every active tier; True only if all writes succeeded."""
        ok = await self.l1_cache.set(key, value, ttl)
        if self.l2_cache is not None:
            ok = await self.l2_cache.set(key, value, ttl) and ok
        return ok

    async def delete(self, key: str) -> bool:
        """Delete from every active tier; True if any tier removed the key.

        NOTE(review): with no L2 configured this always returns True
        (the L2 result defaults to True) — preserved from the original.
        """
        removed_l1 = await self.l1_cache.delete(key)
        removed_l2 = True
        if self.l2_cache is not None:
            removed_l2 = await self.l2_cache.delete(key)
        return removed_l1 or removed_l2

    async def clear(self) -> bool:
        """Clear every active tier; True only if all clears succeeded."""
        ok = await self.l1_cache.clear()
        if self.l2_cache is not None:
            ok = await self.l2_cache.clear() and ok
        return ok

    def get_stats(self) -> Dict[str, CacheStats]:
        """Return per-tier CacheStats keyed by "l1" and (when active) "l2"."""
        per_level = {"l1": self.l1_cache.stats}
        if self.l2_cache:
            per_level["l2"] = self.l2_cache.stats
        return per_level

class CacheManager:
    """Top-level cache facade.

    Owns the Redis connection pool and a registry of per-namespace
    MultiLevelCache instances. Call ``await initialize()`` before use and
    ``await close()`` on shutdown.
    """

    def __init__(self):
        self.config = SystemConfig()
        self.log_manager = LogManager()
        self._redis_pool: Optional[ConnectionPool] = None
        self._redis_client: Optional[Redis] = None
        # namespace -> MultiLevelCache
        self._caches: Dict[str, MultiLevelCache] = {}
        self._initialized = False

    async def initialize(self):
        """Idempotently set up the Redis pool (when enabled) and verify connectivity.

        Raises:
            CacheConnectionError: if the Redis connection cannot be established.
        """
        if self._initialized:
            return

        try:
            if self.config.redis.enabled:
                self._redis_pool = ConnectionPool(
                    host=self.config.redis.host,
                    port=self.config.redis.port,
                    db=self.config.redis.database,
                    password=self.config.redis.password,
                    max_connections=self.config.redis.max_connections,
                    retry_on_timeout=True,
                    socket_timeout=self.config.redis.socket_timeout,
                    socket_connect_timeout=self.config.redis.socket_connect_timeout,
                    decode_responses=False  # keep raw bytes for the serializer layer
                )

                self._redis_client = Redis(connection_pool=self._redis_pool)

                # Fail fast if the server is unreachable.
                await self._redis_client.ping()

                self.log_manager.log_system_event(
                    "cache_manager_init",
                    "Redis连接初始化成功",
                    "info"
                )

            self._initialized = True

        except Exception as e:
            self.log_manager.log_system_event(
                "cache_manager_init_error",
                f"缓存管理器初始化失败: {str(e)}",
                "error"
            )
            raise CacheConnectionError(f"缓存管理器初始化失败: {str(e)}") from e

    async def close(self):
        """Release the Redis client and pool; safe to call more than once."""
        if self._redis_client:
            await self._redis_client.close()
            self._redis_client = None  # prevent accidental reuse of a closed client

        if self._redis_pool:
            await self._redis_pool.disconnect()
            self._redis_pool = None

        self._initialized = False

    def get_cache(self, namespace: str = "default",
                  l1_config: Optional[CacheConfig] = None,
                  l2_config: Optional[CacheConfig] = None) -> MultiLevelCache:
        """Return (creating on first use) the MultiLevelCache for *namespace*.

        Explicit configs are honoured only on first creation; later calls
        return the cached instance regardless of the configs passed.

        Raises:
            CacheException: if initialize() has not completed.
        """
        if not self._initialized:
            raise CacheException("缓存管理器未初始化")

        if namespace not in self._caches:
            if not l1_config:
                l1_config = CacheConfig(
                    namespace=f"{namespace}_l1",
                    max_size=self.config.cache.l1_max_size,
                    ttl=self.config.cache.l1_ttl
                )

            # L2 only when Redis is actually connected.
            if not l2_config and self._redis_client:
                l2_config = CacheConfig(
                    namespace=f"{namespace}_l2",
                    ttl=self.config.cache.l2_ttl,
                    serialize_method="json"
                )

            self._caches[namespace] = MultiLevelCache(
                l1_config=l1_config,
                l2_config=l2_config,
                redis_client=self._redis_client
            )

        return self._caches[namespace]

    def cache_decorator(self,
                        key_func: Callable = None,
                        ttl: int = 3600,
                        namespace: str = "default"):
        """Decorator factory that caches an async function's results.

        Args:
            key_func: optional callable (*args, **kwargs) -> cache key.
            ttl: TTL in seconds for cached results.
            namespace: cache namespace to store results in.

        BUGFIX: previously declared ``async def``, so calling it returned a
        coroutine instead of a decorator and ``@cache_decorator(...)`` was
        unusable; it is now a plain synchronous factory.

        BUGFIX: the default key used builtin ``hash()``, which is randomized
        per process (PYTHONHASHSEED), so keys never matched across restarts
        or workers and the Redis tier could never hit. The default key is
        now a SHA-256 digest of the repr'd arguments — stable across
        processes (assumes arguments have deterministic reprs).
        """
        def decorator(func: Callable):
            @functools.wraps(func)  # preserve the wrapped function's metadata
            async def wrapper(*args, **kwargs):
                if key_func:
                    cache_key = key_func(*args, **kwargs)
                else:
                    digest = hashlib.sha256(f"{args!r}{kwargs!r}".encode('utf-8')).hexdigest()
                    cache_key = f"{func.__name__}:{digest}"

                cache = self.get_cache(namespace)

                # Serve from cache when possible.
                cached_result = await cache.get(cache_key)
                if cached_result is not None:
                    return cached_result

                # Miss: compute, then populate the cache.
                result = await func(*args, **kwargs)
                await cache.set(cache_key, result, ttl)

                return result

            return wrapper
        return decorator

    async def get_global_stats(self) -> Dict[str, Any]:
        """Return per-namespace cache stats plus basic Redis server info."""
        stats = {}

        for namespace, cache in self._caches.items():
            stats[namespace] = cache.get_stats()

        if self._redis_client:
            try:
                redis_info = await self._redis_client.info()
                stats["redis"] = {
                    "used_memory": redis_info.get("used_memory", 0),
                    "connected_clients": redis_info.get("connected_clients", 0),
                    "total_commands_processed": redis_info.get("total_commands_processed", 0)
                }
            except Exception as e:
                # Best effort: stats collection must not raise.
                stats["redis"] = {"error": str(e)}

        return stats

# Module-level singleton; call ``await cache_manager.initialize()`` before use.
cache_manager = CacheManager()