"""
Cache Service for Market Data
=============================

提供一个统一的缓存层，用于存储高频查询的数据，如历史K线。
该服务基于Redis实现，并提供简单的get/set接口。

核心职责：
1. 封装Redis客户端的连接和操作
2. 提供异步的get/set方法
3. 统一处理数据的序列化和反序列化
4. 提供健康检查

作者: 架构组
创建日期: 2025-10-05
版本: v1.0
"""

import logging
import pickle
from typing import Optional, Any

import redis.asyncio as redis
from redis.asyncio.connection import ConnectionPool
from redis.exceptions import RedisError

from backend.redfire_framework.monitoring import increment

logger = logging.getLogger(__name__)


class CacheService:
    """
    Asynchronous Redis-backed cache service.

    Wraps a ``redis.asyncio`` client and provides get/set/delete with
    pickle (de)serialization, plus a health check. Failures are logged
    and reported via metrics; callers receive ``None``/``False`` instead
    of exceptions, so a cache outage never breaks the caller.

    NOTE(security): values are (de)serialized with ``pickle``. Never
    point this service at a Redis instance writable by untrusted
    parties — unpickling attacker-controlled bytes executes arbitrary
    code.
    """

    def __init__(self, connection_pool: ConnectionPool):
        """
        Initialize the cache service.

        Args:
            connection_pool: Redis connection pool (typically shared
                application-wide).
        """
        self.connection_pool = connection_pool
        self.client = redis.Redis(connection_pool=self.connection_pool)
        logger.info("CacheService initialized")

    async def get(self, key: str) -> Optional[Any]:
        """
        Fetch and deserialize a value from the cache.

        Args:
            key: Cache key.

        Returns:
            Optional[Any]: The deserialized value, or None when the key
            is missing, the Redis call fails, or the stored payload is
            corrupt (a corrupt payload is deleted as a side effect).
        """
        # Keep the try body minimal: only the Redis call can raise RedisError.
        try:
            cached_data = await self.client.get(key)
        except RedisError as e:
            logger.error("Failed to get cache for key '%s': %s", key, e, exc_info=True)
            increment("cache_service.get.errors")
            return None

        # Redis returns None for a missing key; use an explicit identity
        # check rather than truthiness so stored falsy payloads count as hits.
        if cached_data is None:
            increment("cache_service.get.miss")
            return None

        increment("cache_service.get.hit")
        try:
            return pickle.loads(cached_data)
        except (pickle.UnpicklingError, EOFError, AttributeError,
                ImportError, IndexError, TypeError) as e:
            # Per the pickle docs, loads() on a corrupt/stale payload may
            # raise any of these, not just UnpicklingError.
            logger.error("Failed to unpickle data for key '%s': %s", key, e, exc_info=True)
            increment("cache_service.unpickle.errors")
            # The payload is unusable; evict it so we don't fail repeatedly.
            await self.delete(key)
            return None

    async def set(self, key: str, value: Any, ttl: int) -> bool:
        """
        Serialize and store a value in the cache.

        Args:
            key: Cache key.
            value: Value to cache (must be pickle-serializable).
            ttl: Expiry time in seconds.

        Returns:
            bool: True on success, False if serialization or the Redis
            write failed.
        """
        # Serialize first, in its own try block, so a TypeError from the
        # redis client is never mis-counted as a pickling error.
        try:
            serialized_value = pickle.dumps(value)
        except (pickle.PicklingError, TypeError) as e:
            logger.error("Failed to pickle value for key '%s': %s", key, e, exc_info=True)
            increment("cache_service.pickle.errors")
            return False

        try:
            await self.client.set(key, serialized_value, ex=ttl)
        except RedisError as e:
            logger.error("Failed to set cache for key '%s': %s", key, e, exc_info=True)
            increment("cache_service.set.errors")
            return False

        increment("cache_service.set.success")
        return True

    async def delete(self, key: str) -> bool:
        """
        Delete a key from the cache.

        Args:
            key: Cache key.

        Returns:
            bool: True if the delete command was issued successfully
            (regardless of whether the key existed), False on Redis error.
        """
        try:
            await self.client.delete(key)
            increment("cache_service.delete.success")
            return True
        except RedisError as e:
            logger.error("Failed to delete cache for key '%s': %s", key, e, exc_info=True)
            increment("cache_service.delete.errors")
            return False

    async def health_check(self) -> dict:
        """
        Check the health of the Redis connection via PING.

        Returns:
            dict: ``{"status": "healthy"}`` on success, otherwise
            ``{"status": "unhealthy", "error": <reason>}``.
        """
        try:
            if await self.client.ping():
                return {"status": "healthy"}
            else:
                return {"status": "unhealthy", "error": "Ping failed"}
        except RedisError as e:
            logger.error("Cache service health check failed: %s", e)
            return {"status": "unhealthy", "error": str(e)}

    async def close(self) -> None:
        """
        Close the Redis client connection.

        Prefers ``aclose()`` (redis-py >= 5.0.1, where ``close()`` is
        deprecated) and falls back to ``close()`` on older versions.
        """
        aclose = getattr(self.client, "aclose", None)
        if aclose is not None:
            await aclose()
        else:
            await self.client.close()
        logger.info("CacheService connection closed")
