"""
增强的缓存管理器模块
提供多级缓存和语义缓存功能
"""

import os
import time
import pickle
import hashlib
import sys
import asyncio
import threading
import logging
from typing import Any, Dict, List, Optional, Tuple, Set
from abc import ABC, abstractmethod
from functools import lru_cache

# Module-level logger instance
logger = logging.getLogger(__name__)


class CacheBackend(ABC):
    """Abstract interface implemented by every cache backend."""

    @abstractmethod
    def get(self, key: str) -> Optional[Any]:
        """Return the cached value for *key*, or None on a miss."""

    @abstractmethod
    def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None:
        """Store *value* under *key*, optionally with a per-entry TTL in seconds."""

    @abstractmethod
    def delete(self, key: str) -> bool:
        """Remove *key*; return True if an entry was actually deleted."""

    @abstractmethod
    def clear(self) -> None:
        """Remove every entry held by this backend."""

    @abstractmethod
    def stats(self) -> Dict[str, Any]:
        """Return hit/miss statistics for this backend."""


class MemoryCacheBackend(CacheBackend):
    """In-memory cache backend with FIFO eviction.

    Entries are stored as ``key -> (value, timestamp, ttl)``.  Expired
    entries are dropped lazily on access.  All operations are guarded by
    an RLock, so the backend is safe to share across threads.
    """

    def __init__(self, default_ttl=3600, max_size=10000):
        # key -> (value, insertion timestamp, per-entry ttl or None)
        self._cache: Dict[str, Tuple[Any, float, Optional[float]]] = {}
        self._default_ttl = default_ttl
        self._max_size = max_size
        self._stats = {"hits": 0, "misses": 0, "sets": 0}
        self._lock = threading.RLock()

    def get(self, key: str) -> Optional[Any]:
        """Return the cached value for *key*, or None if absent/expired."""
        with self._lock:
            entry = self._cache.get(key)
            if entry is not None:
                value, timestamp, ttl = entry
                actual_ttl = ttl if ttl is not None else self._default_ttl
                if time.time() - timestamp < actual_ttl:
                    self._stats["hits"] += 1
                    return value
                # Expired: remove lazily and fall through to a miss.
                del self._cache[key]

            self._stats["misses"] += 1
            return None

    def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None:
        """Store *value* under *key*; evicts the oldest entry when full.

        Bug fix: only evict when inserting a NEW key.  Overwriting an
        existing key does not grow the cache, so the original code would
        needlessly drop an unrelated entry in that case.
        """
        with self._lock:
            if key not in self._cache and len(self._cache) >= self._max_size:
                # FIFO eviction: dicts preserve insertion order, so the
                # first key is the oldest insertion.
                oldest_key = next(iter(self._cache))
                del self._cache[oldest_key]

            self._cache[key] = (value, time.time(), ttl)
            self._stats["sets"] += 1

    def delete(self, key: str) -> bool:
        """Remove *key*; return True if an entry was deleted."""
        with self._lock:
            if key in self._cache:
                del self._cache[key]
                return True
            return False

    def clear(self) -> None:
        """Remove every entry (statistics are preserved)."""
        with self._lock:
            self._cache.clear()

    def stats(self) -> Dict[str, Any]:
        """Return counters plus derived hit rate and current size."""
        with self._lock:
            total_requests = self._stats["hits"] + self._stats["misses"]
            hit_rate = self._stats["hits"] / max(1, total_requests)
            return {
                **self._stats,
                "hit_rate": hit_rate,
                "total_requests": total_requests,
                "size": len(self._cache),
                "max_size": self._max_size
            }


class DiskCacheBackend(CacheBackend):
    """Disk-based cache backend.

    Each entry is a pickle file ``<key>.cache`` containing
    ``(value, timestamp, ttl)``.  I/O failures are logged at debug level
    and treated as cache misses / no-ops, so a broken disk never crashes
    the caller (the original swallowed them silently).
    """

    def __init__(self, cache_dir="./cache", default_ttl=86400, max_size=100000):
        self._cache_dir = cache_dir
        self._default_ttl = default_ttl
        self._max_size = max_size
        self._stats = {"hits": 0, "misses": 0, "sets": 0}
        os.makedirs(cache_dir, exist_ok=True)
        self._lock = threading.RLock()

    def _get_cache_path(self, key: str) -> str:
        # NOTE(review): the key is used verbatim as a file name; callers are
        # expected to pass filesystem-safe keys (DistributedCacheManager uses
        # MD5 hex digests).  A key containing path separators would escape
        # the cache directory — confirm no other callers exist.
        return os.path.join(self._cache_dir, f"{key}.cache")

    def _get_cache_size(self) -> int:
        """Return the number of .cache files currently on disk."""
        try:
            return len([f for f in os.listdir(self._cache_dir) if f.endswith('.cache')])
        except OSError as e:
            logger.debug("Failed to list cache dir %s: %s", self._cache_dir, e)
            return 0

    def _cleanup_old_cache(self) -> None:
        """Delete the oldest cache files until ~90% of max_size remains."""
        try:
            files = [(f, os.path.getmtime(os.path.join(self._cache_dir, f)))
                     for f in os.listdir(self._cache_dir) if f.endswith('.cache')]
            files.sort(key=lambda item: item[1])  # oldest (by mtime) first

            # Keep a 10% buffer below max_size so cleanup isn't triggered
            # again on the very next write.
            while files and len(files) > self._max_size * 0.9:
                file_to_remove, _ = files.pop(0)
                os.remove(os.path.join(self._cache_dir, file_to_remove))
        except OSError as e:
            logger.debug("Cache cleanup failed: %s", e)

    def get(self, key: str) -> Optional[Any]:
        """Return the cached value for *key*, or None if absent/expired/corrupt."""
        with self._lock:
            cache_path = self._get_cache_path(key)
            if os.path.exists(cache_path):
                try:
                    with open(cache_path, 'rb') as f:
                        value, timestamp, ttl = pickle.load(f)

                    actual_ttl = ttl if ttl is not None else self._default_ttl
                    if time.time() - timestamp < actual_ttl:
                        self._stats["hits"] += 1
                        return value
                    # Expired: remove and fall through to a miss.
                    os.remove(cache_path)
                except Exception as e:
                    # Fix: remove unreadable/corrupt entries so they cannot
                    # keep failing forever (the original left them in place).
                    logger.debug("Failed to read cache file %s: %s", cache_path, e)
                    try:
                        os.remove(cache_path)
                    except OSError:
                        pass

            self._stats["misses"] += 1
            return None

    def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None:
        """Persist *value* under *key*; best-effort, never raises."""
        with self._lock:
            try:
                # Trigger cleanup before adding another file.
                if self._get_cache_size() >= self._max_size:
                    self._cleanup_old_cache()

                cache_path = self._get_cache_path(key)
                with open(cache_path, 'wb') as f:
                    pickle.dump((value, time.time(), ttl), f)
                self._stats["sets"] += 1
            except Exception as e:
                logger.debug("Failed to write cache file for key %r: %s", key, e)

    def delete(self, key: str) -> bool:
        """Remove the cache file for *key*; return True on success."""
        with self._lock:
            cache_path = self._get_cache_path(key)
            if os.path.exists(cache_path):
                try:
                    os.remove(cache_path)
                    return True
                except OSError as e:
                    logger.debug("Failed to delete cache file %s: %s", cache_path, e)
            return False

    def clear(self) -> None:
        """Remove every .cache file in the cache directory."""
        with self._lock:
            try:
                for filename in os.listdir(self._cache_dir):
                    if filename.endswith(".cache"):
                        os.remove(os.path.join(self._cache_dir, filename))
            except OSError as e:
                logger.debug("Failed to clear cache dir %s: %s", self._cache_dir, e)

    def stats(self) -> Dict[str, Any]:
        """Return counters plus derived hit rate and current on-disk size."""
        with self._lock:
            total_requests = self._stats["hits"] + self._stats["misses"]
            hit_rate = self._stats["hits"] / max(1, total_requests)
            return {
                **self._stats,
                "hit_rate": hit_rate,
                "total_requests": total_requests,
                "size": self._get_cache_size(),
                "max_size": self._max_size
            }


class RedisCacheBackend(CacheBackend):
    """Redis cache backend.

    Values are stored as pickled bytes (``decode_responses=False``).  The
    backend degrades gracefully: if the ``redis`` package is missing or the
    server is unreachable, it disables itself and behaves like an
    always-miss cache instead of raising.
    """

    def __init__(self, host="localhost", port=6379, db=0, password=None, default_ttl=3600, max_size=50000):
        self._host = host
        self._port = port
        self._db = db
        self._password = password
        self._default_ttl = default_ttl
        self._max_size = max_size
        self._stats = {"hits": 0, "misses": 0, "sets": 0}
        self._redis_client = None  # created lazily on first use
        self._enabled = True       # flips to False after a connection failure
        self._lock = threading.RLock()

    def _get_redis_client(self):
        """Lazily create and cache the Redis client; return None when disabled."""
        if not self._enabled:
            return None

        if self._redis_client is None:
            try:
                import redis
                self._redis_client = redis.Redis(
                    host=self._host,
                    port=self._port,
                    db=self._db,
                    password=self._password,
                    decode_responses=False,  # pickle payloads must stay raw bytes
                    socket_connect_timeout=5,
                    socket_timeout=5
                )
                # Verify the connection up front so failures disable the backend.
                self._redis_client.ping()
            except Exception as e:
                logger.warning("Redis connection failed: %s", e)
                self._enabled = False
                self._redis_client = None
                return None
        return self._redis_client

    def _is_coroutine(self, obj):
        """Return True if *obj* is a coroutine object."""
        try:
            return asyncio.iscoroutine(obj)
        except Exception:
            return False

    def _sync_execute(self, coro):
        """Return *coro* unchanged if it is a plain value; otherwise run it.

        The synchronous redis client never returns coroutines, so this is a
        defensive shim for async client implementations.

        Bug fix: the original called ``loop.run_until_complete()`` on an
        already-running loop (which always raises RuntimeError) and, on any
        failure, returned the raw coroutine object to the caller.
        """
        if coro is None or not self._is_coroutine(coro):
            return coro

        try:
            asyncio.get_running_loop()
        except RuntimeError:
            # No loop running in this thread: run the coroutine directly.
            try:
                return asyncio.run(coro)
            except Exception as e:
                logger.warning("Synchronous coroutine execution failed: %s", e)
                return None

        # A loop is already running in this thread; run_until_complete()
        # would raise, so execute the coroutine on a fresh loop in a
        # short-lived worker thread and block on its completion.
        result: Dict[str, Any] = {}

        def _runner():
            try:
                result["value"] = asyncio.run(coro)
            except Exception as e:
                logger.warning("Coroutine execution in worker thread failed: %s", e)

        worker = threading.Thread(target=_runner, daemon=True)
        worker.start()
        worker.join()
        return result.get("value")

    @staticmethod
    def _coerce_int(value) -> int:
        """Best-effort int conversion; 0 for None/coroutines/garbage."""
        try:
            return int(value)
        except (TypeError, ValueError):
            return 0

    def get(self, key: str) -> Optional[Any]:
        """Return the unpickled value for *key*, or None on miss/error."""
        with self._lock:
            client = self._get_redis_client()
            if client is None:
                self._stats["misses"] += 1
                return None

            try:
                data = self._sync_execute(client.get(key))
                # Pickle payloads are raw bytes; anything else cannot be a
                # valid payload we wrote, so treat it as a miss.  (The
                # original tried utf-8-encoding str/int responses before
                # unpickling, which can never yield a valid pickle.)
                if isinstance(data, bytes):
                    value = pickle.loads(data)
                    self._stats["hits"] += 1
                    return value
                if data is not None:
                    logger.warning("Redis returned non-bytes payload for key %r; ignoring", key)
            except Exception as e:
                logger.error("Redis get failed: %s", e)

            self._stats["misses"] += 1
            return None

    def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None:
        """Pickle *value* and store it with a TTL; best-effort, never raises."""
        with self._lock:
            client = self._get_redis_client()
            if client is None:
                return

            try:
                data = pickle.dumps(value)
                actual_ttl = ttl if ttl is not None else self._default_ttl
                self._sync_execute(client.setex(key, actual_ttl, data))
                self._stats["sets"] += 1
                self._evict_if_needed(client)
            except Exception as e:
                logger.warning("Redis set failed (ignored): %s", e)

    def _evict_if_needed(self, client) -> None:
        """Best-effort size control: drop ~10% of keys once over max_size."""
        dbsize = self._coerce_int(self._sync_execute(client.dbsize()))
        if dbsize <= self._max_size:
            return

        keys_result = self._sync_execute(client.keys("*"))
        keys_list: List[Any] = []
        if keys_result is not None and not self._is_coroutine(keys_result):
            if isinstance(keys_result, (list, tuple, set)):
                keys_list = list(keys_result)
            elif hasattr(keys_result, '__iter__') and not isinstance(keys_result, (str, bytes)):
                keys_list = list(keys_result)

        # Drop coroutine artifacts defensively, then delete ~10% of keys.
        keys_list = [k for k in keys_list if not self._is_coroutine(k)]
        keys_to_delete = keys_list[:max(1, len(keys_list) // 10)] if keys_list else []
        if keys_to_delete:
            try:
                self._sync_execute(client.delete(*keys_to_delete))
            except Exception as e:
                logger.warning("Redis key eviction failed: %s", e)

    def delete(self, key: str) -> bool:
        """Delete *key*; return True if Redis reports at least one removal."""
        with self._lock:
            client = self._get_redis_client()
            if client is None:
                return False

            try:
                removed = self._sync_execute(client.delete(key))
                return self._coerce_int(removed) > 0
            except Exception as e:
                logger.error("Redis delete failed: %s", e)
                return False

    def clear(self) -> None:
        """Flush the Redis database used by this backend."""
        with self._lock:
            client = self._get_redis_client()
            if client is None:
                return

            try:
                self._sync_execute(client.flushdb())
            except Exception as e:
                logger.error("Redis clear failed: %s", e)

    def stats(self) -> Dict[str, Any]:
        """Return counters plus current DB size and the enabled flag."""
        with self._lock:
            total_requests = self._stats["hits"] + self._stats["misses"]
            hit_rate = self._stats["hits"] / max(1, total_requests)
            size = 0
            client = self._get_redis_client()
            if client is not None:
                try:
                    size = self._coerce_int(self._sync_execute(client.dbsize()))
                except Exception as e:
                    logger.error("Redis stats failed: %s", e)

            return {
                **self._stats,
                "hit_rate": hit_rate,
                "total_requests": total_requests,
                "size": size,
                "max_size": self._max_size,
                "enabled": self._enabled
            }


class DistributedCacheManager:
    """Multi-level cache manager.

    Lookup order is memory -> Redis (optional) -> disk; hits found in a
    slower level are promoted back into every faster level.  Writes go to
    all levels.
    """

    def __init__(self, 
                 memory_ttl=3600, 
                 disk_ttl=86400,
                 memory_max_size=10000,
                 disk_max_size=100000,
                 redis_max_size=50000,
                 disk_cache_dir="./cache",
                 redis_config: Optional[Dict[str, Any]] = None):
        """
        Initialize the distributed cache manager.

        Args:
            memory_ttl: memory-cache TTL in seconds
            disk_ttl: disk-cache TTL in seconds
            memory_max_size: maximum number of in-memory entries
            disk_max_size: maximum number of on-disk entries
            redis_max_size: maximum number of Redis entries
            disk_cache_dir: directory used by the disk cache
            redis_config: Redis settings (host, port, db, password, ...);
                the Redis level is only created when this is provided
        """
        self.memory_backend = MemoryCacheBackend(default_ttl=memory_ttl, max_size=memory_max_size)
        self.disk_backend = DiskCacheBackend(cache_dir=disk_cache_dir, default_ttl=disk_ttl, max_size=disk_max_size)

        if redis_config:
            self.redis_backend = RedisCacheBackend(max_size=redis_max_size, **redis_config)
        else:
            self.redis_backend = None

        # Fastest level first: memory -> Redis -> disk.
        self.backends: List[CacheBackend] = [self.memory_backend]
        if self.redis_backend:
            self.backends.append(self.redis_backend)
        self.backends.append(self.disk_backend)

        self._lock = threading.RLock()

    # Bug fix (ruff B019): the original applied @lru_cache to an instance
    # method, which keys the cache on `self` and keeps every manager
    # instance alive for the cache's lifetime.  A staticmethod caches on
    # the content alone; instance access (self._get_cache_key(...) and
    # self._get_cache_key.cache_clear()) still works via the descriptor.
    @staticmethod
    @lru_cache(maxsize=10000)
    def _get_cache_key(content: str) -> str:
        """Return the MD5 hex digest used as the storage key for *content*.

        MD5 is used for speed/compactness, not security; changing the hash
        would invalidate entries already persisted on disk or in Redis.
        """
        return hashlib.md5(content.encode('utf-8')).hexdigest()

    def get(self, content: str) -> Optional[Any]:
        """Return the cached value for *content*, or None on a total miss."""
        key = self._get_cache_key(content)

        for i, backend in enumerate(self.backends):
            value = backend.get(key)
            if value is not None:
                # Promote hits from slower levels into every faster level.
                if i > 0:
                    for higher_backend in reversed(self.backends[:i]):
                        higher_backend.set(key, value)
                return value

        return None

    def set(self, content: str, data: Any, ttl: Optional[int] = None) -> None:
        """Write *data* into every cache level."""
        key = self._get_cache_key(content)

        for backend in self.backends:
            backend.set(key, data, ttl)

    def clear(self) -> None:
        """Clear every cache level and the key-derivation LRU cache."""
        for backend in self.backends:
            backend.clear()

        self._get_cache_key.cache_clear()

    def get_stats(self) -> Dict[str, Any]:
        """Return per-level statistics keyed by backend name."""
        backend_names = ["memory"]
        if self.redis_backend:
            backend_names.append("redis")
        backend_names.append("disk")

        return {name: backend.stats() for name, backend in zip(backend_names, self.backends)}


class SemanticCache:
    """Semantic cache based on text-embedding similarity.

    Lookups first try an exact key match, then fall back to the cosine
    similarity between sentence embeddings.
    """

    def __init__(self, similarity_threshold=0.85, max_items=1000):
        """
        Initialize the semantic cache.

        Args:
            similarity_threshold: minimum cosine similarity for a semantic hit
            max_items: maximum number of cached entries (oldest evicted first)
        """
        self.similarity_threshold = similarity_threshold
        self.max_items = max_items

        # Parallel lists: index i describes one cached entry.
        self.keys = []
        self.values = []
        self.embeddings = []
        self.timestamps = []

        # Hit/miss counters.
        self.stats = {
            "hits": 0,
            "misses": 0,
            "semantic_matches": 0
        }

        # Embedding model is loaded lazily on first use.
        self._embedding_model = None
        self._lock = threading.RLock()

    def _get_embedding_model(self):
        """Lazily load the embedding model, falling back to a bag-of-words model."""
        if self._embedding_model is None:
            try:
                from sentence_transformers import SentenceTransformer
                self._embedding_model = SentenceTransformer('all-MiniLM-L6-v2')
            except ImportError:
                # Fix: log instead of print.
                logger.warning(
                    "sentence_transformers is not installed; semantic cache "
                    "falls back to simple keyword matching")
                self._embedding_model = EmbeddingModel()
        return self._embedding_model

    def _get_embedding(self, text):
        """Encode *text* into an embedding vector."""
        return self._get_embedding_model().encode(text)

    def _compute_similarity(self, embedding1, embedding2):
        """Return the cosine similarity between two embedding vectors."""
        import numpy as np
        dot_product = np.dot(embedding1, embedding2)
        norm1 = np.linalg.norm(embedding1)
        norm2 = np.linalg.norm(embedding2)
        return dot_product / (norm1 * norm2)

    def get(self, key):
        """Return the cached value for *key* (exact or semantic match), or None."""
        with self._lock:
            if not self.keys:
                self.stats["misses"] += 1
                return None

            # Exact match first — single scan instead of `in` + `.index`.
            try:
                idx = self.keys.index(key)
            except ValueError:
                idx = -1
            if idx >= 0:
                self.stats["hits"] += 1
                return self.values[idx]

            # Semantic fallback: best cosine similarity over all entries.
            try:
                query_embedding = self._get_embedding(key)

                best_similarity = -1.0
                best_idx = -1
                for i, embedding in enumerate(self.embeddings):
                    similarity = self._compute_similarity(query_embedding, embedding)
                    if similarity > best_similarity:
                        best_similarity = similarity
                        best_idx = i

                if best_idx >= 0 and best_similarity >= self.similarity_threshold:
                    self.stats["hits"] += 1
                    self.stats["semantic_matches"] += 1
                    return self.values[best_idx]

            except Exception as e:
                # Fix: log instead of print.
                logger.warning("Semantic match failed: %s", e)

            self.stats["misses"] += 1
            return None

    def set(self, key, value):
        """Insert or update a cache entry, evicting the oldest when full."""
        with self._lock:
            # Update in place when the key already exists.
            try:
                idx = self.keys.index(key)
            except ValueError:
                idx = -1
            if idx >= 0:
                self.values[idx] = value
                self.timestamps[idx] = time.time()
                return

            try:
                embedding = self._get_embedding(key)

                if len(self.keys) >= self.max_items:
                    # Evict the entry with the oldest timestamp.
                    oldest_idx = self.timestamps.index(min(self.timestamps))
                    for entries in (self.keys, self.values, self.embeddings, self.timestamps):
                        entries.pop(oldest_idx)

                self.keys.append(key)
                self.values.append(value)
                self.embeddings.append(embedding)
                self.timestamps.append(time.time())

            except Exception as e:
                # Fix: log instead of print.
                logger.warning("Failed to store semantic cache entry: %s", e)

    def get_stats(self):
        """Return hit/miss counters plus derived rates and current size."""
        with self._lock:
            total = self.stats["hits"] + self.stats["misses"]
            hit_rate = self.stats["hits"] / max(1, total)
            semantic_rate = self.stats["semantic_matches"] / max(1, self.stats["hits"])

            return {
                "hits": self.stats["hits"],
                "misses": self.stats["misses"],
                "semantic_matches": self.stats["semantic_matches"],
                "hit_rate": hit_rate,
                "semantic_match_rate": semantic_rate,
                "total_requests": total,
                "cache_size": len(self.keys)
            }


class EmbeddingModel:
    """Fallback bag-of-words embedding used when sentence_transformers is unavailable.

    NOTE(review): vectors from different texts generally have different
    dimensions (one per unique word of that text), so cross-text cosine
    similarity is rarely meaningful; kept for parity with the original
    best-effort behaviour.
    """

    def encode(self, text):
        """Return an L2-normalized word-frequency vector for *text*."""
        import numpy as np

        # Lowercased whitespace tokenization.
        words = text.lower().split()

        # One dimension per unique word.  Map each word to its index once;
        # the original did an O(n) list lookup per token (O(n^2) total) plus
        # a redundant membership check (every token is in its own vocab).
        vocab_list = list(set(words))
        index_of = {word: i for i, word in enumerate(vocab_list)}

        vector = np.zeros(len(vocab_list))
        for word in words:
            vector[index_of[word]] += 1

        # L2-normalize (leave the zero vector untouched).
        norm = np.linalg.norm(vector)
        if norm > 0:
            vector = vector / norm

        return vector


class CacheManager:
    """Facade combining the multi-level distributed cache with an optional semantic cache."""

    def __init__(self, use_semantic_cache=True, memory_ttl=3600, disk_ttl=86400,
                 memory_max_size=10000, disk_max_size=100000, redis_max_size=50000,
                 disk_cache_dir="./cache", similarity_threshold=0.85, max_items=1000,
                 redis_config: Optional[Dict[str, Any]] = None):
        """Initialize the cache manager.

        See DistributedCacheManager and SemanticCache for the meaning of
        the individual parameters.
        """
        self.distributed_cache = DistributedCacheManager(
            memory_ttl=memory_ttl,
            disk_ttl=disk_ttl,
            memory_max_size=memory_max_size,
            disk_max_size=disk_max_size,
            redis_max_size=redis_max_size,
            disk_cache_dir=disk_cache_dir,
            redis_config=redis_config
        )
        self.semantic_cache = SemanticCache(
            similarity_threshold=similarity_threshold,
            max_items=max_items
        ) if use_semantic_cache else None
        self.use_semantic_cache = use_semantic_cache
        self._lock = threading.RLock()

    def get(self, key):
        """Exact lookup first; fall back to semantic matching if enabled."""
        result = self.distributed_cache.get(key)
        if result is not None:
            return result

        if self.use_semantic_cache and self.semantic_cache is not None:
            return self.semantic_cache.get(key)

        return None

    def set(self, key, value, ttl: Optional[int] = None):
        """Write to every cache level (TTL applies to the distributed cache only)."""
        self.distributed_cache.set(key, value, ttl)
        if self.use_semantic_cache and self.semantic_cache is not None:
            self.semantic_cache.set(key, value)

    def clear(self):
        """Clear all caches.

        Bug fix: the original recreated SemanticCache() with DEFAULT
        parameters, silently discarding the configured
        similarity_threshold/max_items.  Rebuild with the live settings.
        """
        self.distributed_cache.clear()
        if self.use_semantic_cache and self.semantic_cache is not None:
            self.semantic_cache = SemanticCache(
                similarity_threshold=self.semantic_cache.similarity_threshold,
                max_items=self.semantic_cache.max_items,
            )

    def get_stats(self):
        """Return statistics for every cache component."""
        stats = {
            "distributed_cache": self.distributed_cache.get_stats()
        }

        if self.use_semantic_cache and self.semantic_cache is not None:
            stats["semantic_cache"] = self.semantic_cache.get_stats()

        return stats


# Global cache manager instance, created at import time.
# NOTE(review): uses a hard-coded localhost Redis config; the Redis backend
# disables itself gracefully if the server is unreachable (see
# RedisCacheBackend._get_redis_client), so import never fails.
cache_manager = CacheManager(
    use_semantic_cache=True,
    redis_config={
        "host": "localhost",
        "port": 6379,
        "db": 0,
        "default_ttl": 3600
    }
)