"""
增强记忆系统架构设计
实现分层记忆存储、智能查询重写和上下文管理
"""

from typing import Dict, Any, List, Optional, Tuple, Union
from dataclasses import dataclass, field
from enum import Enum
import asyncio
import json
import time
from datetime import datetime, timedelta
import logging

logger = logging.getLogger(__name__)

class MemoryType(Enum):
    """Memory tiers kept by the simplified memory system.

    Only two tiers remain; the former KNOWLEDGE (domain facts) and
    CONTEXT (task context) tiers were dropped as under-utilized.
    """
    SHORT_TERM = "short_term"  # current-session conversational context
    LONG_TERM = "long_term"    # summarized history of past conversations

class MemoryPriority(Enum):
    """Retention priority of a memory item (higher value = kept longer)."""
    HIGH = 3    # important information, retained long-term
    MEDIUM = 2  # ordinary information, cleaned up periodically
    LOW = 1     # transient information, retained briefly

@dataclass
class MemoryItem:
    """One stored memory record, serializable to and from a plain dict."""
    id: str
    content: str
    memory_type: MemoryType
    priority: MemoryPriority
    user_id: str
    session_id: str
    timestamp: datetime
    metadata: Dict[str, Any] = field(default_factory=dict)
    embedding: Optional[List[float]] = None
    relevance_score: float = 0.0
    access_count: int = 0
    last_accessed: Optional[datetime] = None

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict (enums and datetimes flattened)."""
        payload = {
            "id": self.id,
            "content": self.content,
            "memory_type": self.memory_type.value,
            "priority": self.priority.value,
            "user_id": self.user_id,
            "session_id": self.session_id,
            "timestamp": self.timestamp.isoformat(),
            "metadata": self.metadata,
            "embedding": self.embedding,
            "relevance_score": self.relevance_score,
            "access_count": self.access_count,
        }
        last = self.last_accessed
        payload["last_accessed"] = last.isoformat() if last is not None else None
        return payload

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'MemoryItem':
        """Rebuild a MemoryItem from the dict produced by to_dict().

        Required keys raise KeyError when absent; optional fields fall
        back to their dataclass defaults.
        """
        raw_last = data.get("last_accessed")
        return cls(
            id=data["id"],
            content=data["content"],
            memory_type=MemoryType(data["memory_type"]),
            priority=MemoryPriority(data["priority"]),
            user_id=data["user_id"],
            session_id=data["session_id"],
            timestamp=datetime.fromisoformat(data["timestamp"]),
            metadata=data.get("metadata", {}),
            embedding=data.get("embedding"),
            relevance_score=data.get("relevance_score", 0.0),
            access_count=data.get("access_count", 0),
            last_accessed=datetime.fromisoformat(raw_last) if raw_last else None,
        )

@dataclass
class ContextReference:
    """A resolved reference found in user input."""
    original_text: str            # raw text as the user wrote it
    resolved_text: str            # text after reference resolution
    reference_type: str           # e.g. "pronoun", "ellipsis", "temporal"
    confidence: float             # resolution confidence score
    source_memory_ids: List[str]  # ids of the memories used to resolve it
    
class MemoryConfig:
    """Tunable limits and thresholds for the memory system."""

    # Short-term memory
    SHORT_TERM_MAX_ITEMS = 50
    SHORT_TERM_TTL_HOURS = 24

    # Long-term memory
    LONG_TERM_SUMMARY_THRESHOLD = 80  # message count that triggers summarization
    LONG_TERM_MAX_ITEMS = 1000

    # (User-profile settings were removed to simplify the system.)

    # In-process cache
    CACHE_TTL_SECONDS = 3600
    CACHE_MAX_SIZE = 1000

    # Retrieval
    RETRIEVAL_TOP_K = 5
    RELEVANCE_THRESHOLD = 0.7

    # Async batching
    ASYNC_BATCH_SIZE = 10
    ASYNC_TIMEOUT_SECONDS = 30

class MemoryMetrics:
    """Lightweight counters for memory-system observability."""

    def __init__(self):
        self.total_memories = 0
        self.cache_hits = 0
        self.cache_misses = 0
        self.retrieval_latency = []
        self.summary_operations = 0
        self.context_resolutions = 0

    def record_cache_hit(self):
        """Count one cache hit."""
        self.cache_hits = self.cache_hits + 1

    def record_cache_miss(self):
        """Count one cache miss."""
        self.cache_misses = self.cache_misses + 1

    def record_retrieval_latency(self, latency: float):
        """Append one retrieval latency sample (seconds)."""
        self.retrieval_latency.append(latency)

    def get_cache_hit_rate(self) -> float:
        """Hit rate over all recorded lookups; 0.0 when none recorded."""
        lookups = self.cache_hits + self.cache_misses
        if lookups == 0:
            return 0.0
        return self.cache_hits / lookups

    def get_avg_retrieval_latency(self) -> float:
        """Mean of recorded latencies; 0.0 when no samples exist."""
        samples = self.retrieval_latency
        if not samples:
            return 0.0
        return sum(samples) / len(samples)

class EnhancedMemoryArchitecture:
    """Enhanced memory-system architecture.

    Ties together the storage layer (async Redis + in-process cache), the
    OpenAI embedding model and the per-type memory managers.  The
    constructor does no I/O; call ``initialize()`` before use.
    """

    def __init__(self, config: MemoryConfig = None):
        """
        Args:
            config: Memory-system configuration; a fresh ``MemoryConfig``
                is used when omitted.
        """
        self.config = config or MemoryConfig()
        self.metrics = MemoryMetrics()

        # Storage layer (populated by initialize()).
        self.redis_client = None
        self.cache = {}  # in-process cache: item keys and retrieval-result keys

        # Embedding model (populated by initialize()).
        self.embedding_model = None

        # Per-MemoryType managers (populated by initialize()).
        self.memory_managers = {}

        logger.info("Enhanced Memory Architecture initialized")

    async def initialize(self):
        """Asynchronously initialize Redis, the embedding model and managers."""
        await self._init_redis()
        await self._init_embedding_model()
        await self._init_memory_managers()
        logger.info("Enhanced Memory Architecture fully initialized")

    async def _init_redis(self):
        """Create the async Redis client and verify connectivity.

        Raises:
            Exception: re-raised when the client cannot be created or pinged.
        """
        try:
            import redis.asyncio as redis
            import os

            redis_url = os.getenv("REDIS_URL", "redis://localhost:6380/0")

            # Pooled client with keepalive and periodic health checks for
            # connection stability across long-lived event loops.
            self.redis_client = redis.from_url(
                redis_url,
                max_connections=20,
                retry_on_timeout=True,
                socket_keepalive=True,
                socket_keepalive_options={},
                health_check_interval=30
            )

            # Fail fast when the server is unreachable.
            await self.redis_client.ping()
            logger.info("Redis connection established with enhanced configuration")

        except Exception as e:
            logger.error(f"Failed to initialize Redis: {e}")
            raise

    async def _init_embedding_model(self):
        """Create the OpenAI embedding model and verify it with a probe query.

        Raises:
            Exception: re-raised when construction or the probe embed fails.
        """
        try:
            from langchain_openai import OpenAIEmbeddings
            import os

            self.embedding_model = OpenAIEmbeddings(
                api_key=os.getenv("OPENAI_API_KEY"),
                base_url=os.getenv("OPENAI_BASE_URL"),
                model=os.getenv("OPENAI_EMBEDDING_MODEL", "text-embedding-3-large")
            )

            # Probe once so a bad key/model fails here rather than mid-request.
            test_embedding = await self.embedding_model.aembed_query("test")
            logger.info(f"Embedding model initialized, dimension: {len(test_embedding)}")

        except Exception as e:
            logger.error(f"Failed to initialize embedding model: {e}")
            raise

    async def _init_memory_managers(self):
        """Wire up the per-type memory managers (simplified two-tier set).

        Falls back to an empty mapping when the optional managers module
        is not importable.
        """
        try:
            from .memory_managers import (
                ShortTermMemoryManager,
                LongTermMemoryManager
                # The unused ContextManager was removed.
            )

            self.memory_managers = {
                MemoryType.SHORT_TERM: ShortTermMemoryManager(self),
                MemoryType.LONG_TERM: LongTermMemoryManager(self)
            }

            logger.info("Simplified memory managers initialized")
        except ImportError:
            logger.warning("Memory managers not available, using basic implementation")
            self.memory_managers = {}

    def get_memory_key(self, memory_type: MemoryType, user_id: str, session_id: str = None) -> str:
        """Build the Redis hash key for a (type, user[, session]) scope."""
        if session_id:
            return f"memory:{memory_type.value}:{user_id}:{session_id}"
        else:
            return f"memory:{memory_type.value}:{user_id}"

    async def store_memory(self, memory_item: MemoryItem) -> bool:
        """Persist one memory item to Redis and the in-process cache.

        A failed embedding generation does not abort the store; a failed
        Redis write does.

        Returns:
            True on success, False when no usable event loop exists or
            the Redis write fails.
        """
        try:
            # Guard against being called without a usable event loop
            # (e.g. during interpreter shutdown).
            try:
                loop = asyncio.get_running_loop()
                if loop.is_closed():
                    logger.error("Event loop is closed, cannot perform async Redis operations")
                    return False
            except RuntimeError:
                logger.error("No running event loop for async Redis operations")
                return False

            # Generate the embedding lazily; best-effort only.
            if not memory_item.embedding:
                try:
                    memory_item.embedding = await self.embedding_model.aembed_query(memory_item.content)
                except Exception as e:
                    logger.warning(f"Failed to generate embedding: {e}")
                    # Keep going -- the item is still useful without a vector.

            # Write to Redis inside a transaction.
            if self.redis_client:
                try:
                    async with self.redis_client.pipeline(transaction=True) as pipe:
                        key = self.get_memory_key(memory_item.memory_type, memory_item.user_id, memory_item.session_id)
                        # Pipeline commands are queued, not awaited; execute()
                        # sends them atomically and surfaces any connection
                        # error (the previous `await pipe.ping()` on a queued
                        # command was incorrect and has been removed).
                        pipe.hset(key, memory_item.id, json.dumps(memory_item.to_dict()))
                        await pipe.execute()

                    logger.debug(f"Successfully stored to Redis: {memory_item.id}")

                except Exception as e:
                    logger.error(f"Failed to store to Redis: {e}")
                    # No silent fallback: callers must know the write failed.
                    return False

            # Cache the item itself.
            cache_key = f"{memory_item.memory_type.value}:{memory_item.user_id}:{memory_item.id}"
            self.cache[cache_key] = memory_item

            # Invalidate cached retrieval results for this user/type so the
            # new item becomes visible to _retrieve_by_type() immediately.
            stale_prefix = f"retrieve:{memory_item.memory_type.value}:{memory_item.user_id}"
            for stale_key in [k for k in self.cache if k.startswith(stale_prefix)]:
                del self.cache[stale_key]

            # Update metrics.
            self.metrics.total_memories += 1

            logger.debug(f"Memory stored: {memory_item.id}")
            return True

        except Exception as e:
            logger.error(f"Failed to store memory: {e}")
            return False

    def _store_memory_sync(self, memory_item: MemoryItem) -> bool:
        """Synchronously store a memory item (for contexts with no event loop).

        Fix: ``os`` is imported at method scope, so the Redis branch no
        longer raises NameError when the embedding already exists (the old
        code imported ``os`` only inside the embedding branch).

        Returns:
            True unless an unexpected error occurs; embedding and Redis
            failures are logged and tolerated (best-effort semantics).
        """
        try:
            import os  # needed by both the embedding and the Redis branches

            # Generate the embedding via a blocking HTTP call.
            if not memory_item.embedding:
                try:
                    import requests

                    api_key = os.getenv("OPENAI_API_KEY")
                    base_url = os.getenv("OPENAI_BASE_URL", "https://api.openai.com")

                    response = requests.post(
                        f"{base_url}/v1/embeddings",
                        headers={"Authorization": f"Bearer {api_key}"},
                        json={
                            # Keep the model consistent with _init_embedding_model().
                            "model": os.getenv("OPENAI_EMBEDDING_MODEL", "text-embedding-3-large"),
                            "input": memory_item.content
                        },
                        timeout=30
                    )

                    if response.status_code == 200:
                        embedding_data = response.json()
                        memory_item.embedding = embedding_data["data"][0]["embedding"]
                    else:
                        logger.warning(f"Failed to generate embedding: {response.status_code}")

                except Exception as e:
                    logger.warning(f"Failed to generate embedding (sync): {e}")

            # Write through a dedicated synchronous Redis client.
            if self.redis_client:
                try:
                    import redis
                    sync_redis = redis.Redis.from_url(
                        os.getenv("REDIS_URL", "redis://localhost:6380/0"),
                        decode_responses=True
                    )

                    key = self.get_memory_key(memory_item.memory_type, memory_item.user_id, memory_item.session_id)
                    sync_redis.hset(key, memory_item.id, json.dumps(memory_item.to_dict()))

                except Exception as e:
                    logger.warning(f"Failed to store to Redis (sync): {e}")

            return True

        except Exception as e:
            logger.error(f"Failed to store memory (sync): {e}")
            return False

    async def close(self):
        """Close the Redis connection and release resources."""
        try:
            if self.redis_client:
                # redis-py >= 5 renamed close() to aclose(); prefer the new
                # name when available to avoid the deprecation warning.
                closer = getattr(self.redis_client, "aclose", self.redis_client.close)
                await closer()
                logger.info("Redis client closed successfully")
        except Exception as e:
            logger.warning(f"Failed to close Redis client: {e}")

    def __del__(self):
        """Best-effort cleanup on garbage collection.

        Schedules close() on the running loop when one exists; the task
        reference is not retained, so completion is not guaranteed --
        callers should prefer an explicit ``await close()``.
        """
        try:
            if hasattr(self, 'redis_client') and self.redis_client:
                import asyncio
                try:
                    loop = asyncio.get_running_loop()
                    if not loop.is_closed():
                        loop.create_task(self.close())
                except RuntimeError:
                    # No running event loop; cannot close asynchronously.
                    pass
        except Exception:
            pass

    def _store_to_redis_sync(self, memory_item: MemoryItem) -> bool:
        """Deprecated sync-store entry point, kept only for interface compatibility."""
        logger.error("Sync Redis storage is deprecated. Use async storage only.")
        return False

    async def retrieve_memories(
        self,
        query: str,
        user_id: str,
        memory_types: List[MemoryType] = None,
        top_k: int = None
    ) -> List[MemoryItem]:
        """Retrieve the memories most relevant to *query* for a user.

        Args:
            query: free-text query to embed and match against.
            user_id: owner of the memories to search.
            memory_types: tiers to search; defaults to all tiers.
            top_k: maximum results; defaults to ``config.RETRIEVAL_TOP_K``.

        Returns:
            Up to *top_k* items with ``relevance_score`` at or above
            ``config.RELEVANCE_THRESHOLD``, sorted by descending score.
            Returns an empty list on any error.
        """
        start_time = time.time()

        try:
            if memory_types is None:
                memory_types = list(MemoryType)

            if top_k is None:
                top_k = self.config.RETRIEVAL_TOP_K

            # Embed the query once and reuse it across tiers.
            query_embedding = await self.embedding_model.aembed_query(query)

            # Gather candidates from every requested tier.
            all_memories = []
            for memory_type in memory_types:
                memories = await self._retrieve_by_type(query_embedding, memory_type, user_id)
                all_memories.extend(memories)

            # Score candidates; items without embeddings keep score 0.
            for memory in all_memories:
                if memory.embedding:
                    similarity = self._calculate_similarity(query_embedding, memory.embedding)
                    memory.relevance_score = similarity

            # Threshold-filter, then rank best-first.
            relevant_memories = [
                m for m in all_memories
                if m.relevance_score >= self.config.RELEVANCE_THRESHOLD
            ]
            relevant_memories.sort(key=lambda x: x.relevance_score, reverse=True)

            # Update access stats on the returned items.
            # NOTE(review): these stats are updated in-memory only and are
            # not written back to Redis here -- confirm this is intended.
            for memory in relevant_memories[:top_k]:
                memory.access_count += 1
                memory.last_accessed = datetime.now()

            # Record latency for observability.
            latency = time.time() - start_time
            self.metrics.record_retrieval_latency(latency)

            return relevant_memories[:top_k]

        except Exception as e:
            logger.error(f"Failed to retrieve memories: {e}")
            return []

    def _calculate_similarity(self, embedding1: List[float], embedding2: List[float]) -> float:
        """Cosine similarity of two vectors; 0.0 when either has zero norm."""
        import numpy as np

        vec1 = np.array(embedding1)
        vec2 = np.array(embedding2)

        dot_product = np.dot(vec1, vec2)
        norm1 = np.linalg.norm(vec1)
        norm2 = np.linalg.norm(vec2)

        if norm1 == 0 or norm2 == 0:
            return 0.0

        return dot_product / (norm1 * norm2)

    async def _retrieve_by_type(
        self,
        query_embedding: List[float],
        memory_type: MemoryType,
        user_id: str
    ) -> List[MemoryItem]:
        """Load all of a user's memories of one tier, with result caching.

        Results are cached per (tier, user); store_memory() invalidates
        the cache entry when a new item is written.
        """
        try:
            # Serve from the in-process cache when possible.
            cache_key = f"retrieve:{memory_type.value}:{user_id}"
            if cache_key in self.cache:
                self.metrics.record_cache_hit()
                return self.cache[cache_key]

            self.metrics.record_cache_miss()

            # SCAN instead of KEYS: KEYS blocks the Redis server on large
            # keyspaces, while SCAN iterates incrementally.
            key_pattern = f"memory:{memory_type.value}:{user_id}*"
            keys = [key async for key in self.redis_client.scan_iter(match=key_pattern)]

            memories = []
            for key in keys:
                memory_data = await self.redis_client.hgetall(key)
                for memory_id, memory_json in memory_data.items():
                    try:
                        memory_dict = json.loads(memory_json)
                        memory_item = MemoryItem.from_dict(memory_dict)
                        memories.append(memory_item)
                    except Exception as e:
                        logger.warning(f"Failed to parse memory {memory_id}: {e}")

            # Cache the full result list for subsequent lookups.
            self.cache[cache_key] = memories

            return memories

        except Exception as e:
            logger.error(f"Failed to retrieve memories by type {memory_type}: {e}")
            return []
