# memory/enhanced_memory_manager.py
# 增强的记忆管理器，支持短期记忆、长期记忆和时间记忆

import asyncio
import hashlib
import json
import pickle
from dataclasses import asdict, dataclass
from datetime import datetime, timedelta
from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple, Union

import networkx as nx
import numpy as np

from .local_embedding_client import LocalEmbeddingClient


class MemoryType(Enum):
    """Category of a memory node.

    The string values are persisted to disk (JSON) and embedded in node ids,
    so they must not be changed.
    """
    SHORT_TERM = "short_term"      # short-term memory (conversation context, TTL-bound)
    LONG_TERM = "long_term"        # long-term memory (important data, kept indefinitely)
    TEMPORAL = "temporal"          # time-anchored memory (carries a time reference)


class MemoryImportance(Enum):
    """Importance level of a memory node.

    Integer values are persisted to disk (JSON), so they must not be changed.
    Higher value means more important.
    """
    LOW = 1
    MEDIUM = 2
    HIGH = 3
    CRITICAL = 4


@dataclass
class EnhancedMemoryNode:
    """A single memory entry, mirrored as a node in the memory graph.

    Field layout matches the JSON persisted by :meth:`to_dict` /
    :meth:`from_dict` (enums are stored as their raw values).
    """
    id: str                                  # unique node id (see manager's id generator)
    content: str                             # raw memory text
    memory_type: MemoryType                  # which store this node belongs to
    importance: MemoryImportance             # relative priority of the memory
    timestamp: str                           # ISO-8601 creation time
    tags: Optional[List[str]] = None         # normalized to [] in __post_init__
    embedding: Optional[List[float]] = None  # None when the embedding service failed
    metadata: Optional[Dict[str, Any]] = None  # normalized to {} in __post_init__
    access_count: int = 0                    # number of times mark_accessed() was called
    last_accessed: Optional[str] = None      # ISO-8601 time of last access, None if never

    def __post_init__(self):
        # Mutable defaults are unsafe on dataclass fields, so None is used as
        # the declared default and replaced with a fresh container here.
        if self.tags is None:
            self.tags = []
        if self.metadata is None:
            self.metadata = {}

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict (enums become their raw values)."""
        data = asdict(self)
        data['memory_type'] = self.memory_type.value
        data['importance'] = self.importance.value
        return data

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'EnhancedMemoryNode':
        """Build an instance from a :meth:`to_dict`-shaped dict.

        Operates on a shallow copy so the caller's dict is left untouched
        (the previous implementation overwrote the ``memory_type`` and
        ``importance`` keys of the input dict in place).
        """
        payload = dict(data)
        payload['memory_type'] = MemoryType(payload['memory_type'])
        payload['importance'] = MemoryImportance(payload['importance'])
        return cls(**payload)

    def mark_accessed(self):
        """Record one read access and stamp the current time."""
        self.access_count += 1
        self.last_accessed = datetime.now().isoformat()


@dataclass
class MemoryEdge:
    """Directed relationship between two memory nodes.

    Mirrored into the networkx graph as edge attributes; serialized as-is
    to the edges JSON file.
    """
    source: str                      # id of the originating node
    target: str                      # id of the related node
    relationship: str                # relationship label, e.g. "related_to"
    weight: float = 1.0              # edge strength (cosine similarity when auto-built)
    timestamp: Optional[str] = None  # ISO-8601 time the edge was created

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict."""
        return asdict(self)

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'MemoryEdge':
        """Rebuild an edge from its :meth:`to_dict` representation."""
        return cls(**data)


class EnhancedMemoryManager:
    """Graph-backed memory manager with short-term, long-term and temporal stores.

    Memories live in three dicts keyed by node id and are mirrored into a
    ``networkx.DiGraph`` whose edges encode semantic similarity.  All state is
    persisted under ``storage_dir`` (JSON for nodes/edges, pickle for the graph).

    Fixes versus the previous revision:
      * ``clear_memory`` of a single type now also removes that type's nodes
        and edges from the graph (they used to linger).
      * expired/evicted short-term nodes now have their entries pruned from
        ``self.edges`` as well as the graph.
      * ``get_contextual_memories`` performs a real breadth-first expansion;
        previously depths beyond 1 had no effect.
      * node ids are derived from an MD5 digest so they are stable across
        interpreter runs (builtin ``hash`` of ``str`` is salted per process).
    """

    def __init__(self, storage_dir: Path, embedding_client: LocalEmbeddingClient):
        self.storage_dir = Path(storage_dir)
        # parents=True so a missing ancestor directory does not abort startup.
        self.storage_dir.mkdir(parents=True, exist_ok=True)

        # Persistent file locations.
        self.short_term_file = self.storage_dir / "short_term_memories.json"
        self.long_term_file = self.storage_dir / "long_term_memories.json"
        self.temporal_file = self.storage_dir / "temporal_memories.json"
        self.graph_file = self.storage_dir / "memory_graph.pkl"
        self.edges_file = self.storage_dir / "memory_edges.json"

        self.embedding_client = embedding_client

        # In-memory stores.
        self.short_term_memories: Dict[str, EnhancedMemoryNode] = {}
        self.long_term_memories: Dict[str, EnhancedMemoryNode] = {}
        self.temporal_memories: Dict[str, EnhancedMemoryNode] = {}
        self.edges: List[MemoryEdge] = []
        self.graph = nx.DiGraph()

        # Tuning parameters.
        self.max_short_term_memories = 50  # cap on short-term entries
        self.short_term_ttl_hours = 24     # short-term TTL in hours
        self.similarity_threshold = 0.5    # link threshold (kept low to favor recall)

        # Restore any previously persisted state.
        self._load_memories()

    # ------------------------------------------------------------------ #
    # Persistence
    # ------------------------------------------------------------------ #

    def _load_memories(self):
        """Load all persisted memory state; fall back to empty state on error."""
        try:
            memory_files = [
                (self.short_term_file, self.short_term_memories),
                (self.long_term_file, self.long_term_memories),
                (self.temporal_file, self.temporal_memories),
            ]
            for file_path, memory_dict in memory_files:
                if file_path.exists():
                    with open(file_path, 'r', encoding='utf-8') as f:
                        data = json.load(f)
                    for node_id, node_data in data.items():
                        memory_dict[node_id] = EnhancedMemoryNode.from_dict(node_data)

            # NOTE(review): pickle.load is unsafe if the storage directory can
            # be written by untrusted parties — consider a JSON graph format.
            if self.graph_file.exists():
                with open(self.graph_file, 'rb') as f:
                    self.graph = pickle.load(f)

            if self.edges_file.exists():
                with open(self.edges_file, 'r', encoding='utf-8') as f:
                    edges_data = json.load(f)
                self.edges = [MemoryEdge.from_dict(edge_data) for edge_data in edges_data]

            print(f"已加载记忆: 短期{len(self.short_term_memories)}个, 长期{len(self.long_term_memories)}个, 时间{len(self.temporal_memories)}个")

        except Exception as e:
            # Best-effort load: report the problem and start from a clean slate.
            print(f"加载记忆数据时发生错误: {e}")
            self._initialize_empty_memories()

    def _initialize_empty_memories(self):
        """Reset every store, the edge list and the graph to an empty state."""
        self.short_term_memories = {}
        self.long_term_memories = {}
        self.temporal_memories = {}
        self.edges = []
        self.graph = nx.DiGraph()

    def _save_memories(self):
        """Persist all stores, the graph and the edge list to disk."""
        try:
            memory_data = [
                (self.short_term_file, self.short_term_memories),
                (self.long_term_file, self.long_term_memories),
                (self.temporal_file, self.temporal_memories),
            ]
            for file_path, memory_dict in memory_data:
                data = {node_id: node.to_dict() for node_id, node in memory_dict.items()}
                with open(file_path, 'w', encoding='utf-8') as f:
                    json.dump(data, f, indent=2, ensure_ascii=False)

            with open(self.graph_file, 'wb') as f:
                pickle.dump(self.graph, f)

            edges_data = [edge.to_dict() for edge in self.edges]
            with open(self.edges_file, 'w', encoding='utf-8') as f:
                json.dump(edges_data, f, indent=2, ensure_ascii=False)

        except Exception as e:
            # Persistence is best-effort; in-memory state stays authoritative.
            print(f"保存记忆数据时发生错误: {e}")

    # ------------------------------------------------------------------ #
    # Internal helpers
    # ------------------------------------------------------------------ #

    def _get_embedding(self, text: str) -> Optional[List[float]]:
        """Fetch the embedding for ``text`` (None when the client fails)."""
        return self.embedding_client.get_embedding(text)

    def _generate_node_id(self, memory_type: MemoryType, content: str) -> str:
        """Build a node id: ``<type>_<YYYYmmdd_HHMMSS>_<digest % 10000>``.

        Uses an MD5-based digest instead of builtin ``hash()`` so the id is
        reproducible across interpreter runs (str hashing is salted per
        process); the format is unchanged.
        """
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        digest = hashlib.md5(content.encode("utf-8")).hexdigest()
        content_hash = int(digest, 16) % 10000
        return f"{memory_type.value}_{timestamp}_{content_hash}"

    def _all_memories(self) -> Dict[str, EnhancedMemoryNode]:
        """Merged id->node view over all three stores."""
        return {
            **self.short_term_memories,
            **self.long_term_memories,
            **self.temporal_memories,
        }

    def _purge_nodes(self, node_ids) -> None:
        """Remove nodes from the graph and drop edges that touch any of them.

        Keeps ``self.edges`` consistent with the graph — previously only the
        graph was updated, leaving stale MemoryEdge records behind.
        """
        doomed = set(node_ids)
        if not doomed:
            return
        for node_id in doomed:
            if self.graph.has_node(node_id):
                self.graph.remove_node(node_id)
        self.edges = [
            edge for edge in self.edges
            if edge.source not in doomed and edge.target not in doomed
        ]

    def _make_node(self, memory_type: MemoryType, content: str,
                   importance: MemoryImportance, tags: Optional[List[str]],
                   metadata: Optional[Dict[str, Any]]) -> EnhancedMemoryNode:
        """Create a memory node (id, timestamp and embedding filled in)."""
        return EnhancedMemoryNode(
            id=self._generate_node_id(memory_type, content),
            content=content,
            memory_type=memory_type,
            importance=importance,
            timestamp=datetime.now().isoformat(),
            tags=tags or [],
            embedding=self._get_embedding(content),
            metadata=metadata or {},
        )

    async def _store_memory(self, node: EnhancedMemoryNode,
                            store: Dict[str, EnhancedMemoryNode], *,
                            enforce_short_term_limits: bool = False) -> str:
        """Insert ``node`` into ``store`` and the graph, link it, persist.

        Order matters and matches the original add_* methods: store + graph
        first, then (for short-term) TTL/size cleanup, then similarity links,
        then a full save.
        """
        store[node.id] = node
        self.graph.add_node(node.id, **node.to_dict())

        if enforce_short_term_limits:
            await self._cleanup_expired_short_term_memories()

        await self._build_connections(node.id, node.embedding)
        self._save_memories()
        return node.id

    # ------------------------------------------------------------------ #
    # Public add API
    # ------------------------------------------------------------------ #

    async def add_short_term_memory(self, content: str, tags: List[str] = None,
                                  importance: MemoryImportance = MemoryImportance.MEDIUM,
                                  metadata: Dict[str, Any] = None) -> str:
        """
        Add a short-term memory (conversation context).

        Args:
            content: Memory text.
            tags: Optional tag list.
            importance: Importance level (defaults to MEDIUM).
            metadata: Optional metadata dict.

        Returns:
            The new node's id.
        """
        node = self._make_node(MemoryType.SHORT_TERM, content, importance, tags, metadata)
        return await self._store_memory(node, self.short_term_memories,
                                        enforce_short_term_limits=True)

    async def add_long_term_memory(self, content: str, tags: List[str] = None,
                                 importance: MemoryImportance = MemoryImportance.HIGH,
                                 metadata: Dict[str, Any] = None) -> str:
        """
        Add a long-term memory (important data, no TTL).

        Args:
            content: Memory text.
            tags: Optional tag list.
            importance: Importance level (defaults to HIGH).
            metadata: Optional metadata dict.

        Returns:
            The new node's id.
        """
        node = self._make_node(MemoryType.LONG_TERM, content, importance, tags, metadata)
        return await self._store_memory(node, self.long_term_memories)

    async def add_temporal_memory(self, content: str, tags: List[str] = None,
                                importance: MemoryImportance = MemoryImportance.MEDIUM,
                                metadata: Dict[str, Any] = None,
                                time_reference: Optional[str] = None) -> str:
        """
        Add a time-anchored memory.

        Args:
            content: Memory text.
            tags: Optional tag list.
            importance: Importance level (defaults to MEDIUM).
            metadata: Optional metadata dict.
            time_reference: Normalized time reference (e.g. "yesterday");
                stored in metadata before the node enters the graph so the
                graph attributes include it.

        Returns:
            The new node's id.
        """
        node = self._make_node(MemoryType.TEMPORAL, content, importance, tags, metadata)
        if time_reference:
            node.metadata['time_reference'] = time_reference
        return await self._store_memory(node, self.temporal_memories)

    # ------------------------------------------------------------------ #
    # Maintenance
    # ------------------------------------------------------------------ #

    async def _cleanup_expired_short_term_memories(self):
        """Drop short-term memories past their TTL, then enforce the size cap."""
        current_time = datetime.now()
        expired = [
            node_id
            for node_id, memory in self.short_term_memories.items()
            if current_time - datetime.fromisoformat(memory.timestamp)
            > timedelta(hours=self.short_term_ttl_hours)
        ]
        for node_id in expired:
            del self.short_term_memories[node_id]
        self._purge_nodes(expired)

        # Evict the oldest entries beyond the configured cap.
        excess = len(self.short_term_memories) - self.max_short_term_memories
        if excess > 0:
            oldest = sorted(
                self.short_term_memories.items(),
                key=lambda item: item[1].timestamp,
            )[:excess]
            evicted = [node_id for node_id, _ in oldest]
            for node_id in evicted:
                del self.short_term_memories[node_id]
            self._purge_nodes(evicted)

    async def _build_connections(self, new_node_id: str, new_embedding: Optional[List[float]]):
        """Link a new node to its (up to 5) most-similar existing nodes."""
        if new_embedding is None:
            return

        # Score every other node that has an embedding.
        similarities = []
        for node_id, node in self._all_memories().items():
            if node_id == new_node_id or node.embedding is None:
                continue
            similarity = self.embedding_client.cosine_similarity(new_embedding, node.embedding)
            if similarity > self.similarity_threshold:
                similarities.append((node_id, similarity))

        # Keep only the top 5, skipping pairs already connected either way.
        similarities.sort(key=lambda item: item[1], reverse=True)
        for node_id, similarity in similarities[:5]:
            if self.graph.has_edge(new_node_id, node_id) or self.graph.has_edge(node_id, new_node_id):
                continue
            edge = MemoryEdge(
                source=new_node_id,
                target=node_id,
                relationship="related_to",
                weight=similarity,
                timestamp=datetime.now().isoformat(),
            )
            self.edges.append(edge)
            self.graph.add_edge(new_node_id, node_id, **edge.to_dict())

    # ------------------------------------------------------------------ #
    # Search
    # ------------------------------------------------------------------ #

    async def search_memories(self, query: str, memory_types: List[MemoryType] = None,
                            limit: int = 10, min_similarity: float = 0.6) -> List[Tuple[EnhancedMemoryNode, float]]:
        """
        Search memories by semantic similarity.

        Args:
            query: Query text.
            memory_types: Memory types to search; None means all types.
            limit: Maximum number of results.
            min_similarity: Minimum similarity to include a result.
                NOTE(review): this default (0.6) is stricter than
                ``self.similarity_threshold`` (0.5) used for linking —
                confirm the mismatch is intentional.

        Returns:
            List of (node, similarity) pairs, best match first.
        """
        query_embedding = self._get_embedding(query)
        if query_embedding is None:
            return []

        if memory_types is None:
            memory_types = [MemoryType.SHORT_TERM, MemoryType.LONG_TERM, MemoryType.TEMPORAL]

        memory_sources = [
            (MemoryType.SHORT_TERM, self.short_term_memories),
            (MemoryType.LONG_TERM, self.long_term_memories),
            (MemoryType.TEMPORAL, self.temporal_memories),
        ]
        candidates: Dict[str, EnhancedMemoryNode] = {}
        for memory_type, memory_dict in memory_sources:
            if memory_type in memory_types:
                candidates.update(memory_dict)

        scored = []
        for node in candidates.values():
            if node.embedding is None:
                continue
            similarity = self.embedding_client.cosine_similarity(query_embedding, node.embedding)
            if similarity >= min_similarity:
                scored.append((node, similarity))

        scored.sort(key=lambda item: item[1], reverse=True)
        return scored[:limit]

    async def search_temporal_memories(self, time_query: str, limit: int = 10) -> List[EnhancedMemoryNode]:
        """
        Search time-anchored memories by a natural-language time phrase.

        Args:
            time_query: Time phrase (e.g. "昨天", "上周", "前天").
            limit: Maximum number of results.

        Returns:
            Matching temporal memories, newest first; empty list when the
            phrase cannot be normalized.
        """
        time_reference = self._parse_time_reference(time_query)
        if not time_reference:
            return []

        matches = [
            memory for memory in self.temporal_memories.values()
            if memory.metadata.get('time_reference') == time_reference
        ]
        matches.sort(key=lambda memory: memory.timestamp, reverse=True)
        return matches[:limit]

    def _parse_time_reference(self, time_query: str) -> Optional[str]:
        """Normalize a Chinese time phrase to its canonical English token.

        Returns None when no known phrase occurs in the query.
        """
        time_query = time_query.lower().strip()

        # Phrase -> canonical token mapping; these tokens are what
        # add_temporal_memory stores in metadata['time_reference'].
        time_mappings = {
            "昨天": "yesterday",
            "前天": "day_before_yesterday", 
            "今天": "today",
            "上周": "last_week",
            "上个月": "last_month",
            "去年": "last_year",
            "最近": "recent",
            "之前": "before",
            "以前": "before"
        }

        # Substring match: first mapping found in the query wins
        # (dict order is the declaration order above).
        for chinese, english in time_mappings.items():
            if chinese in time_query:
                return english

        return None

    async def get_contextual_memories(self, query: str, context_depth: int = 2) -> List[EnhancedMemoryNode]:
        """
        Retrieve memories related to ``query`` directly or via graph links.

        Args:
            query: Query text.
            context_depth: Number of hops to expand from the seed matches.

        Returns:
            Related memory nodes, newest first.

        Fix: the previous version iterated ``context_depth`` times over the
        *same* seed node's direct neighbors, so depths beyond 1 had no
        effect; this version does a real breadth-first expansion.
        """
        similar_memories = await self.search_memories(query, limit=3)
        if not similar_memories:
            return []

        # BFS over both edge directions, up to context_depth hops.
        relevant_node_ids = {node.id for node, _ in similar_memories}
        frontier = set(relevant_node_ids)
        for _ in range(context_depth):
            next_frontier = set()
            for node_id in frontier:
                if not self.graph.has_node(node_id):
                    continue
                next_frontier.update(self.graph.neighbors(node_id))
                next_frontier.update(self.graph.predecessors(node_id))
            next_frontier -= relevant_node_ids
            if not next_frontier:
                break
            relevant_node_ids |= next_frontier
            frontier = next_frontier

        all_memories = self._all_memories()
        contextual = [all_memories[node_id] for node_id in relevant_node_ids
                      if node_id in all_memories]
        contextual.sort(key=lambda node: node.timestamp, reverse=True)
        return contextual

    # ------------------------------------------------------------------ #
    # Introspection / lifecycle
    # ------------------------------------------------------------------ #

    def get_memory_stats(self) -> Dict[str, Any]:
        """Return counts and graph statistics for all memory stores."""
        return {
            "short_term_count": len(self.short_term_memories),
            "long_term_count": len(self.long_term_memories),
            "temporal_count": len(self.temporal_memories),
            "total_nodes": len(self.short_term_memories) + len(self.long_term_memories) + len(self.temporal_memories),
            "total_edges": len(self.edges),
            "graph_density": nx.density(self.graph) if len(self.graph) > 0 else 0,
            "memory_types": {
                "short_term": len(self.short_term_memories),
                "long_term": len(self.long_term_memories),
                "temporal": len(self.temporal_memories)
            }
        }

    def clear_memory(self, memory_type: MemoryType = None):
        """
        Clear memories and persist the result.

        Args:
            memory_type: Type to clear; None clears everything.

        Fix: clearing a single type now also removes its nodes and edges
        from the graph (previously they lingered and skewed stats/search).
        """
        if memory_type is None:
            self.short_term_memories.clear()
            self.long_term_memories.clear()
            self.temporal_memories.clear()
            self.graph.clear()
            self.edges.clear()
            print("所有记忆已清空")
        else:
            stores = {
                MemoryType.SHORT_TERM: (self.short_term_memories, "短期记忆已清空"),
                MemoryType.LONG_TERM: (self.long_term_memories, "长期记忆已清空"),
                MemoryType.TEMPORAL: (self.temporal_memories, "时间记忆已清空"),
            }
            entry = stores.get(memory_type)
            if entry is not None:
                store, message = entry
                self._purge_nodes(list(store))
                store.clear()
                print(message)

        self._save_memories()
