import json
from typing import List, Dict, Any

from memory.memory_base import MemoryBase
from memory.memory_type import MemoryType
from model.context_model import ReasoningStep, ToolContext
from model.episodic_model import Episode
from model.semantic_model import Semantic


class MemoryManager(MemoryBase):
    """High-level memory facade over :class:`MemoryBase`.

    Persists and retrieves the different memory kinds used by the agent:
    recent conversation turns, ReAct reasoning steps, tool-call contexts,
    episodic (scenario) dialogues, and semantic (factual) memories.
    """

    async def store_conversation(
            self, session_id: str, user_query: str, ai_reply: str, limit_words: int = 1000
    ) -> None:
        """Append one conversation turn and persist the trimmed history.

        Args:
            session_id: Session the conversation belongs to.
            user_query: Latest user message.
            ai_reply: Latest AI reply.
            limit_words: Approximate size budget for the retained history,
                measured as the JSON-serialized length of each turn; the
                oldest turns are dropped first until the history fits.
        """
        result = await self.aget(
            MemoryType.CONVERSATIONS, session_id=session_id, key="conversations"
        )

        conversations = result['conversations'] if result else []
        conversations.append({
            'User': user_query,
            'AI': ai_reply,
        })
        # Drop oldest turns until the serialized history fits the budget.
        current_length = sum(len(json.dumps(item)) for item in conversations)
        while current_length > limit_words and conversations:
            dropped = conversations.pop(0)
            current_length -= len(json.dumps(dropped))

        # BUG FIX: the original wrote under key="conversation" while both
        # readers (here and get_conversations) use key="conversations", so
        # stored history could never be read back.
        await self.aput(
            MemoryType.CONVERSATIONS,
            session_id=session_id,
            key="conversations",
            value={
                "conversations": conversations
            }
        )

    async def store_reasoning_step(self, session_id: str, step: ReasoningStep) -> None:
        """Store one ReAct reasoning step, keyed by its step number."""
        await self.aput(
            MemoryType.REASONING_STEPS,
            session_id=session_id,
            key=f'{step.step_number}',
            value={
                # BUG FIX: field was misspelled 'step_numer' in the original.
                'step_number': step.step_number,
                'step': step.model_dump(mode='json')
            }
        )

    async def store_tool_context(self, session_id: str, tool_context: ToolContext) -> None:
        """Store a tool invocation context, indexed on its action summary."""
        await self.aput(
            MemoryType.TOOL_CONTEXT,
            session_id=session_id,
            value={
                # 'key' is the searchable field (see index=["key"] below).
                "key": f'Action: {tool_context.tool_name}, Action Input: {tool_context.input_data}',
                "tool_context": tool_context.model_dump(mode='json')
            },
            index=["key"]
        )

    async def store_episode(self, session_id: str, episode: Episode) -> None:
        """Store an episodic (scenario) dialogue, indexed on its query."""
        # NOTE(review): original called self.memory_store.aput here while
        # every sibling method uses self.aput — unified on self.aput; confirm
        # MemoryBase does not require direct store access.
        await self.aput(
            MemoryType.EPISODIC,
            session_id=session_id,
            value={
                'query': episode.query,
                'episode': episode.model_dump(mode='json')
            },
            index=['query']
        )

    async def store_semantic(self, session_id: str, semantic: Semantic) -> None:
        """Store a factual (semantic) memory, indexed on its query."""
        # NOTE(review): unified self.memory_store.aput -> self.aput for
        # consistency with the rest of the class (see store_episode).
        await self.aput(
            MemoryType.SEMANTIC,
            session_id=session_id,
            key=str(semantic.id),
            value={
                'query': semantic.query,
                'semantic': semantic.semantic
            },
            index=['query']
        )

    async def get_conversations(self, session_id: str) -> List:
        """Return the stored conversation turns for a session ([] if none)."""
        result = await self.aget(
            MemoryType.CONVERSATIONS, session_id=session_id, key="conversations"
        )
        conversations = result['conversations'] if result else []
        return conversations

    async def get_similar_episodes(self, session_id: str, query: str, limit: int = 3) -> List[str]:
        """Search similar episodic dialogues and format them as prompt lines.

        Returns a list of strings suitable for joining into LLM context,
        or [] when nothing similar is found.
        """
        similar_episodes = await self.asearch(
            MemoryType.EPISODIC, session_id=session_id, query=query, limit=limit
        )
        if len(similar_episodes) == 0:
            return []

        context_parts = ["相似场景对话记录: "]
        for hit in similar_episodes:
            # BUG FIX: hits carry the stored value {'query': ..., 'episode': dump}
            # (see store_episode); rebuild the model from the inner dump, the
            # same way get_relevant_semantics reads hit['semantic'].
            episode = Episode(**hit['episode'])
            context_parts.append(f"  Query: {episode.query}")
            context_parts.append(f"  Response: {episode.response}")
            context_parts.append(f"  Tools used: {', '.join(episode.tools_used)}")
            context_parts.append("")
        return context_parts

    async def get_relevant_semantics(self, session_id: str, query: str, limit: int = 10) -> List[str]:
        """Search relevant factual memories and format them as prompt lines.

        Returns a list of strings suitable for joining into LLM context,
        or [] when nothing relevant is found.
        """
        relevant_semantics = await self.asearch(
            MemoryType.SEMANTIC, session_id=session_id, query=query, limit=limit,
        )
        if len(relevant_semantics) == 0:
            return []

        context_parts = ["相关事实性记忆:"]
        for semantic in relevant_semantics:
            context_parts.append(f"- {semantic['semantic']}")
        return context_parts

    async def get_relevant_context(self, session_id: str, query: str):
        """Assemble all relevant context for a query. Not implemented yet."""
        pass

    async def store_facts(self):
        """Extract and store facts from a session. Not implemented yet."""
        pass

