# @Version : 1.0
# @Author  : wxz
# @File    : chat_agent.py
# @Time    : 2025/6/19 20:33
# @Describe: 描述...
import logging
from typing import Mapping, Any

from autogen_agentchat.agents import AssistantAgent
from autogen_agentchat.messages import TextMessage
from autogen_agentchat.state import AssistantAgentState
from autogen_core import CancellationToken

from apps.chat.agent.chat_state import ChatState
from apps.chat.schemas.chat_agent_response import ChatAgentResponse
from llms.llms_clinet import LLMsClient


class ChatAgent:
    """Single-agent chat wrapper around an AutoGen ``AssistantAgent``.

    Holds one assistant instance and exposes an async entry point that
    restores prior conversation state, runs one user turn, snapshots the
    new state, and returns a ``ChatAgentResponse``.
    """

    def __init__(self):
        # One assistant backed by the project's Qwen chat client.
        self.chat_agent = AssistantAgent(
            name="SimpleAgent",
            model_client=LLMsClient.autogen_qwen_chat_client(),
            system_message="你是一个乐于助人的AI助手！",
        )

    async def run_chat_agent(self, task: str, agent_state: AssistantAgentState) -> ChatAgentResponse:
        """Run a single chat turn against the wrapped assistant.

        Args:
            task: The user's message text for this turn.
            agent_state: Previously saved agent state to restore before the
                turn, or ``None`` to start a fresh conversation.

        Returns:
            ChatAgentResponse carrying the assistant's reply ``content`` and
            the ``llm_context`` messages captured after this turn.

        Raises:
            Exception: Any failure is logged with its traceback and then
                re-raised. (The previous version swallowed the error and
                implicitly returned ``None``, contradicting the declared
                return type.)
        """
        try:
            if agent_state is not None:
                # Restore the prior conversation state into the agent.
                await self.chat_agent.load_state(agent_state.model_dump())

            result = await self.chat_agent.on_messages(
                messages=[TextMessage(content=task, source="user")],
                cancellation_token=CancellationToken(),
            )
            content = result.chat_message.content

            # Snapshot the agent state so the caller can persist it.
            state = await self.chat_agent.save_state()
            # Defensive access: an unguarded state.get("llm_context").get(...)
            # raises AttributeError when the section is missing.
            llm_context_messages = state.get("llm_context", {}).get("messages", [])

            return ChatAgentResponse(
                llm_context_messages=llm_context_messages,
                content=content,
            )
        except Exception:
            # Log with full traceback, then propagate so callers never
            # silently receive None in place of a response.
            logging.exception("run_chat_agent failed for task=%r", task)
            raise







