from csagent.core.node.base_node import BaseNode
from csagent.core.context import AgentState
from openai import OpenAI
import time
import copy
class Node(BaseNode):
    """Agent node that simulates one LLM round with a single mocked tool call.

    The first pass through :meth:`execute` records a fake ``search_web`` tool
    call in a node-local copy of the message history; the second pass appends
    the mocked final answer to the main message list.
    """

    def initialize(self):
        """Initialize the node: dump its configuration and report success."""
        print(self.conf)
        print(f"{self.node_config.name}初始化成功")

    def execute(self, state: AgentState, stream_writer=None) -> AgentState:
        """Run one execution step against *state* and return it mutated.

        First call: emit a mocked tool-call message into the node-local
        message list. Subsequent calls: append the final assistant answer
        to the main ``state['messages']`` list.
        """
        print(f"{self.node_config.name}开始执行")
        latest_query = state["messages"][-1]["content"]
        print(f"query:{latest_query}")
        # An LLM request with tools would be built here; the model call
        # itself is simulated by a fixed delay.
        time.sleep(3)
        # Intermediate tool-call traffic is kept out of the main message
        # list, so a deep copy of the history lives in node_context instead.
        node_messages = state["node_context"].setdefault(
            "messages", copy.deepcopy(state["messages"])
        )
        if len(node_messages) == 1:
            # First pass: simulate the assistant requesting one tool call.
            mock_tool_call = {
                "id": "call_6596dafa2a6a46f7a217da",
                "function": {
                    "arguments": "{\"query\": \"xxxx\"}",
                    "name": "search_web",
                },
                "type": "function",
                "index": 0,
            }
            node_messages.append(
                {
                    "role": "assistant",
                    "content": "",
                    "tool_calls": [mock_tool_call],
                }
            )
        else:
            # The tool call already ran once; put the final answer on the
            # main message list.
            state["messages"].append(
                {
                    "role": "assistant",
                    "content": "这是大模型的最终回答",
                }
            )
        return state