from langchain_core.messages import SystemMessage, HumanMessage
from langchain_core.runnables import RunnableConfig

from owl_ai.graph.graph_node import BaseGraphNode


class ChatLLMGraphNode(BaseGraphNode):

    """
    LLM interaction node for use inside a WorkFlow graph.

    Formats a system and user prompt from the graph state, invokes the
    configured chat model once, and returns the raw model response nested
    under this node's name in the ``node_params`` state update.
    """

    def __init__(self, node_config: dict):
        """
        Initialize the node from its workflow configuration.

        Args:
            node_config: Node configuration dict. Recognized keys:
                - "nodeName": unique name of this node in the workflow.
                - "llmConfig": config passed to ``chat_llm_generate``
                  (inherited helper) to build the chat model client.
                - "systemPrompt": system prompt template (``str.format`` style).
                - "userPrompt": user prompt template (``str.format`` style).
                - "input": input mapping spec consumed by
                  ``input_params_assem`` (inherited helper) in ``__call__``.
                - "visionSupport": optional bool flag, defaults to False.
        """
        self.node_name = node_config.get("nodeName")
        llm_config = node_config.get("llmConfig")
        # Build the chat model client once at construction time.
        self.chat_llm = self.chat_llm_generate(llm_config)
        # Default missing/None prompts to "" so ``.format()`` in __call__
        # cannot raise AttributeError on a None template.
        self.system_prompt = node_config.get("systemPrompt") or ""
        self.user_prompt = node_config.get("userPrompt") or ""
        self.input = node_config.get("input")
        self.vision_support = node_config.get("visionSupport", False)

    def __call__(self, state, config: RunnableConfig):
        """
        Execute the node: format prompts, call the LLM, return a state update.

        Args:
            state: Current graph state; template variables are extracted from
                it via ``input_params_assem`` (inherited helper).
            config: Runnable config required by the node-callable contract
                (not read in this method).

        Returns:
            dict: Partial state update placing the raw LLM response under
            ``node_params[<node_name>]["out"]``.
        """
        input_params = self.input_params_assem(state, self.input)
        system_prompt = self.system_prompt.format(**input_params)
        user_prompt = self.user_prompt.format(**input_params)

        llm_messages = [SystemMessage(system_prompt), HumanMessage(user_prompt)]
        ret = self.chat_llm.invoke(input=llm_messages)

        return {
            "node_params": {
                self.node_name: {
                    "out": ret
                }
            }
        }
