from langchain_core.messages import SystemMessage
from langchain_core.runnables import RunnableConfig

from owl_ai.graph.graph_node import BaseGraphNode


class RagRetrievalGraphNode(BaseGraphNode):
    """
    Knowledge-base retrieval node.

    Builds a list of search keys — either generated by an LLM from a system
    prompt, or taken directly from the assembled input parameters — and
    returns one structured result entry per key.
    """

    def __init__(self, node_config: dict):
        """
        Initialize the node from its configuration.

        Args:
            node_config: Node configuration dict. Recognized keys:
                - "rag": the knowledge bases to retrieve from
                  (exact shape not visible here — TODO confirm against caller).
                - "llm_config": optional LLM settings; when present, an LLM is
                  used to generate search keywords.
                - "systemPrompt": prompt template used with the LLM
                  (only read when "llm_config" is present).
                - "input": input-parameter mapping consumed by
                  input_params_assem() in __call__.
        """
        # Knowledge bases to retrieve from.
        self.rag = node_config.get("rag")
        # BUGFIX: always define chat_llm/system_prompt so __call__ can safely
        # test `if self.chat_llm:`. Previously these were only set inside the
        # `if llm_config:` branch, so a node configured without an LLM raised
        # AttributeError in __call__ instead of taking the fallback path.
        self.chat_llm = None
        self.system_prompt = None
        llm_config = node_config.get("llm_config")
        if llm_config:
            self.chat_llm = self.chat_llm_generate(llm_config)
            self.system_prompt = node_config.get("systemPrompt")
        self.input_config = node_config.get("input")

    def __call__(self, state, config: RunnableConfig):
        """
        Run the retrieval step for the current graph state.

        Args:
            state: Graph state, forwarded to input_params_assem().
            config: LangChain runnable config (not used directly here).

        Returns:
            list[dict]: one {"content": <search key>} entry per search key.
        """
        input_params = self.input_params_assem(state, input_config=self.input_config)

        # Should an LLM be used to generate the search keywords?
        if self.chat_llm:
            # NOTE(review): .format(input_params) passes the whole dict as a
            # single positional argument — confirm the template uses "{0}"/"{}"
            # rather than named placeholders (those would need **input_params).
            system_prompt = self.system_prompt.format(input_params)
            llm_messages = [SystemMessage(system_prompt)]
            ai_message = self.chat_llm.invoke(input=llm_messages)
            # The LLM is expected to answer with a comma-separated keyword list.
            search_keys = ai_message.content.split(",")
        else:
            # No LLM configured: use the input parameter values directly.
            search_keys = list(input_params.values())

        # Retrieve documents from the knowledge bases for each key and
        # assemble the structured output.
        # NOTE(review): retrieval is not implemented yet — each search key is
        # currently echoed back as the document content.
        return [{"content": search_key} for search_key in search_keys]
