from langchain_core.runnables import RunnableConfig

from owl_ai.domain.graph.nodes.graph_node import BaseGraphNode
from owl_ai.domain.knowledge.knowledge_service import KnowledgeService, VectorStoreService


class RAGRetrieveNode(BaseGraphNode):
    """
    Knowledge-base retrieval (RAG) node.

    Resolves the configured knowledge base once at construction time and,
    when invoked, runs a similarity search against it using the text of
    the most recent message in the graph state.
    """

    def __init__(self, node_config: dict):
        """
        Args:
            node_config: node configuration dict; must contain "kid",
                the id of the knowledge base this node retrieves from.
        """
        super().__init__(node_config)
        kid = node_config.get("kid")
        # Resolve the knowledge base once up front rather than on every call.
        self.knowledge_base = KnowledgeService.find_by_id(kid)

    @staticmethod
    def _extract_query(messages) -> str:
        """
        Return the text content of the last message, or "" if none exists.

        Handles both plain-string content and the multi-part list form,
        where the query is the first part with type == "text".
        """
        # Guard: no messages at all (None or empty) -> no query.
        if not messages:
            return ""
        last_content = messages[-1].get("content")
        if isinstance(last_content, str):
            return last_content
        if isinstance(last_content, list):
            for part in last_content:
                if isinstance(part, dict) and part.get("type") == "text":
                    # `or ""` guards against a "text" part whose value is None,
                    # which previously slipped through the `query != ""` check.
                    return part.get("text") or ""
        return ""

    def __call__(self, state, config: RunnableConfig):
        """
        Execute the node.
        Args:
            state: graph state being passed through
            config: runtime configuration
        Returns:
            A dict with the retrieved documents under
            node_params[<node_id>]["out"], or None when there is no query
            text or no documents were found (preserves original behavior).
        """
        input_params = self.input_params_assem(state, self.inputs)
        query = self._extract_query(input_params.get("messages"))
        if query == "":
            return None

        # similarity_search needs the Flask application context carried in state.
        with state.get("app").app_context():
            # Retrieve matching documents from the vector store.
            docs = VectorStoreService.similarity_search(
                knowledge_base=self.knowledge_base, query=query
            )
            # Assemble the retrieval result.
            retrieve_result = [
                {
                    "content": doc.page_content,
                    "source": doc.metadata.get("file_name"),
                }
                for doc in docs
            ]

            if retrieve_result:
                return {
                    "node_params": {
                        self.node_id: {
                            "out": retrieve_result
                        }
                    }
                }
        return None