## openai_server.py
from functools import lru_cache
from typing import Dict

from elasticsearch import Elasticsearch
from langchain.agents import AgentExecutor, create_tool_calling_agent
from langchain.prompts import PromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    MessagesPlaceholder,
    PromptTemplate,
    SystemMessagePromptTemplate,
)
from langchain_core.runnables import Runnable, RunnablePassthrough
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_core.tools import Tool, create_retriever_tool
from langchain_elasticsearch import ElasticsearchRetriever, ElasticsearchStore
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_openai import ChatOpenAI
from langgraph.graph.graph import CompiledGraph

import config
from server.openai_server import create_server, get_session_history
from tools.BaiduSearch import BaiduSearch
# Module-level singleton built once at import time; provides the shared
# LLM and Elasticsearch vector store used by the chains/agent below.
ai_server_instance = create_server()

# system_prompt = """
# You are an intelligent assistant named 'LittleBoss', with the goal of providing accurate and useful information while maintaining friendly and humorous conversations. You need to provide the following functions:
# The following is the input provided by the user '{user_id}':
# "{input}"
# If the user's intention is to query information related to herself/himself, please use the 'Personal Knowledge Search' tool to query the knowledge base,
# Else if the user is searching for basic knowledge or authoritative sources, please use the 'baidu_search' tool to query Baidu,
# Otherwise, call the LLM model to meet the user's needs
# #Constraint
# -Communicate only in the language used by the user.
# -Respect user privacy and do not ask or comment on sensitive personal information unless the user voluntarily shares it.
# -Maintain the coherence of the conversation, refer to the information in the previous text, in order to provide users with a consistent experience.
# #Attention
# -Remember, your goal is to become a friendly friend and knowledge partner to users, helping them solve problems while also enjoying the fun of conversation.
# """

# System prompt for the "LittleBoss" agent (Chinese; English draft kept above).
# BUG FIX: the original used full-width braces ｛user_id｝/｛input｝, which
# PromptTemplate does not treat as placeholders — the input_variables declared
# for this template were therefore never substituted. They must be ASCII { }.
system_prompt = """
您是一位名为“LittleBoss”的智能助理，其目标是提供准确有用的信息，同时保持友好幽默的对话。您需要提供以下功能：
以下是用户“{user_id}”提供的输入：
“{input}”
如果用户的意图是查询与自己相关的信息，请使用“个人知识搜索”工具查询知识库，
否则，如果您正在搜索基础知识或权威来源，请使用“baidu_search”工具查询百度，
否则，调用LLM模型以满足用户的需求
#约束条件
-仅使用用户使用的语言进行交流。
-尊重用户隐私，除非用户自愿分享，否则不要询问或评论敏感的个人信息。
-保持对话的连贯性，参考前一篇文章中的信息，以便为用户提供一致的体验。
#注意
-记住，你的目标是成为用户的友好朋友和知识伙伴，帮助他们解决问题，同时享受对话的乐趣。
"""
class ChainServer:
    """Singleton container for the QA chain and the agent runnables.

    Only the first construction populates the attributes; every later
    construction returns the same instance unchanged.
    """

    _instance = None  # class-level slot holding the single instance

    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            # Mark as not-yet-initialized; __init__ flips this exactly once.
            cls._instance.__initialized = False
        return cls._instance

    def __init__(self, qa_chain: "Runnable",
                 agent_executor: "CompiledGraph",
                 agent_with_chat_history: "RunnableWithMessageHistory"):
        # BUG FIX: the original checked hasattr(self, '__initialized'), which
        # is never true — the attribute is name-mangled to
        # _ChainServer__initialized — so every construction silently
        # re-initialized the singleton. Reading self.__initialized here
        # mangles the name consistently with __new__.
        if not self.__initialized:
            self.qa_chain = qa_chain
            self.agent_executor = agent_executor
            self.agent_with_chat_history = agent_with_chat_history
            self.__initialized = True


def create_chain_server():
    """Assemble the singleton ChainServer.

    Builds: a retrieval-augmented QA chain, a tool-calling agent
    (Baidu search + personal-knowledge retriever), and a history-aware
    wrapper around the agent executor.

    Returns:
        ChainServer: singleton holding qa_chain, agent_executor and
        agent_with_chat_history.
    """
    # Retriever over the ES knowledge base, exposed as an agent tool.
    # BUG FIX: OpenAI tool names must match ^[a-zA-Z0-9_-]+$; the original
    # name "Personal Knowledge Search" (with spaces) is rejected by the API.
    retriever = ai_server_instance.elastic_vector.as_retriever()
    retriever_tool = create_retriever_tool(
        retriever,
        "personal_knowledge_search",
        "Search for information about customer. For any questions about personal knowledge, you must use this tool!",
    )

    # Plain retrieval-augmented QA chain (no agent involved).
    qa_chain = create_qa_chain(ai_server_instance.llm, ai_server_instance.elastic_vector)

    # Web-search tool (Baidu).
    baidusearch = BaiduSearch()
    tools = [baidusearch, retriever_tool]

    # Chat prompt: system instructions + optional history + user turn +
    # agent scratchpad (required by create_tool_calling_agent).
    prompt = ChatPromptTemplate.from_messages(
        [SystemMessagePromptTemplate(prompt=PromptTemplate(input_variables=['user_id', 'input'], template=system_prompt)),
         MessagesPlaceholder(variable_name='chat_history', optional=True),
         HumanMessagePromptTemplate(
             prompt=PromptTemplate(
                 input_variables=['input'],
                 template="""{input}"""
             )
         ),
         MessagesPlaceholder(variable_name='agent_scratchpad')])

    # Tool-calling agent and its executor.
    agent = create_tool_calling_agent(ai_server_instance.llm, tools, prompt)
    agent_executor = AgentExecutor(agent=agent, tools=tools)

    # Wrap the executor with per-session chat-history management.
    agent_with_chat_history = RunnableWithMessageHistory(
        agent_executor,
        get_session_history,
        input_messages_key="input",
        history_messages_key="chat_history",
    )

    server = ChainServer(
        qa_chain=qa_chain,
        agent_executor=agent_executor,
        agent_with_chat_history=agent_with_chat_history,
    )

    return server


@lru_cache(maxsize=1)
def _get_query_embeddings():
    """Load the local embedding model once and reuse it across queries."""
    # Must be the same model that produced the indexed "_vector" field.
    return HuggingFaceEmbeddings(model_name="./bert-base-chinese")


def hybrid_query(search_query: str) -> Dict:
    """Build an Elasticsearch hybrid (RRF) query body for *search_query*.

    Combines a standard BM25 match on ``metadata.content`` with a kNN
    search on the ``_vector`` field, fused with reciprocal rank fusion.

    Args:
        search_query: the user's natural-language query.

    Returns:
        Dict: request body suitable for ElasticsearchRetriever's body_func.
    """
    # PERF FIX: the original constructed HuggingFaceEmbeddings (loading the
    # BERT model from disk) on every call; the cached helper loads it once.
    vector = _get_query_embeddings().embed_query(search_query)
    return {
        "retriever": {
            "rrf": {
                "retrievers": [
                    {
                        "standard": {
                            "query": {
                                "match": {
                                    "metadata.content": search_query,
                                }
                            }
                        }
                    },
                    {
                        "knn": {
                            "field": "_vector",
                            "query_vector": vector,
                            "k": 5,
                            "num_candidates": 10,
                        }
                    },
                ]
            }
        }
    }


def create_qa_chain(llm: ChatOpenAI, elastic_vector: ElasticsearchStore):
    """Build a retrieval-augmented QA chain: retrieve -> prompt -> LLM -> str.

    Args:
        llm: chat model that answers the question.
        elastic_vector: ES vector store (kept for interface compatibility;
            retrieval now goes through the hybrid retriever below).

    Returns:
        Runnable: chain taking a question string and returning the answer text.
    """
    # Hybrid (BM25 + kNN, RRF-fused) retriever over the knowledge-base index.
    # BUG FIX: the original built this retriever and then never used it.
    hybrid_retriever = ElasticsearchRetriever.from_es_params(
        index_name=config.ELASTICSEARCH_KNOWLEDGE_BASE_INDEX,
        body_func=hybrid_query,
        content_field="metadata",
        url=config.ELASTICSEARCH_HOST,
    )

    # BUG FIX: the original built the prompt from system_prompt, whose
    # placeholders (user_id/input) never match the {context}/{question}
    # values this chain supplies — so the retrieved context and the question
    # never reached the model. Use a dedicated RAG prompt instead.
    prompt = ChatPromptTemplate.from_template(
        "请根据以下检索到的上下文回答问题。若上下文不足以回答，请如实说明。\n\n"
        "上下文：\n{context}\n\n"
        "问题：{question}"
    )

    retrieval_chain = (
            {"context": hybrid_retriever,
             "question": RunnablePassthrough(),
             }
            | prompt
            | llm
            | StrOutputParser()
    )

    return retrieval_chain
