from langchain.chat_models import ChatOllama
from langchain.memory import ConversationBufferMemory
from langchain.prompts import SystemMessagePromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, ChatPromptTemplate
from langchain.agents import AgentExecutor, ConversationalChatAgent, AgentOutputParser, create_structured_chat_agent
from xbase_api.model_api.xbase_model_get_model_detail import get_xbaes_model_detail

# LLM backend: local llama3 served through Ollama. Low temperature keeps
# tool-selection decisions close to deterministic.
llm = ChatOllama(model='llama3', temperature=0.1, verbose=True)

# System prompt prefix (in Chinese): "Answer the following questions in
# Chinese as much as possible. You may use the following tools."
SYSTEM_MESSAGE_PREFIX = """尽可能用中文回答以下问题。您可以使用以下工具"""

# Conversation buffer so the agent keeps context across turns.
# ConversationalChatAgent's prompt expects the history under the
# "chat_history" key as message objects, hence return_messages=True.
memory = ConversationBufferMemory(memory_key="chat_history",
                                  return_messages=True)

# Tools exposed to the agent (project-local model-detail lookup).
tools = [get_xbaes_model_detail()]

# Build the conversational agent.
# NOTE: memory is deliberately NOT passed here — from_llm_and_tools only
# assembles the prompt/LLM chain; memory belongs to the AgentExecutor below.
# The agent uses its default ConvoOutputParser; passing the abstract
# AgentOutputParser class (uninstantiated) to AgentExecutor, as the old code
# did, is rejected by its pydantic model and crashed at startup.
chat_agent = ConversationalChatAgent.from_llm_and_tools(
    system_message=SYSTEM_MESSAGE_PREFIX,
    llm=llm,
    tools=tools,
    verbose=True)

# Executor: owns the memory, caps the reasoning loop at 3 iterations, and
# recovers from malformed LLM output instead of raising mid-conversation.
agent_executor = AgentExecutor.from_agent_and_tools(
    agent=chat_agent,
    tools=tools,
    memory=memory,
    verbose=True,
    max_iterations=3,
    handle_parsing_errors=True)