from langchain_community.chat_models import ChatZhipuAI
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_core.runnables import RunnableWithMessageHistory
from tools import get_random_number
from langgraph.prebuilt import chat_agent_executor

# Tools the agent may call; get_random_number is a project-local tool
# (imported from tools.py above).
tools = [get_random_number]
# Zhipu AI GLM-4-Plus chat model; temperature 0.5 trades off determinism
# against variety in the generated replies.
model = ChatZhipuAI(
    model="glm-4-plus",
    temperature=0.5
)

# Prompt template for the game agent. The system message (in Chinese)
# instructs the model to play a best-of-three number game: generate a
# random number, compare it with the user's number, award a point to the
# larger side (one point each on a tie), and report the running score.
# "history" is populated by RunnableWithMessageHistory; "input" carries
# the current user turn.
prompt = ChatPromptTemplate.from_messages([
    ("system", """
    请生成一个随机数，并比较用户输入的数字，
    当谁的数字大于另一方，则加一分，
    如果平局则双方各加一分，
    显示双方的比分
    三局两胜制
    """),
    MessagesPlaceholder("history"),
    ("human", "{input}")
])

store = {}


def get_session_history(session_id: str):
    """Return the chat history for *session_id*, creating it on first use."""
    history = store.get(session_id)
    if history is None:
        history = ChatMessageHistory()
        store[session_id] = history
    return history


# Build a tool-calling agent graph from the model and its tools.
# NOTE(review): chat_agent_executor.create_tool_calling_executor is
# deprecated in recent langgraph releases in favor of
# langgraph.prebuilt.create_react_agent — confirm the pinned langgraph
# version before upgrading.
chain = chat_agent_executor.create_tool_calling_executor(model, tools)

# Wrap prompt -> agent with per-session message history; history is keyed
# by the "session_id" passed in the invoke config.
# NOTE(review): the agent graph consumes {"messages": [...]} state while
# `prompt` emits a ChatPromptValue — verify this coercion works on the
# installed langchain/langgraph versions.
chat_with_history = RunnableWithMessageHistory(
    prompt | chain,
    get_session_history,
    input_messages_key="input",
    history_messages_key="history"
)

# Interactive REPL: read a line, run it through the history-aware agent,
# and print the assistant's final message. Type "q" to quit.
# Fixes vs. original: the loop-invariant config dict is hoisted out of the
# loop, Ctrl-D/Ctrl-C no longer crash with a traceback, and blank input is
# skipped instead of being sent to the model as an empty turn.
config = {"configurable": {"session_id": "abc123"}}
while True:
    try:
        user_input = input("请输入内容:")
    except (EOFError, KeyboardInterrupt):
        break  # end the session cleanly on Ctrl-D / Ctrl-C
    user_input = user_input.strip()
    if user_input == "q":
        break
    if not user_input:
        continue  # ignore empty lines
    response = chat_with_history.invoke({"input": user_input}, config=config)
    # The agent returns graph state; the last message is the final reply.
    print(response['messages'][-1].content)