# Install dependencies: pip install langchain_community langchain_openai
import os

from langchain_openai import AzureChatOpenAI
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_core.messages import HumanMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableWithMessageHistory
from init_llm import get_llm

llm = get_llm()

prompt = ChatPromptTemplate.from_messages([
    ("system", "你是一个{role}专家，请用你的专业知识回答问题"),
    MessagesPlaceholder(variable_name="chat_history"),
    ("human", "{question}")
])

#  prompt模板链式调用
chain = prompt | llm

# 存储聊天记录
store = {}


def get_session_history(session_id: str):
    if session_id not in store:
        store[session_id] = ChatMessageHistory()
    return store[session_id]


do_message = RunnableWithMessageHistory(
    chain,
    get_session_history,
    input_messages_key="question",  # 每次聊天时发送消息的key
    history_messages_key="chat_history",
)

config = {'configurable': {'session_id': 'zs123'}}

# 第1轮
resp = do_message.invoke(
    {
        "role": "互联网技术研发",
        "question": "学习langchain需要哪些步骤"
    },
    config
)
print(resp.content)

# 第2轮
resp = do_message.invoke(
    {
        "role": "互联网技术研发",
        "question": "什么"
    },
    config
)
print(resp.content)

# 第3轮 流式返回
for resp in do_message.stream(
        {
                "role": "互联网技术研发",
                "question": "再说一遍"
        },
        config
):
    print(resp.content, end='-')
