from dotenv import load_dotenv
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_community.llms.tongyi import Tongyi
from langchain_core.messages import HumanMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableWithMessageHistory


# Load environment variables from a .env file (e.g. the DashScope API key for Tongyi).
load_dotenv()

# Chatbot example
# 1. Create the model.
# NOTE(review): Tongyi is a plain-text completion LLM, so the chain's outputs are
# plain strings, not chat messages — presumably ChatTongyi would be used if
# message objects were needed; confirm this is intentional.
model = Tongyi(model='qwen-plus')

# Prompt template: a fixed system instruction plus a placeholder that will be
# filled with the (new + historical) messages under the key 'my_message_key'.
prompt_template = ChatPromptTemplate.from_messages([
    ('system', '你是一个乐于助人的助手。用{language}尽你所能的回答所有问题。'),
    MessagesPlaceholder(variable_name='my_message_key')
])

# Build the chain: prompt -> model.
chain = prompt_template | model

# In-memory store of chat histories. key: session id, value: ChatMessageHistory
store: dict = {}


def get_session_history(sessionId: str):
    """Return the chat history for *sessionId*, creating an empty one on first use."""
    return store.setdefault(sessionId, ChatMessageHistory())


# Wrap the chain so every call automatically loads the session's history before
# invoking and appends the new exchange afterwards.
do_message = RunnableWithMessageHistory(
    chain,
    get_session_history,
    input_messages_key='my_message_key'  # key in the input dict that carries the new messages
)

config = {'configurable': {'session_id': 'sessionId_123456'}}


def _ask(text: str):
    """Send one human message through the history-aware chain, answering in Chinese."""
    payload = {
        'my_message_key': [HumanMessage(content=text)],
        'language': '中文',
    }
    return do_message.invoke(payload, config=config)


# Round 1: introduce ourselves so there is something in the history to recall.
resp1 = _ask('你好吖，我是Gary！')
print(resp1)
print(type(resp1))

# Round 2: same session id, so the model sees round 1 in its history.
resp2 = _ask('你还知道我是谁吗？')
print(resp2)

# print(store)


# A fresh session id: this round starts from an empty history.
config = {'configurable': {'session_id': 'sessionId_123456777'}}

# Round 3: stream the answer chunk by chunk (reply language switched to English).
stream_input = {
    'my_message_key': [HumanMessage(content='请给我讲一个小笑话。')],
    'language': 'English',
}
for token in do_message.stream(input=stream_input, config=config):
    # Each yielded item is one token of the response.
    print(token, end='-')
