#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time    : 2025/5/19 17:12
@Author  : HZP
@File    : 1.缓冲窗口记忆示例.py
"""
from operator import itemgetter

from dotenv import load_dotenv
from langchain_core.output_parsers import StrOutputParser

from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.memory import ConversationTokenBufferMemory
from langchain_core.runnables import RunnablePassthrough, RunnableLambda
from langchain_openai import ChatOpenAI

load_dotenv()

# Chat prompt: fixed system persona, injected conversation history,
# then the current user turn.
_prompt_messages = [
    ("system", "你是中电三公司聊天机器人,根据上下文信息回答用户问题"),
    MessagesPlaceholder("history"),
    ("human", "{query}"),
]
prompt = ChatPromptTemplate.from_messages(_prompt_messages)
# Chat model shared by the answer chain and the memory's token counter.
# (The original built two identical ChatOpenAI clients; one instance is enough.)
llm = ChatOpenAI(model="qwq", temperature=0.6)

# Token-bounded conversation memory: keeps the most recent turns whose
# combined token count (measured with `llm`) stays under max_token_limit.
buffer_memory = ConversationTokenBufferMemory(
    llm=llm,                  # used only to count tokens when trimming history
    max_token_limit=1000,     # history budget, in tokens
    return_messages=True,     # return message objects, not one flat string
    input_key="query",        # which input field save_context() records
)
# LCEL pipeline: pull stored history into the inputs, render the prompt,
# invoke the model, and parse the reply down to a plain string.
_load_history = (
    RunnableLambda(buffer_memory.load_memory_variables) | itemgetter("history")
)
chain = (
    RunnablePassthrough.assign(history=_load_history)
    | prompt
    | llm
    | StrOutputParser()
)

# Interactive chat loop: stream each answer token-by-token, then persist
# the exchange into memory so the next turn sees it as history.
while True:
    query = input("Human:")
    if query == "q":  # sentinel to quit
        break
    # NOTE: the prompt has no {language} variable and input_key="query"
    # means memory ignores extras, so the old "language" key was dead.
    chain_input = {"query": query}
    print("AI:", flush=True, end="")
    content = ""
    # BUG FIX: an empty chunk is normal in a StrOutputParser stream; the
    # original `break` here truncated the answer at the first empty chunk.
    for chunk in chain.stream(chain_input):
        if not chunk:
            continue
        print(chunk, flush=True, end="")
        content += chunk
    # Record the completed turn (query + full answer) into memory.
    buffer_memory.save_context(inputs=chain_input, outputs={"output": content})
    print("")
    print("history: ", buffer_memory.load_memory_variables({}))