#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time    : 2025/5/19 17:13
@Author  : HZP
@File    : 1.摘要缓冲混合记忆示例.py
"""
from operator import itemgetter

import dotenv
from langchain.memory import ConversationSummaryBufferMemory
from langchain_community.callbacks import get_openai_callback
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnablePassthrough, RunnableLambda
from langchain_openai import ChatOpenAI

dotenv.load_dotenv()

# Prompt template: system persona, the rolling conversation history, and
# finally the latest user question.
prompt = ChatPromptTemplate.from_messages([
    ("system", "你是中电三公司聊天机器人,根据上下文信息回答用户问题"),
    MessagesPlaceholder("history"),
    ("human", "{query}"),
])

# Summary-buffer memory: recent turns are kept verbatim; once the buffer
# exceeds max_token_limit tokens, older turns are condensed into a summary
# by the attached LLM.
summary = ConversationSummaryBufferMemory(
    llm=ChatOpenAI(temperature=0.6, model="qwq"),
    input_key="query",
    return_messages=True,
    max_token_limit=1000,
)

# Chat model that produces the actual answers.
llm = ChatOpenAI(model="qwq", temperature=0.6)

# Assemble the chain: first inject the memory's "history" messages into the
# input dict, then run prompt -> model -> plain-text parser.
load_history = RunnableLambda(summary.load_memory_variables) | itemgetter("history")
lang_chain = (
    RunnablePassthrough.assign(history=load_history)
    | prompt
    | llm
    | StrOutputParser()
)
# Interactive chat loop: read a message, stream the model's reply to stdout,
# then persist the completed turn into the summary-buffer memory so it is
# available as "history" on the next iteration. Type "q" to quit.
while True:
    query = input("Human:")
    if query == "q":
        break
    # Only "query" is consumed by the prompt and the memory (input_key="query");
    # the previous extra "language" key was dead data and has been dropped.
    chain_input = {"query": query}
    print("AI:", flush=True, end="")
    content = ""
    for chunk in lang_chain.stream(chain_input):
        # Consume the whole stream. Do NOT break on falsy chunks: streamed
        # deltas can legitimately be "" and breaking there truncated replies.
        print(chunk, flush=True, end="")
        content += chunk
    # Record the finished exchange; the memory may summarize older turns here.
    summary.save_context(inputs=chain_input, outputs={"output": content})
    print("")
    print("history: ", summary.load_memory_variables({}))
