import os
from operator import itemgetter

from langchain.memory import ConversationBufferMemory
from langchain_community.chat_message_histories import RedisChatMessageHistory
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableLambda, RunnablePassthrough, RunnableWithMessageHistory
from langchain_openai import ChatOpenAI

from localModel.LimitRedisHistory import LimitedRedisChatMessageHistory
from localModel.RedisHistoryWindow import SmartRedisChatMessageHistory

# Chat model served through DashScope's OpenAI-compatible endpoint.
# SECURITY NOTE(review): the API key was previously hardcoded in source; it is
# kept only as a fallback for backward compatibility. Prefer setting the
# DASHSCOPE_API_KEY environment variable and removing the literal entirely.
llm = ChatOpenAI(
    api_key=os.getenv("DASHSCOPE_API_KEY", "sk-a3f7718fb81f43b2915f0a6483b6661b"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    # Swap the model name as needed. Model list:
    # https://help.aliyun.com/zh/model-studio/getting-started/models
    model="llama-4-scout-17b-16e-instruct",
    # other params...
)

# System persona + prior turns (filled from Redis history) + current user input.
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "你是一个乐于助人的机器助手"),
        MessagesPlaceholder(variable_name="history"),
        ("human", "{input}"),
    ]
)

# Redis connection for chat history storage (DB 0).
# SECURITY NOTE(review): password is embedded in the URL — consider loading it
# from configuration/environment instead of source control.
REDIS_URL = "redis://:123456@localhost:6379/0"
def get_by_session_id(session_id: str) -> SmartRedisChatMessageHistory:
    """Return the chat message history backing the given session.

    Uses SmartRedisChatMessageHistory, which overrides the ``messages``
    property of RedisChatMessageHistory to cap how many past turns are
    handed to the model (here the most recent 20), while the full
    conversation history is still persisted in Redis.

    Args:
        session_id: Identifier keying the conversation in Redis.

    Returns:
        A SmartRedisChatMessageHistory bound to ``session_id``.
    """
    # NOTE: the return annotation previously named LimitedRedisChatMessageHistory,
    # which did not match the actual return type — fixed to the real type.
    return SmartRedisChatMessageHistory(
        session_id=session_id, url=REDIS_URL, chat_memory_window=20
    )
# Compose the prompt with the model, then wrap the chain so each call
# transparently loads and saves its turn history via Redis.
chain = prompt | llm

agent_with_history = RunnableWithMessageHistory(
    chain,
    get_by_session_id,
    input_messages_key="input",
    history_messages_key="history",
)

# All calls below share one conversation, keyed by session_id "foo".
_session_config = {"configurable": {"session_id": "foo"}}

# First turn: introduce the user (seeds the stored history).
inputs = {"input": "你好，我是李华"}
res = agent_with_history.invoke(inputs, config=_session_config)

# Second turn: the model should recall the name from the saved history.
print(
    agent_with_history.invoke({"input": "你好，我是谁？"}, config=_session_config)
)
