import os

from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_core.prompts import MessagesPlaceholder  # key import: slot for injected history messages
from langchain_core.runnables.history import RunnableWithMessageHistory  # correct import path for history wrapper
from langchain_openai import ChatOpenAI
from langchain_core.messages import SystemMessage, HumanMessage  # previously missing import
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from fastapi import FastAPI
from langserve import add_routes

# Configuration for LangSmith tracing and the OpenAI-compatible endpoint.
#
# SECURITY: API keys were hard-coded here and committed to source — they must be
# rotated and supplied via the real environment (or a .env file) instead.
# The literal values are kept only so the demo still runs out of the box;
# setdefault() ensures any value already present in the environment wins and
# is not silently clobbered by these fallbacks.
os.environ.setdefault("LANGCHAIN_TRACING_V2", "true")
os.environ.setdefault("LANGCHAIN_PROJECT", "LangChainDemo")
os.environ.setdefault("LANGCHAIN_API_KEY", "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")
os.environ.setdefault("LLM_MODEL", "Qwen/QwQ-32B")
os.environ.setdefault("API_URL", "https://api.siliconflow.cn/v1/")
os.environ.setdefault("API_KEY", "sk-wcsjgztdbpewpycuviarliglbfrcbpnxqluwncnvplmjnezy")

# Chat-bot demo
# 1. Build the chat-model client against the configured OpenAI-compatible endpoint.
_api_base = os.environ["API_URL"]
_api_key = os.environ["API_KEY"]
_model = os.environ["LLM_MODEL"]

llm_obj = ChatOpenAI(
    openai_api_base=_api_base,
    openai_api_key=_api_key,
    model_name=_model,
)

# 2. Prompt: a system instruction parameterised by {language}, followed by the
#    running conversation injected under the "my_msg" key.
_prompt_messages = [
    ("system", "你是一个乐于助人的助手，用{language}尽你所能回答所有问题"),
    MessagesPlaceholder(variable_name="my_msg"),
]
prompt_template = ChatPromptTemplate.from_messages(_prompt_messages)

# 3. Compose prompt -> model into a single runnable chain.
chain = prompt_template | llm_obj

# In-memory chat-history store shared by every user:
# key = session/user id, value = that session's ChatMessageHistory.
store = {}


def get_session_history(session_id: str):
    """Return the message history for *session_id*, creating it on first use."""
    history = store.get(session_id)
    if history is None:
        history = ChatMessageHistory()
        store[session_id] = history
    return history

# Session id is routed to get_session_history through the "configurable" dict.
config = {"configurable": {"session_id": "zs123"}}

# Wrap the chain so every invocation loads, and then appends to, the
# per-session message history.
do_message = RunnableWithMessageHistory(
    chain,
    get_session_history,
    # key under which each call supplies the newly sent message(s)
    input_messages_key="my_msg",
)

# Round 1: introduce ourselves; the exchange is recorded in the session history.
first_input = {
    "my_msg": [HumanMessage(content="你好，我是欢欢")],
    "language": "中文",
}
response1 = do_message.invoke(first_input, config=config)
print(response1.content)

# Round 2: the model should recall the name from the stored history.
second_input = {
    "my_msg": [HumanMessage(content="请问我得名字是什么？")],
    "language": "中文",
}
response2 = do_message.invoke(second_input, config=config)
print(response2.content)

# Round 3: stream the answer token by token, under a fresh session id.
config = {"configurable": {"session_id": "ccc123"}}
stream_input = {
    "my_msg": [HumanMessage(content="请给我讲一个笑话")],
    "language": "English",
}
for resp in do_message.stream(stream_input, config=config):
    # each chunk carries one token
    print(resp.content, end="-")