import os

from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_openai import ChatOpenAI

# 1. Point at a locally hosted, OpenAI-compatible model server.
llm = ChatOpenAI(
    base_url="http://115.190.56.170:8000/v1",
    # SECURITY: avoid hardcoding credentials in source. This local server
    # accepts any non-empty key, so fall back to a placeholder only when
    # OPENAI_API_KEY is not set in the environment.
    api_key=os.environ.get("OPENAI_API_KEY", "sk-4duyWc4ZphbqBIAQytlp3A"),
    model="qwen3-coder-480b-a35b-instruct",
    temperature=0.7,
    streaming=True,
)

# 2. Prompt template (equivalent to the old ConversationChain default):
#    system instruction, then the accumulated history, then the new input.
messages = [
    ("system", "You are a helpful and talkative AI assistant."),
    MessagesPlaceholder(variable_name="history"),
    ("human", "{input}"),
]
prompt = ChatPromptTemplate.from_messages(messages)

# 3. Compose prompt and model into a single runnable chain.
chain = prompt | llm

# 4. Per-session chat histories, keyed by session id.
#    (Swap this dict for Redis, a file store, etc. in a real project.)
store = {}

def get_session_history(session_id: str) -> InMemoryChatMessageHistory:
    """Return the chat history for *session_id*, creating it on first use."""
    history = store.get(session_id)
    if history is None:
        history = InMemoryChatMessageHistory()
        store[session_id] = history
    return history

# Wrap the chain so each invocation loads/saves messages via the session
# history looked up by get_session_history. The two keys name the prompt
# variables that receive the user input and the message history.
chain_with_history = RunnableWithMessageHistory(
    chain,
    get_session_history,
    history_messages_key="history",
    input_messages_key="input",
)

# 5. One-shot invocation; session_id selects which history to use/extend.
session_config = {"configurable": {"session_id": "demo"}}
response = chain_with_history.invoke(
    {"input": "用一句话解释 LangChain"},
    config=session_config,
)

print(response.content)