from langchain_community.vectorstores import Chroma
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
# 1. Document loading and vectorization.
# NOTE(review): `load_pdf` is not a LangChain API and `embedding_model` is not
# defined in this view — presumably both are defined/imported elsewhere in the
# file; confirm. `load_pdf` is expected to return a list of Document objects.
documents = load_pdf("manual.pdf")
# Embed the documents and build an in-memory Chroma vector store from them.
vectorstore = Chroma.from_documents(documents, embedding_model)
# 2. Build the retrieval pieces of the chain.
retriever = vectorstore.as_retriever()
# Wrap the raw template string in a ChatPromptTemplate so it becomes a
# Runnable. A bare `str` cannot be composed with `|` in the LCEL pipeline
# below — `{...} | "some string"` raises a TypeError at chain-construction
# time. `from_template` infers the {context} and {question} input variables.
# (Template text is kept byte-identical to the original.)
prompt_template = ChatPromptTemplate.from_template(
    """基于以下上下文回答问题：  
{context}  
问题：{question}  
"""
)
# 3. Compose the full RAG chain with LCEL.
# The mapping feeds the prompt's two variables: `retriever` fetches documents
# for the incoming query into {context}, while RunnablePassthrough forwards
# the query string itself into {question}. Composing the dict with `|` relies
# on prompt_template being a Runnable (LCEL coerces the dict automatically).
input_map = {"context": retriever, "question": RunnablePassthrough()}
rag_chain = input_map | prompt_template | llm