import os

from langchain_community.embeddings import DashScopeEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import ChatOpenAI

# Minimal RAG demo: embed one fact into FAISS, retrieve it, and stream an
# LLM answer grounded in the retrieved context.
#
# SECURITY FIX: the API key was previously hard-coded in source. It is now
# read from the DASHSCOPE_API_KEY environment variable; fail fast with a
# clear message if it is missing. (DashScopeEmbeddings reads the same
# variable internally.)
api_key = os.getenv("DASHSCOPE_API_KEY")
if not api_key:
    raise RuntimeError("Set the DASHSCOPE_API_KEY environment variable before running.")

llm = ChatOpenAI(
    api_key=api_key,
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    # Change the model name as needed. Model list:
    # https://help.aliyun.com/zh/model-studio/getting-started/models
    model="llama-4-scout-17b-16e-instruct",
    # other params...
)

# Single-document knowledge base; in a real application, load and chunk
# your own corpus here.
vectorstore = FAISS.from_texts(["李华住在北京朝阳小区"], embedding=DashScopeEmbeddings())
retriever = vectorstore.as_retriever()

# Prompt takes two inputs: {content} (retrieved documents) and {question}.
template = """
基于下面的上下文：
{content}
问题:{question}
"""
prompt = ChatPromptTemplate.from_template(template)

# RunnablePassthrough forwards the chain input (the question string)
# unchanged, while the retriever is invoked with the same input to fill
# the {content} slot.
retriever_chain = ({"content": retriever, "question": RunnablePassthrough()} | prompt | llm | StrOutputParser())

# Stream the answer token-by-token to stdout.
for response in retriever_chain.stream("李华住在哪儿？"):
    print(response, end="", flush=True)