"""Minimal RAG example with LangChain Expression Language (LCEL).

Embeds two short texts into an in-memory FAISS index, retrieves the
context relevant to a question, and has a chat model answer from it.
Requires OpenAI API access (embeddings + chat completion).
"""
# NOTE: `langchain.embeddings` is deprecated (removed in langchain >= 0.2);
# import from `langchain_community` instead, consistent with the FAISS import below.
from langchain_community.embeddings import OpenAIEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough

from RAG.LangchainBase.langchain_rag_openai2 import model

# Two toy documents: "Harrison worked at Kensho" / "Bears like to eat honey".
texts = ["哈里森曾在 Kensho 工作", "熊喜欢吃蜂蜜"]
vectorstore = FAISS.from_texts(texts, embedding=OpenAIEmbeddings())
retriever = vectorstore.as_retriever()

# Template (Chinese): "Answer the question based on the following context:
# {context}\nQuestion: {question}"
prompt = ChatPromptTemplate.from_template(
    "根据以下上下文回答问题：{context}\n问题：{question}"
)

# LCEL pipeline: the retriever fills {context} with the matching documents,
# while RunnablePassthrough forwards the raw input string as {question}.
chain = (
    {"context": retriever, "question": RunnablePassthrough()}
    | prompt
    | model
    | StrOutputParser()
)

# Question: "Where did Harrison work?"
result = chain.invoke("哈里森在哪里工作？")
print(result)
# Expected answer: Kensho — the retrieved context states Harrison worked there.