from langchain_community.embeddings import DashScopeEmbeddings
from langchain_community.llms.tongyi import Tongyi
from langchain_community.vectorstores import FAISS
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnableParallel, RunnablePassthrough

# Minimal RAG (retrieval-augmented generation) demo:
# 1. index a single Chinese sentence into an in-memory FAISS store,
# 2. retrieve it as context for a question,
# 3. let the Tongyi LLM answer grounded in that context.
#
# NOTE(review): DashScopeEmbeddings/Tongyi presumably read the
# DASHSCOPE_API_KEY environment variable — confirm before deploying.

# Vector store over one document; each text is embedded via DashScope.
db = FAISS.from_texts(
    ['张三在北京工作，一个月30000人民币'],
    embedding=DashScopeEmbeddings(),
)
retriever = db.as_retriever()

# Prompt: answer the question using the retrieved context.
# {context} receives the retriever's documents, {question} the raw query.
template = """
根据下面的内容回答问题
{context}

问题：{question}
"""
prompt = ChatPromptTemplate.from_template(template)

# RunnableParallel fans the same input string out to both branches:
# the retriever fills 'context', RunnablePassthrough forwards the
# untouched question into 'question'. The dict then flows into the
# prompt, the LLM, and finally a plain-string output parser.
chain = (
    RunnableParallel({
        'context': retriever,
        'question': RunnablePassthrough(),
    })
    | prompt
    | Tongyi()
    | StrOutputParser()
)

res = chain.invoke('张三在哪里工作，一个月多少钱？')
print(res)