import os
os.environ["HF_ENDPOINT"] = "https://hf-mirror.com"
from langchain_community.vectorstores import DocArrayInMemorySearch
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnableParallel, RunnablePassthrough
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_openai import ChatOpenAI

from LongChainDemo01.long_chain_base_use import template

# Run a retrieval-augmented generation (RAG) chain: retrieved context is
# injected into the prompt before the question is answered by the LLM.

# In-memory vector store seeded with two demo facts; embeddings are computed
# locally via the sentence-transformers model (downloaded from the HF mirror
# configured at the top of the file).
vectorstore = DocArrayInMemorySearch.from_texts(
    ["咖啡是红黑色", "我的电脑cpu是12核"],
    embedding=HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2"),
)
retriever = vectorstore.as_retriever()

# Prompt template: {context} is filled by the retriever, {question} by the
# caller's input. NOTE: this deliberately shadows the `template` imported
# from LongChainDemo01.long_chain_base_use above.
template = """基于上下文回答问题:
{context}

问题：{question}
"""
prompt = ChatPromptTemplate.from_template(template)

# Moonshot's OpenAI-compatible endpoint. The API key is read from the
# MOONSHOT_API_KEY environment variable; the literal "key" fallback keeps
# the original placeholder behavior but will be rejected by the server —
# never commit a real key to source.
chat_model = ChatOpenAI(
    openai_api_key=os.environ.get("MOONSHOT_API_KEY", "key"),
    openai_api_base="https://api.moonshot.cn/v1",
    model="moonshot-v1-8k",
    temperature=0,       # deterministic answers for this demo
    request_timeout=60,  # seconds before an API call is abandoned
    max_retries=3,
)
output_parser = StrOutputParser()

# Fan-out step: the same input string goes to the retriever (as "context")
# and is passed through unchanged (as "question") — both keys feed the prompt.
setup_and_retrieval = RunnableParallel(
    {
        "context": retriever,
        "question": RunnablePassthrough(),
    }
)

# LCEL pipeline: retrieve -> format prompt -> call model -> extract text.
chain = setup_and_retrieval | prompt | chat_model | output_parser
print(chain.invoke("关于我电脑的cpu?"))