from langchain_community.vectorstores import FAISS
from langchain_openai import OpenAIEmbeddings
from langchain_community.embeddings import HuggingFaceEmbeddings

import os
import openai
from dotenv import load_dotenv

load_dotenv()

### Set the OpenAI API key and other credentials from the environment.
os.environ["OPENAI_API_KEY"] = os.environ['user_token']
openai.api_key = os.environ['user_token']
bing_search_api_key = os.environ['bing_api_key']

## Online setup (e.g. when deployed on Hugging Face): use a hosted Chinese embedding model.
embeddings = HuggingFaceEmbeddings(model_name='GanymedeNil/text2vec-large-chinese')
## Offline / alternative embeddings: switch to a local BGE model, or to OpenAI embeddings.
# embeddings = HuggingFaceEmbeddings(model_name='/Users/yunshi/Downloads/360Data/Data Center/Working-On Task/演讲与培训/2023ChatGPT/Coding/RAG/bge-large-zh')  ## Switch to the BGE embedding.
# embeddings = OpenAIEmbeddings(show_progress_bar=True)
# embeddings = OpenAIEmbeddings(disallowed_special=())

## Load the locally saved vector store (it must have been built with the same embedding model).
vector_store = FAISS.load_local("./faiss_index/", embeddings=embeddings, allow_dangerous_deserialization=True)

## Query: "I am a business owner. Which 'inventory'-related data resource rules should I pay attention to?"
user_input = """
我是一个企业主,我需要关注哪些“存货”相关的数据资源规则?
"""

docs = vector_store.similarity_search(user_input)
print(docs)
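
## Not part of the original script: a minimal sketch of how the "./faiss_index/" directory
## could have been built in the first place. The loader, the source file name ("rules.pdf"),
## the chunk sizes, and the helper name build_faiss_index are assumptions for illustration;
## what matters is that the index is built with the same `embeddings` object used at load time.
def build_faiss_index(source_pdf: str = "rules.pdf", index_dir: str = "./faiss_index/") -> None:
    from langchain_community.document_loaders import PyPDFLoader
    from langchain.text_splitter import RecursiveCharacterTextSplitter

    # Load the source document and split it into overlapping chunks for retrieval.
    pages = PyPDFLoader(source_pdf).load()
    chunks = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50).split_documents(pages)

    # Embed the chunks, build the FAISS index, and persist it to disk.
    index = FAISS.from_documents(chunks, embeddings)
    index.save_local(index_dir)

# Example (run once before the retrieval code above):
# build_faiss_index("rules.pdf", "./faiss_index/")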