from langchain.llms import LlamaCpp
#from langchain import HuggingFacePipeline
from langchain.chains import ConversationalRetrievalChain
from huggingface_hub import hf_hub_download
import psutil
import os
#offload_path = "offload"

def get_chain(vectorstore):
    # Download the quantized Vicuna-7B GGML model if it is not already present locally.
    if not os.path.exists("ggml-vic7b-q5_1.bin"):
        hf_hub_download(repo_id="eachadea/ggml-vicuna-7b-1.1", filename="ggml-vic7b-q5_1.bin", local_dir=".")
    # Run the model with llama.cpp, using one thread per physical CPU core.
    llm = LlamaCpp(model_path="ggml-vic7b-q5_1.bin", n_ctx=2048, n_threads=psutil.cpu_count(logical=False))
    #if not os.path.exists(offload_path):
    #    os.makedirs(offload_path)
    #llm = HuggingFacePipeline.from_model_id(model_id="lmsys/fastchat-t5-3b-v1.0",
    #                                        task="text2text-generation",
    #                                        model_kwargs={"max_length": 512,
    #                                                      "device_map": "auto",
    #                                                      "offload_folder": "offload"
    #                                                      }
    #                                        )
    # Build a conversational retrieval-augmented QA chain over the vectorstore.
    qa_chain = ConversationalRetrievalChain.from_llm(
        llm,
        vectorstore.as_retriever(),
    )
    return qa_chain
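
# Illustrative usage sketch (not part of the original file): assumes a vectorstore
# built elsewhere, e.g. with FAISS and HuggingFaceEmbeddings; the text and question
# below are placeholders.
#
# from langchain.embeddings import HuggingFaceEmbeddings
# from langchain.vectorstores import FAISS
#
# embeddings = HuggingFaceEmbeddings()
# vectorstore = FAISS.from_texts(["Example document text."], embeddings)
# chain = get_chain(vectorstore)
# result = chain({"question": "What does the document say?", "chat_history": []})
# print(result["answer"])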