Spaces:
Runtime error
Runtime error
changes to run on cpu
Browse files- chatBot/common/llama.py +5 -5
chatBot/common/llama.py
CHANGED
@@ -7,7 +7,7 @@ from langchain.embeddings import HuggingFaceInstructEmbeddings
|
|
7 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
8 |
from langchain.vectorstores import Chroma
|
9 |
from pdf2image import convert_from_path
|
10 |
-
from transformers import AutoTokenizer, TextStreamer, pipeline,
|
11 |
from chatBot.common.pdfToText import loadLatestPdf
|
12 |
from transformers import LlamaTokenizer
|
13 |
from langchain.document_loaders import PyPDFLoader
|
@@ -44,10 +44,10 @@ tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True)
|
|
44 |
# quantize_config=None,
|
45 |
# )
|
46 |
|
47 |
-
model =
|
48 |
-
|
49 |
-
|
50 |
-
)
|
51 |
|
52 |
|
53 |
DEFAULT_SYSTEM_PROMPT = """
|
|
|
7 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
8 |
from langchain.vectorstores import Chroma
|
9 |
from pdf2image import convert_from_path
|
10 |
+
from transformers import AutoTokenizer, TextStreamer, pipeline, AutoModelForCausalLM
|
11 |
from chatBot.common.pdfToText import loadLatestPdf
|
12 |
from transformers import LlamaTokenizer
|
13 |
from langchain.document_loaders import PyPDFLoader
|
|
|
44 |
# quantize_config=None,
|
45 |
# )
|
46 |
|
47 |
+
model = AutoModelForCausalLM.from_pretrained(model_name_or_path,
|
48 |
+
device_map="auto",
|
49 |
+
trust_remote_code=False,
|
50 |
+
revision="main")
|
51 |
|
52 |
|
53 |
DEFAULT_SYSTEM_PROMPT = """
|