Update app.py
app.py CHANGED
@@ -18,13 +18,14 @@ from langchain.chains import LLMChain
 import transformers
 
 import transformers
-model_name
-from
-
-
-
+model_name=# Use a pipeline as a high-level helper
+from transformers import pipeline
+
+# Load model directly
+from transformers import AutoTokenizer, AutoModelForCausalLM
+
+tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")
 
-tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
 tokenizer.pad_token = tokenizer.eos_token
 tokenizer.padding_side = "right"
 
@@ -61,7 +62,7 @@ bnb_config = BitsAndBytesConfig(
 # Load pre-trained config
 #################################################################
 model = AutoModelForCausalLM.from_pretrained(
-
+    "mistralai/Mistral-7B-Instruct-v0.1",
 quantization_config=bnb_config,
 )
 # Connect query to FAISS index using a retriever
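Note on the change: the newly added line `model_name=# Use a pipeline as a high-level helper` assigns nothing to `model_name` (the value is commented out), so app.py will fail with a SyntaxError at import time, and the Mistral checkpoint id is then hard-coded separately in the tokenizer and model calls. Below is a minimal sketch of what this loading block appears to be aiming for; the 4-bit quantization values are illustrative assumptions, since the Space's real `bnb_config = BitsAndBytesConfig(...)` is only visible in the hunk header, not in the diff.

import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig

# Single source of truth for the checkpoint id, reused by tokenizer and model.
model_name = "mistralai/Mistral-7B-Instruct-v0.1"

tokenizer = AutoTokenizer.from_pretrained(model_name)
tokenizer.pad_token = tokenizer.eos_token   # Mistral defines no pad token by default
tokenizer.padding_side = "right"

# Assumed 4-bit NF4 settings; replace with the Space's actual BitsAndBytesConfig.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.float16,
)

model = AutoModelForCausalLM.from_pretrained(
    model_name,
    quantization_config=bnb_config,
)

Defining `model_name` once and passing it to both `from_pretrained` calls keeps the tokenizer and the quantized model pointing at the same checkpoint, which is what the removed `tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)` line was already relying on.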