Spaces: Runtime error
Ibrahimarain committed
Commit: da69772
Parent(s): ac1af52
updated path
app.py CHANGED
@@ -16,15 +16,16 @@ You are a helpful assistant for a crowdfunding platform called GiveSendGo. Your
 """

 model_loc = "models/llama-2-13b-chat.Q5_K_M.gguf"
-
-
-
-
-
-
-
-
-#
+model_loc = "TheBloke/Llama-2-13B-chat-GGUF"
+
+llama = AutoModelForCausalLM.from_pretrained(
+    model_loc,
+    model_type="llama",
+    context_length=4096,
+    max_new_tokens=2048,
+    hf=True
+    # threads=cpu_count,
+)


 # llama = llama_cpp.Llama.from_pretrained(
@@ -39,12 +40,12 @@ model_loc = "models/llama-2-13b-chat.Q5_K_M.gguf"
 # verbose=False
 # )

-llama = Llama(
-    model_path=model_loc,
-    max_tokens=4096,
-    n_ctx=4096,
-    verbose=False,
-)
+# llama = Llama(
+# model_path=model_loc,
+# max_tokens=4096,
+# n_ctx=4096,
+# verbose=False,
+# )

 _ = [elm for elm in prompt_template.splitlines() if elm.strip()]
 stop_string = [elm.split(":")[0] + ":" for elm in _][-2]