Spaces: Runtime error

gokaygokay committed
Commit 5077254 • 1 Parent(s): 63f8b1f
Update app.py

app.py CHANGED
@@ -15,11 +15,14 @@ hf_hub_download(
     local_dir="./models"
 )
 
-
-
-
-
-
+
+
+hf_hub_download(
+    repo_id="bartowski/gemma-2-27b-it-GGUF",
+    filename="gemma-2-27b-it-Q5_K_M.gguf",
+    local_dir="./models"
+)
+
 
 @spaces.GPU(duration=120)
 def respond(
@@ -33,7 +36,7 @@ def respond(
     top_k,
     repeat_penalty,
 ):
-    chat_template =
+    chat_template = MessagesFormatterType.GEMMA_2
 
     llm = Llama(
         model_path=f"models/{model}",
@@ -90,9 +93,10 @@ demo = gr.ChatInterface(
     respond,
     additional_inputs=[
         gr.Dropdown([
-                'gemma-2-9b-it-Q5_K_M.gguf'
+                'gemma-2-9b-it-Q5_K_M.gguf',
+                'gemma-2-27b-it-Q5_K_M.gguf'
             ],
-            value="gemma-2-
+            value="gemma-2-27b-it-Q5_K_M.gguf",
             label="Model"
         ),
         gr.Textbox(value="You are a helpful assistant.", label="System message"),
@@ -124,7 +128,7 @@ demo = gr.ChatInterface(
     undo_btn="Undo",
     clear_btn="Clear",
     submit_btn="Send",
-    description="
+    description="Chat with Gemma 2 using llama.cpp",
     chatbot=gr.Chatbot(
         scale=1,
         likeable=False,
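For context, here is a minimal sketch (not the Space's exact code) of how the pieces this commit touches typically fit together in these llama.cpp Spaces: llama-cpp-python loads the selected GGUF from ./models, and llama-cpp-agent applies the Gemma 2 chat template selected via MessagesFormatterType.GEMMA_2. Beyond the top_k and repeat_penalty parameters visible in the hunk, the full respond() signature, the n_ctx / n_gpu_layers values, and the streaming loop are assumptions, not taken from the diff.

import spaces
from llama_cpp import Llama
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
from llama_cpp_agent.providers import LlamaCppPythonProvider


@spaces.GPU(duration=120)
def respond(message, history, model, system_message,
            max_tokens, temperature, top_p, top_k, repeat_penalty):
    # The line changed by this commit: format prompts with Gemma 2's chat template.
    chat_template = MessagesFormatterType.GEMMA_2

    llm = Llama(
        model_path=f"models/{model}",  # e.g. models/gemma-2-27b-it-Q5_K_M.gguf
        n_gpu_layers=-1,               # assumption: offload all layers to the GPU
        n_ctx=8192,                    # assumption: context window size
    )
    provider = LlamaCppPythonProvider(llm)

    agent = LlamaCppAgent(
        provider,
        system_prompt=system_message,
        predefined_messages_formatter_type=chat_template,
    )

    settings = provider.get_provider_default_settings()
    settings.temperature = temperature
    settings.top_p = top_p
    settings.top_k = top_k
    settings.max_tokens = max_tokens
    settings.repeat_penalty = repeat_penalty
    settings.stream = True

    # Stream tokens back so gr.ChatInterface can render the reply incrementally
    # (chat-history handling is omitted here for brevity).
    output = ""
    for token in agent.get_chat_response(
        message,
        llm_sampling_settings=settings,
        returns_streaming_generator=True,
        print_output=False,
    ):
        output += token
        yield output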
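And a sketch of the startup and UI side touched by the other hunks, assuming the respond() above. gr.ChatInterface forwards each additional_inputs value to respond() after (message, history), so the dropdown selection arrives as the model argument. The 9B repo_id and the slider names, ranges, and defaults are assumptions (the diff only shows the tail of the first download call and the dropdown/system-message inputs).

import gradio as gr
from huggingface_hub import hf_hub_download

# Fetch both quantized checkpoints into ./models at startup.
hf_hub_download(
    repo_id="bartowski/gemma-2-9b-it-GGUF",   # assumption: repo inferred from the 9B filename
    filename="gemma-2-9b-it-Q5_K_M.gguf",
    local_dir="./models",
)
hf_hub_download(
    repo_id="bartowski/gemma-2-27b-it-GGUF",  # the call added by this commit
    filename="gemma-2-27b-it-Q5_K_M.gguf",
    local_dir="./models",
)

demo = gr.ChatInterface(
    respond,  # the generator sketched above; the dropdown value arrives as its `model` argument
    additional_inputs=[
        gr.Dropdown(
            ["gemma-2-9b-it-Q5_K_M.gguf", "gemma-2-27b-it-Q5_K_M.gguf"],
            value="gemma-2-27b-it-Q5_K_M.gguf",  # new default set by this commit
            label="Model",
        ),
        gr.Textbox(value="You are a helpful assistant.", label="System message"),
        # Sampling controls matching respond()'s remaining parameters; the ranges
        # and defaults below are illustrative assumptions, not from the diff.
        gr.Slider(minimum=1, maximum=4096, value=2048, step=1, label="Max tokens"),
        gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.0, maximum=1.0, value=0.95, step=0.05, label="Top-p"),
        gr.Slider(minimum=0, maximum=100, value=40, step=1, label="Top-k"),
        gr.Slider(minimum=0.0, maximum=2.0, value=1.1, step=0.1, label="Repetition penalty"),
    ],
    description="Chat with Gemma 2 using llama.cpp",
)

if __name__ == "__main__":
    demo.launch()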