Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -19,7 +19,11 @@ hf_hub_download(
     filename="helpingai2-4x6b-q4_k_m.gguf",
     local_dir="./models"
 )
-
+hf_hub_download(
+    repo_id="Abhaykoul/HelpingAI2-9B-Q5_0-GGUF",
+    filename="helpingai2-9b-q5_0.gguf",
+    local_dir="./models"
+)
 
 llm = None
 llm_model = None
@@ -95,7 +99,7 @@ def respond(
         outputs += output
         yield outputs
 
-description = "Defualt to
+description = "Defualt to 9B in Additional Inputs you can change model"
 
 
 demo = gr.ChatInterface(
@@ -104,8 +108,9 @@ demo = gr.ChatInterface(
         gr.Dropdown([
             'helpingai-6b-q4_k_m.gguf',
             'helpingai2-4x6b-q4_k_m.gguf',
+            'helpingai2-9b-q5_0.gguf'
        ],
-        value="
+        value="helpingai2-9b-q5_0.gguf",
        label="Model"
        ),
        gr.Textbox(value="You are HelpingAI a emotional AI always answer my question in HelpingAI style", label="System message"),
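For context, a minimal sketch of how a Space like this typically wires these pieces together: hf_hub_download fetches the GGUF files at startup, and respond() lazily (re)loads whichever file the Model dropdown selects via llama-cpp-python before streaming the reply. The respond() body, its parameter order, and the n_ctx value below are assumptions for illustration; they are not part of this diff.

```python
# Illustrative sketch only: the Space's actual respond() is not shown in this diff.
# Assumes llama-cpp-python and the lazy-reload pattern implied by the
# llm / llm_model globals; respond()'s signature here is hypothetical.
import gradio as gr
from llama_cpp import Llama

llm = None
llm_model = None

description = "Defualt to 9B in Additional Inputs you can change model"

def respond(message, history, model, system_message):
    global llm, llm_model
    # Reload the GGUF only when the Dropdown selection changes, so picking
    # helpingai2-9b-q5_0.gguf uses the file added by the new hf_hub_download call.
    if llm is None or llm_model != model:
        llm = Llama(model_path=f"./models/{model}", n_ctx=4096)  # n_ctx is an assumption
        llm_model = model

    # Rebuild the chat as OpenAI-style messages for create_chat_completion.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    # Stream tokens back to the ChatInterface as they are generated.
    outputs = ""
    for chunk in llm.create_chat_completion(messages=messages, stream=True):
        delta = chunk["choices"][0]["delta"]
        if "content" in delta:
            outputs += delta["content"]
            yield outputs

demo = gr.ChatInterface(
    respond,
    description=description,
    additional_inputs=[
        gr.Dropdown(
            ["helpingai-6b-q4_k_m.gguf",
             "helpingai2-4x6b-q4_k_m.gguf",
             "helpingai2-9b-q5_0.gguf"],
            value="helpingai2-9b-q5_0.gguf",
            label="Model",
        ),
        gr.Textbox(
            value="You are HelpingAI a emotional AI always answer my question in HelpingAI style",
            label="System message",
        ),
    ],
)
```

With that pattern, supporting the 9B model needs only the changes in this commit: the extra hf_hub_download call, the new Dropdown entry, and the updated default value.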