Update app.py
Browse files
app.py
CHANGED
@@ -105,8 +105,8 @@ def respond(
     n_gpu_layers=0,
     n_batch=8,
     n_ctx=2048,
-    n_threads=
-    n_threads_batch=
 )
 llm_model = model
 provider = LlamaCppPythonProvider(llm)
@@ -165,11 +165,7 @@ def respond(
 # Create a chat interface
 demo = gr.ChatInterface(
     respond,
-    examples=[
-        ["What is the capital of France?"],
-        ["Tell me something about artificial intelligence."],
-        ["What is gravity?"],
-    ],
     additional_inputs_accordion=gr.Accordion(
         label="⚙️ Parameters", open=False, render=False
     ),
@@ -105,8 +105,8 @@ def respond( (after)
     n_gpu_layers=0,
     n_batch=8,
     n_ctx=2048,
+    n_threads=8,
+    n_threads_batch=8,
 )
 llm_model = model
 provider = LlamaCppPythonProvider(llm)
@@ -165,11 +165,7 @@ def respond( (after)
 # Create a chat interface
 demo = gr.ChatInterface(
     respond,
+    examples=[["What is the capital of France?"], ["Tell me something about artificial intelligence."], ["What is gravity?"]],
     additional_inputs_accordion=gr.Accordion(
         label="⚙️ Parameters", open=False, render=False
     ),