Spaces:
Running
Running
app.py
CHANGED
|
@@ -37,6 +37,9 @@ def on_prompt_template_change(prompt_template):
|
|
| 37 |
|
| 38 |
def submit_message(user_token, prompt, prompt_template, temperature, max_tokens, state):
|
| 39 |
|
|
|
|
|
|
|
|
|
|
| 40 |
history = state['messages']
|
| 41 |
|
| 42 |
if not prompt:
|
|
@@ -131,7 +134,8 @@ with gr.Blocks(css=css) as demo:
|
|
| 131 |
user_token.change(on_token_change, inputs=[user_token], outputs=[])
|
| 132 |
|
| 133 |
|
| 134 |
-
demo.load(download_prompt_templates, inputs=None, outputs=[prompt_template])
|
| 135 |
|
| 136 |
|
| 137 |
-
demo.
|
|
|
|
|
|
| 37 |
|
| 38 |
def submit_message(user_token, prompt, prompt_template, temperature, max_tokens, state):
|
| 39 |
|
| 40 |
+
if not prompt:
|
| 41 |
+
return
|
| 42 |
+
|
| 43 |
history = state['messages']
|
| 44 |
|
| 45 |
if not prompt:
|
|
|
|
| 134 |
user_token.change(on_token_change, inputs=[user_token], outputs=[])
|
| 135 |
|
| 136 |
|
| 137 |
+
demo.load(download_prompt_templates, inputs=None, outputs=[prompt_template], queue=False)
|
| 138 |
|
| 139 |
|
| 140 |
+
demo.queue(concurrency_count=10)
|
| 141 |
+
demo.launch(height='800px')
|