gradio 5
- README.md +1 -1
- app.py +10 -10
- requirements.txt +1 -1
README.md
CHANGED
@@ -4,7 +4,7 @@ emoji: 🤖
 colorFrom: yellow
 colorTo: gray
 sdk: gradio
-sdk_version:
+sdk_version: 5.1
 app_file: app.py
 pinned: false
 license: mit
app.py
CHANGED
@@ -161,8 +161,8 @@ def bot(message, history, oai_key, system_prompt, seed, temperature, max_tokens,
         if assi is not None:
             whisper_prompt += f"\n{assi}"
 
-    if message.text:
-        whisper_prompt += message.text
+    if message["text"]:
+        whisper_prompt += message["text"]
     if message.files:
         for file in message.files:
             audio_fn = os.path.basename(file.path)
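Context for this hunk: in Gradio 5 the value a multimodal `ChatInterface` hands to the callback is a plain dict with `"text"` and `"files"` keys (files arrive as local paths in Gradio's cache), which is why the attribute-style lookups become key lookups. A minimal sketch of a callback reading that structure — the echo logic is illustrative only, not taken from app.py:

```python
import os
import gradio as gr

def bot(message, history):
    # Gradio 5 delivers the multimodal message as a dict:
    # {"text": "<typed text>", "files": ["/tmp/gradio/.../photo.png", ...]}
    prompt = message["text"] or ""
    for path in message["files"]:
        # each entry is a local file path copied into Gradio's cache
        prompt += f"\n[attached: {os.path.basename(path)}]"
    return prompt

demo = gr.ChatInterface(fn=bot, multimodal=True)

if __name__ == "__main__":
    demo.launch()
```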
@@ -181,7 +181,7 @@ def bot(message, history, oai_key, system_prompt, seed, temperature, max_tokens,
|
|
181 |
elif model == "dall-e-3":
|
182 |
response = client.images.generate(
|
183 |
model=model,
|
184 |
-
prompt=message
|
185 |
size="1792x1024",
|
186 |
quality="hd",
|
187 |
n=1,
|
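The image branch now pulls its prompt from the same dict. For reference, a self-contained sketch of the `client.images.generate` call this hunk configures, using the OpenAI Python SDK v1; the client setup and the helper name are assumptions, not part of app.py:

```python
from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

def generate_image(prompt: str) -> str:
    """Return the URL of a single DALL-E 3 rendering of `prompt`."""
    response = client.images.generate(
        model="dall-e-3",
        prompt=prompt,          # in app.py this is message["text"]
        size="1792x1024",
        quality="hd",
        n=1,
    )
    return response.data[0].url
```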
@@ -219,11 +219,11 @@ def bot(message, history, oai_key, system_prompt, seed, temperature, max_tokens,
 
         history_openai_format.append({"role": "assistant", "content": assi})
 
-    if message.text:
-        user_msg_parts.append({"type": "text", "text": message.text})
-    if message.files:
-        for file in message.files:
-            user_msg_parts.extend(encode_file(file.path))
+    if message["text"]:
+        user_msg_parts.append({"type": "text", "text": message["text"]})
+    if message["files"]:
+        for file in message["files"]:
+            user_msg_parts.extend(encode_file(file))
     history_openai_format.append({"role": "user", "content": user_msg_parts})
     user_msg_parts = []
 
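This hunk assembles the user turn as Chat Completions content parts. `encode_file` is app.py's own helper and its body is not shown in this diff, so the sketch below substitutes a hypothetical data-URL encoder just to illustrate the shape `user_msg_parts` ends up with:

```python
import base64
import mimetypes

def encode_image_part(path: str) -> list[dict]:
    """Hypothetical stand-in for app.py's encode_file: wrap one image file
    as a Chat Completions `image_url` content part using a data URL."""
    mime = mimetypes.guess_type(path)[0] or "application/octet-stream"
    with open(path, "rb") as f:
        b64 = base64.b64encode(f.read()).decode()
    return [{"type": "image_url", "image_url": {"url": f"data:{mime};base64,{b64}"}}]

message = {"text": "What is in this picture?", "files": ["photo.png"]}

user_msg_parts = []
if message["text"]:
    user_msg_parts.append({"type": "text", "text": message["text"]})
for path in message["files"]:
    user_msg_parts.extend(encode_image_part(path))

history_openai_format = [{"role": "user", "content": user_msg_parts}]
```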
@@ -350,11 +350,11 @@ with gr.Blocks(delete_cache=(86400, 86400)) as demo:
     dl_settings_button.click(None, controls, js=generate_download_settings_js("oai_chat_settings.bin", control_ids))
     ul_settings_button.click(None, None, None, js=generate_upload_settings_js(control_ids))
 
-    chat = gr.ChatInterface(fn=bot, multimodal=True, additional_inputs=controls,
+    chat = gr.ChatInterface(fn=bot, multimodal=True, additional_inputs=controls, autofocus = False)
     chat.textbox.file_count = "multiple"
     chatbot = chat.chatbot
     chatbot.show_copy_button = True
-    chatbot.height =
+    chatbot.height = 450
 
     if dump_controls:
         with gr.Row():
requirements.txt
CHANGED
@@ -1,4 +1,4 @@
-gradio
+gradio==5.1
 openai >= 1.0.0
 lxml
 PyMuPDF