Update app_dialogue.py
app_dialogue.py CHANGED (+8 -4)
@@ -416,7 +416,7 @@ top_p = gr.Slider(
 
 
 chatbot = gr.Chatbot(
-    label="Idefics2",
+    label="Idefics2-Chatty",
     avatar_images=[None, BOT_AVATAR],
     height=450,
 )
@@ -438,6 +438,11 @@ with gr.Blocks(
     fill_height=True,
     css=""".gradio-container .avatar-container {height: 40px width: 40px !important;}""",
 ) as demo:
+
+    gr.Markdown("# 🐶 Idefics2-Chatty Playground 🐶")
+    gr.Markdown("In this demo you'll be able to chat with [Idefics2-8B-chatty](https://huggingface.co/HuggingFaceM4/idefics2-8b-chatty), a variant of [Idefics2-8B](https://huggingface.co/HuggingFaceM4/idefics2-8b-chatty) further fine-tuned on chat datasets")
+    gr.Markdown("If you want to learn more about Idefics2 and its variants, you can check our [blog post](https://huggingface.co/blog/idefics2).")
+
     # model selector should be set to `visbile=False` ultimately
     with gr.Row(elem_id="model_selector_row"):
         model_selector = gr.Dropdown(
@@ -447,9 +452,9 @@ with gr.Blocks(
             show_label=False,
             container=False,
             label="Model",
-            visible=
+            visible=False,
         )
-
+
     decoding_strategy.change(
         fn=lambda selection: gr.Slider(
             visible=(
@@ -490,7 +495,6 @@ with gr.Blocks(
         fn=model_inference,
         chatbot=chatbot,
         examples=EXAMPLES,
-        title="Idefics2 Playground",
         multimodal=True,
         cache_examples=False,
         additional_inputs=[
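For context, here is a minimal, self-contained sketch of how the touched pieces of app_dialogue.py fit together after this commit. It is an approximation rather than the full app: BOT_AVATAR, EXAMPLES, model_inference, the Dropdown choices, and the full additional_inputs list are not visible in the diff, so the stand-ins below are hypothetical, and the call that receives fn=model_inference is assumed to be gr.ChatInterface based on its keyword arguments.

# Hedged sketch of the post-commit layout; values marked "assumption" stand in
# for code the diff does not show.
import gradio as gr

BOT_AVATAR = None  # assumption: the real app points this at an avatar image file
# assumption: the real EXAMPLES pairs text with image files for the multimodal textbox
EXAMPLES = [[{"text": "What is in this image?", "files": []}]]


def model_inference(message, history, model_choice):
    # assumption: the real function streams generations from Idefics2-8B-chatty
    yield f"[stub reply from {model_choice}]"


chatbot = gr.Chatbot(
    label="Idefics2-Chatty",  # renamed from "Idefics2" in this commit
    avatar_images=[None, BOT_AVATAR],
    height=450,
)

with gr.Blocks(
    fill_height=True,
    css=""".gradio-container .avatar-container {height: 40px width: 40px !important;}""",
) as demo:
    # header lines added by this commit
    gr.Markdown("# 🐶 Idefics2-Chatty Playground 🐶")
    gr.Markdown(
        "In this demo you'll be able to chat with "
        "[Idefics2-8B-chatty](https://huggingface.co/HuggingFaceM4/idefics2-8b-chatty), "
        "a variant of [Idefics2-8B](https://huggingface.co/HuggingFaceM4/idefics2-8b-chatty) "
        "further fine-tuned on chat datasets"
    )
    gr.Markdown(
        "If you want to learn more about Idefics2 and its variants, you can check our "
        "[blog post](https://huggingface.co/blog/idefics2)."
    )

    # model selector should be set to `visbile=False` ultimately
    with gr.Row(elem_id="model_selector_row"):
        model_selector = gr.Dropdown(
            choices=["HuggingFaceM4/idefics2-8b-chatty"],  # assumption: choices not shown in the diff
            value="HuggingFaceM4/idefics2-8b-chatty",
            show_label=False,
            container=False,
            label="Model",
            visible=False,  # hidden from the UI as of this commit
        )

    gr.ChatInterface(  # assumption: constructor name inferred from its keyword arguments
        fn=model_inference,
        chatbot=chatbot,
        examples=EXAMPLES,
        multimodal=True,  # the title="Idefics2 Playground" kwarg was dropped in this commit
        cache_examples=False,
        additional_inputs=[model_selector],  # assumption: the real list is truncated in the diff
    )

if __name__ == "__main__":
    demo.launch()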