Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -87,21 +87,22 @@ with gr.Blocks(fill_height=True) as demo:
     query_input = gr.Textbox(label="Prompt")
     submit_btn = gr.Button("Submit")
     output = gr.Textbox(label="Output")
-    examples=[["./example_images/docvqa_example.png", "How many items are sold?", "Greedy", 0.4, 512, 1.2, 0.8],
+
+    with gr.Accordion(label="Example Inputs and Advanced Generation Parameters"):
+        examples=[["./example_images/docvqa_example.png", "How many items are sold?", "Greedy", 0.4, 512, 1.2, 0.8],
               ["./example_images/s2w_example.png", "What is this UI about?", "Greedy", 0.4, 512, 1.2, 0.8],
               ["./example_images/example_images_travel_tips.jpg", "I want to go somewhere similar to the one in the photo. Give me destinations and travel tips.", 0.4, 512, 1.2, 0.8],
               ["./example_images/chicken_on_money.png", "Can you tell me a very short story based on this image?", 0.4, 512, 1.2, 0.8],
               ["./example_images/baklava.png", "Where is this pastry from?", 0.4, 512, 1.2, 0.8],
               ["./example_images/dummy_pdf.png", "How much percent is the order status?", 0.4, 512, 1.2, 0.8],
               ["./example_images/art_critic.png", "As an art critic AI assistant, could you describe this painting in details and make a thorough critic?.", 0.4, 512, 1.2, 0.8]]
-    gr.Examples(
+        gr.Examples(
         examples = examples,
         inputs=[image_input, query_input, decoding_strategy, temperature,
                 max_new_tokens, repetition_penalty, top_p],
         outputs=output,
         fn=model_inference
     )
-    with gr.Accordion():
         # Hyper-parameters for generation
         max_new_tokens = gr.Slider(
             minimum=8,
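For readers following along, here is a minimal, self-contained sketch of the Blocks layout this commit produces: the examples list and the gr.Examples component now sit inside a gr.Accordion labeled "Example Inputs and Advanced Generation Parameters" rather than before an unlabeled one. model_inference is stubbed, and anything not visible in the hunk (the gr.Image and gr.Radio definitions, slider ranges and defaults, the Submit button wiring) is assumed purely for illustration and may differ from the actual Space.

# sketch_app.py -- minimal layout sketch, not the full Space
import gradio as gr

def model_inference(image, prompt, decoding_strategy, temperature,
                    max_new_tokens, repetition_penalty, top_p):
    # Stub: the real Space runs the vision-language model here.
    return f"(stub) prompt={prompt!r}, strategy={decoding_strategy}"

with gr.Blocks(fill_height=True) as demo:
    image_input = gr.Image(label="Image", type="pil")   # assumed definition, not shown in the hunk
    query_input = gr.Textbox(label="Prompt")
    submit_btn = gr.Button("Submit")
    output = gr.Textbox(label="Output")

    with gr.Accordion(label="Example Inputs and Advanced Generation Parameters"):
        # Generation hyper-parameters; only minimum=8 is visible in the hunk,
        # the other ranges/defaults are placeholders.
        max_new_tokens = gr.Slider(minimum=8, maximum=1024, value=512, step=1,
                                   label="Max new tokens")
        repetition_penalty = gr.Slider(minimum=1.0, maximum=2.0, value=1.2,
                                       label="Repetition penalty")
        temperature = gr.Slider(minimum=0.0, maximum=1.0, value=0.4, label="Temperature")
        top_p = gr.Slider(minimum=0.0, maximum=1.0, value=0.8, label="Top-p")
        decoding_strategy = gr.Radio(["Greedy", "Top P Sampling"], value="Greedy",
                                     label="Decoding strategy")  # assumed choices

        # One example row, in the same column order as the inputs list below.
        examples = [["./example_images/docvqa_example.png", "How many items are sold?",
                     "Greedy", 0.4, 512, 1.2, 0.8]]
        gr.Examples(
            examples=examples,
            inputs=[image_input, query_input, decoding_strategy, temperature,
                    max_new_tokens, repetition_penalty, top_p],
            outputs=output,
            fn=model_inference,
        )

    # Assumed wiring: the Submit button calls the same inference function.
    submit_btn.click(model_inference,
                     inputs=[image_input, query_input, decoding_strategy, temperature,
                             max_new_tokens, repetition_penalty, top_p],
                     outputs=output)

if __name__ == "__main__":
    demo.launch()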