import torch
import gradio as gr
from fastapi import FastAPI
import os
from PIL import Image
import tempfile
from transformers import TextStreamer
from utils import title_markdown, block_css, tos_markdown, learn_more_markdown

# FastAPI app onto which the Gradio UI is mounted below.
app = FastAPI()

textbox = gr.Textbox(
    show_label=False, placeholder="Enter text and press ENTER", container=False
)
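
# --- Placeholder event handlers ---------------------------------------------
# `generate`, `regenerate` and `clear_history` are wired to the buttons below
# but are not defined in this file; in the full project they are expected to
# come from the model-inference code. The sketches here are hypothetical
# stand-ins that only illustrate the input/output signatures the UI expects.
def generate(image, text, first_run, state, state_, images_tensor):
    # Hypothetical stub: a real handler would run the model on (image, text)
    # and append its answer to the chat history held in `state`.
    history = state if isinstance(state, list) else []
    history = history + [(text, "(model response placeholder)")]
    return history, state_, history, False, "", images_tensor, image


def regenerate(state, state_):
    # Hypothetical stub: a real handler would drop the last answer so that
    # `generate` (chained via .then) can re-run on the previous question.
    history = state if isinstance(state, list) else []
    return history, state_, history, True


def clear_history(state, state_):
    # Hypothetical stub: reset the image, textbox, chat history and states.
    return None, "", True, None, None, [], None
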
with gr.Blocks(title='Asking questions about images🚀', css=block_css) as demo:
    gr.Markdown(title_markdown)
    state = gr.State()
    state_ = gr.State()
    first_run = gr.State()
    images_tensor = gr.State()

    with gr.Row():
        with gr.Column(scale=3):
            image1 = gr.Image(label="Input Document", type="filepath")

            cur_dir = os.path.dirname(os.path.abspath(__file__))
            print(cur_dir)
            gr.Examples(
                examples=[
                    ["demo.jfif", "What is unusual about this image?"],
                    ["demo.jfif", "What are the things I should be cautious about when I visit here?"],
                    ["demo.jfif", "If there are factual errors in the questions, point it out; if not, proceed answering the question. What’s happening in the desert?"],
                    ["demo.jfif", "What is the title of this book?"],
                    ["demo.jfif", "What type of food is the girl holding?"],
                    ["demo.jfif", "What color is the train?"],
                    ["demo.jfif", "What is the girl looking at?"],
                    ["demo.jfif", "What might be the reason for the dog's aggressive behavior?"],
                ],
                inputs=[image1, textbox],
            )
        with gr.Column(scale=7):
            chatbot = gr.Chatbot(label="MoE-LLaVA", bubble_full_width=True, height=750)
            with gr.Row():
                with gr.Column(scale=8):
                    textbox.render()
                with gr.Column(scale=1, min_width=50):
                    submit_btn = gr.Button(
                        value="Send", variant="primary", interactive=True
                    )
            with gr.Row(elem_id="buttons") as button_row:
                upvote_btn = gr.Button(value="👍 Upvote", interactive=True)
                downvote_btn = gr.Button(value="👎 Downvote", interactive=True)
                flag_btn = gr.Button(value="⚠️ Flag", interactive=True)
                # stop_btn = gr.Button(value="⏹️ Stop Generation", interactive=False)
                regenerate_btn = gr.Button(value="🔄 Regenerate", interactive=True)
                clear_btn = gr.Button(value="🗑️ Clear history", interactive=True)

    gr.Markdown(tos_markdown)
    gr.Markdown(learn_more_markdown)
    submit_btn.click(generate, [image1, textbox, first_run, state, state_, images_tensor],
                     [state, state_, chatbot, first_run, textbox, images_tensor, image1])
    regenerate_btn.click(regenerate, [state, state_], [state, state_, chatbot, first_run]).then(
        generate, [image1, textbox, first_run, state, state_, images_tensor],
        [state, state_, chatbot, first_run, textbox, images_tensor, image1])
    clear_btn.click(clear_history, [state, state_],
                    [image1, textbox, first_run, state, state_, chatbot, images_tensor])
# The UI is mounted on the FastAPI app (for serving via uvicorn) and also
# launched directly as a standalone Gradio server.
app = gr.mount_gradio_app(app, demo, path="/")
demo.launch()
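# Alternative to demo.launch(): because the UI is mounted on the FastAPI `app`
# above, it can instead be served with uvicorn, e.g.
#   uvicorn app:app --host 0.0.0.0 --port 7860
# (assuming this file is saved as app.py).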