dafajudin committed · commit ecbb493 · 1 parent: 9414cf8 · "edit"

app.py CHANGED
@@ -1,99 +1,3 @@
-# import gradio as gr
-# from transformers import pipeline
-
-# # Load the Visual QA model
-# generator = pipeline("visual-question-answering", model="jihadzakki/blip1-medvqa")
-
-# def format_answer(image, question, history):
-#     try:
-#         result = generator(image, question, max_new_tokens=50)
-#         predicted_answer = result[0].get('answer', 'No answer found')
-#         history.append((image, f"Question: {question} | Answer: {predicted_answer}"))
-
-#         return f"Predicted Answer: {predicted_answer}", history
-#     except Exception as e:
-#         return f"Error: {str(e)}", history
-
-# def switch_theme(mode):
-#     if mode == "Light Mode":
-#         return gr.themes.Default()
-#     else:
-#         return gr.themes.Soft(primary_hue=gr.themes.colors.blue, secondary_hue=gr.themes.colors.orange)
-
-# def save_feedback(feedback):
-#     return "Thank you for your feedback!"
-
-# def display_history(history):
-#     log_entries = []
-#     for img, text in history:
-#         log_entries.append((img, text))
-#     return log_entries
-
-# # Build the Visual QA application using Gradio with improvements
-# with gr.Blocks(
-#     theme=gr.themes.Soft(
-#         font=[gr.themes.GoogleFont("Inconsolata"), "Arial", "sans-serif"],
-#         primary_hue=gr.themes.colors.blue,
-#         secondary_hue=gr.themes.colors.red,
-#     )
-# ) as VisualQAApp:
-#     gr.Markdown("# Visual Question Answering using BLIP Model", elem_classes="title")
-
-#     with gr.Row():
-#         with gr.Column():
-#             image_input = gr.Image(label="Upload image", type="pil")
-#             question_input = gr.Textbox(show_label=False, placeholder="Enter your question here...")
-#             submit_button = gr.Button("Submit", variant="primary")
-
-#         with gr.Column():
-#             answer_output = gr.Textbox(label="Result Prediction")
-
-#     history_state = gr.State([])  # Initialize the history state
-
-#     submit_button.click(
-#         format_answer,
-#         inputs=[image_input, question_input, history_state],
-#         outputs=[answer_output, history_state],
-#         show_progress=True
-#     )
-
-#     with gr.Row():
-#         history_gallery = gr.Gallery(label="History Log", elem_id="history_log")
-#         submit_button.click(
-#             display_history,
-#             inputs=[history_state],
-#             outputs=[history_gallery]
-#         )
-
-#     with gr.Accordion("Help", open=False):
-#         gr.Markdown("**Upload image**: Select the chest X-ray image you want to analyze.")
-#         gr.Markdown("**Enter your question**: Type the question you have about the image, such as 'Is there any sign of pneumonia?'")
-#         gr.Markdown("**Submit**: Click the submit button to get the prediction from the model.")
-
-#     with gr.Accordion("User Preferences", open=False):
-#         gr.Markdown("**Mode**: Choose between light and dark mode for your comfort.")
-#         mode_selector = gr.Radio(choices=["Light Mode", "Dark Mode"], label="Select Mode")
-#         apply_theme_button = gr.Button("Apply Theme")
-
-#         apply_theme_button.click(
-#             switch_theme,
-#             inputs=[mode_selector],
-#             outputs=[],
-#         )
-
-#     with gr.Accordion("Feedback", open=False):
-#         gr.Markdown("**We value your feedback!** Please provide any feedback you have about this application.")
-#         feedback_input = gr.Textbox(label="Feedback", lines=4)
-#         submit_feedback_button = gr.Button("Submit Feedback")
-
-#         submit_feedback_button.click(
-#             save_feedback,
-#             inputs=[feedback_input],
-#             outputs=[feedback_input]
-#         )
-
-# VisualQAApp.launch(share=True)
-
 import gradio as gr
 from transformers import pipeline
 
@@ -188,4 +92,4 @@ with gr.Blocks(
             outputs=[feedback_input]
         )
 
-VisualQAApp.launch()
+VisualQAApp.launch(share=True)
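For reference, the new app.py drives the same "visual-question-answering" pipeline that appears in the removed commented-out block. Below is a minimal sketch, not part of this commit, of querying the jihadzakki/blip1-medvqa checkpoint directly from Python without the Gradio UI; the image path and question are placeholder assumptions.

```python
# Minimal sketch (not part of the commit): call the MedVQA pipeline directly.
# Assumes transformers and Pillow are installed; "chest_xray.png" is a placeholder path.
from PIL import Image
from transformers import pipeline

# Same model id as in app.py
generator = pipeline("visual-question-answering", model="jihadzakki/blip1-medvqa")

image = Image.open("chest_xray.png").convert("RGB")
result = generator(image, "Is there any sign of pneumonia?", max_new_tokens=50)

# The pipeline returns a list of dicts; generative BLIP models expose an 'answer' key.
print(result[0].get("answer", "No answer found"))
```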