# Visual Question Answering app using BLIP-1 (jihadzakki/blip1-medvqa).
from transformers import pipeline

# Load the BLIP-1 model fine-tuned for medical visual question answering.
# NOTE(review): this downloads the model at import time — intentional for a
# demo script, but slow on first run.
Visual_QA = pipeline(model="jihadzakki/blip1-medvqa")


def answer_question(image, question):
    """Run the VQA pipeline on (image, question) and return the answer text.

    The pipeline returns a list of ``{"answer": ..., "score": ...}`` dicts;
    passing the pipeline directly as the Gradio ``fn`` would render the raw
    list's repr in the output Textbox, so extract the top answer string here.
    """
    result = Visual_QA(image=image, question=question)
    # Guard against an empty result so the UI shows a message, not a crash.
    if not result:
        return "No answer produced."
    return result[0]["answer"]


# Build Visual QA App
import gradio as gr

VisualQAApp = gr.Interface(
    fn=answer_question,
    inputs=[
        gr.Image(label="Upload image", type="pil"),
        gr.Textbox(label="Question"),
    ],
    outputs=[gr.Textbox(label="Answer")],
    title="Visual Question Answering using BLIP Model",
    description="VQA",
    allow_flagging="never",
)
VisualQAApp.launch(share=True)