from transformers import AutoProcessor, BlipForQuestionAnswering
import gradio as gr

# Load the BLIP VQA model and its matching processor
processor = AutoProcessor.from_pretrained("Salesforce/blip-vqa-base")
model = BlipForQuestionAnswering.from_pretrained("Salesforce/blip-vqa-base")

def answer_question(image, question):
    # Prepare the image/question pair for the model
    inputs = processor(image, question, return_tensors="pt")
    # Generate the answer tokens and decode them to text
    out = model.generate(**inputs)
    answer = processor.decode(out[0], skip_special_tokens=True)
    return answer

# Create Gradio interface
image_input = gr.Image(type="pil", label="Upload Image")
question_input = gr.Textbox(label="Ask a Question")
output = gr.Textbox(label="Answer")

interface = gr.Interface(
    fn=answer_question,
    inputs=[image_input, question_input],
    outputs=output,
    title="Multimodal Question Answering",
)
interface.launch()