nielsr (HF staff) committed
Commit e927f5e
1 Parent(s): f7969c0

Update app.py

Files changed (1): app.py (+1, -2)
app.py CHANGED
@@ -3,7 +3,6 @@ from transformers import ViltProcessor, ViltForQuestionAnswering
 import torch
 
 torch.hub.download_url_to_file('http://images.cocodataset.org/val2017/000000039769.jpg', 'cats.jpg')
-torch.hub.download_url_to_file('https://computing.ece.vt.edu/~harsh/visualAttention/ProjectWebpage/Figures/vqa_1.png', 'banana.png')
 
 processor = ViltProcessor.from_pretrained("dandelin/vilt-b32-finetuned-vqa")
 model = ViltForQuestionAnswering.from_pretrained("dandelin/vilt-b32-finetuned-vqa")
@@ -24,7 +23,7 @@ def answer_question(image, text):
 image = gr.inputs.Image(type="pil")
 question = gr.inputs.Textbox(label="Question")
 answer = gr.outputs.Textbox(label="Predicted answer")
-examples = [["cats.jpg", "How many cats are there?"], ["banana.png", "What is the mustache made of?"]]
+examples = [["cats.jpg", "How many cats are there?"]]
 
 title = "Interactive demo: ViLT"
 description = "Gradio Demo for ViLT (Vision and Language Transformer), fine-tuned on VQAv2. To use it, simply upload your image and type a question and click 'submit', or click one of the examples to load them. Read more at the links below."
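
The second hunk's context header references an answer_question(image, text) function whose body is not part of this diff. As a rough sketch only, the snippet below shows how such a ViLT VQA prediction function and the surrounding Gradio wiring are typically assembled from the pieces visible above (processor, model, inputs, outputs, examples, title); the exact implementation in app.py may differ, and the gr.Interface call here is an assumption, not the committed code.

import gradio as gr
import torch
from transformers import ViltProcessor, ViltForQuestionAnswering

processor = ViltProcessor.from_pretrained("dandelin/vilt-b32-finetuned-vqa")
model = ViltForQuestionAnswering.from_pretrained("dandelin/vilt-b32-finetuned-vqa")

def answer_question(image, text):
    # Encode the image-question pair and run a forward pass (hypothetical body;
    # the real function is only referenced, not shown, in this diff).
    encoding = processor(image, text, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**encoding)
    # The predicted answer is the VQAv2 label with the highest logit.
    idx = outputs.logits.argmax(-1).item()
    return model.config.id2label[idx]

# Wiring assumed from the inputs, outputs, examples, and title defined in app.py
# (old-style Gradio 2.x API, matching the gr.inputs/gr.outputs usage above).
interface = gr.Interface(
    fn=answer_question,
    inputs=[gr.inputs.Image(type="pil"), gr.inputs.Textbox(label="Question")],
    outputs=gr.outputs.Textbox(label="Predicted answer"),
    examples=[["cats.jpg", "How many cats are there?"]],
    title="Interactive demo: ViLT",
)
interface.launch()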