unography committed on
Commit
dd8705d
1 Parent(s): b5ea83b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -18,7 +18,7 @@ model_id = "unography/blip-long-cap-9ep"
18
  processor = AutoProcessor.from_pretrained(model_id)
19
  model = BlipForConditionalGeneration.from_pretrained(model_id).to(device)
20
 
21
- torch.hub.download_url_to_file("http://images.cocodataset.org/val2017/000000039769.jpg", "cats.jpg")
22
  torch.hub.download_url_to_file(
23
  "https://huggingface.co/datasets/nielsr/textcaps-sample/resolve/main/stop_sign.png", "stop_sign.png"
24
  )
@@ -43,7 +43,7 @@ with gr.Blocks(css="style.css") as demo:
43
  output = gr.Textbox(label="Result")
44
  gr.Examples(
45
  examples=[
46
- "cats.jpg",
47
  "stop_sign.png",
48
  "astronaut.jpg",
49
  ],
 
18
  processor = AutoProcessor.from_pretrained(model_id)
19
  model = BlipForConditionalGeneration.from_pretrained(model_id).to(device)
20
 
21
+ torch.hub.download_url_to_file("https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg", "demo.jpg")
22
  torch.hub.download_url_to_file(
23
  "https://huggingface.co/datasets/nielsr/textcaps-sample/resolve/main/stop_sign.png", "stop_sign.png"
24
  )
 
43
  output = gr.Textbox(label="Result")
44
  gr.Examples(
45
  examples=[
46
+ "demo.jpg",
47
  "stop_sign.png",
48
  "astronaut.jpg",
49
  ],