doevent committed on
Commit
ca4921e
1 Parent(s): cb577fb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -16,7 +16,7 @@ transform = transforms.Compose([
16
  transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711))
17
  ])
18
 
19
- model_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_large_caption.pth'
20
 
21
  model = blip_decoder(pretrained=model_url, image_size=384, vit='large')
22
  model.eval()
@@ -32,7 +32,7 @@ transform_vq = transforms.Compose([
32
  transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711))
33
  ])
34
 
35
- model_url_vq = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model*_vqa.pth'
36
 
37
  model_vq = blip_vqa(pretrained=model_url_vq, image_size=480, vit='base')
38
  model_vq.eval()
 
16
  transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711))
17
  ])
18
 
19
+ model_url = 'model_large_caption.pth'
20
 
21
  model = blip_decoder(pretrained=model_url, image_size=384, vit='large')
22
  model.eval()
 
32
  transforms.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711))
33
  ])
34
 
35
+ model_url_vq = 'model_vqa.pth'
36
 
37
  model_vq = blip_vqa(pretrained=model_url_vq, image_size=480, vit='base')
38
  model_vq.eval()