taesiri committed
Commit 984b4fb
Parent: e66c25f

going back to opt

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -9,7 +9,7 @@ from PIL import Image
 device = torch.device("cuda") if torch.cuda.is_available() else "cpu"
 
 model, vis_processors, _ = load_model_and_preprocess(
-    name="blip2_t5", model_type="pretrain_flant5xl", is_eval=True, device=device
+    name="blip2_opt", model_type="pretrain_opt2.7b", is_eval=True, device=device
 )
 
 
@@ -62,7 +62,7 @@ with gr.Blocks() as demo:
         "### BLIP-2: Bootstrapping Language-Image Pre-training with Frozen Image Encoders and Large Language Models"
     )
     gr.Markdown(
-        "This demo uses the `pretrain_flant5xl` weights. For more information please visit [Github](https://github.com/salesforce/LAVIS/tree/main/projects/blip2) or [Paper](https://arxiv.org/abs/2301.12597)."
+        "This demo uses the `pretrain_opt2.7b` weights. For more information please visit [Github](https://github.com/salesforce/LAVIS/tree/main/projects/blip2) or [Paper](https://arxiv.org/abs/2301.12597)."
    )
 
    with gr.Row():
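
The diff only swaps which checkpoint `load_model_and_preprocess` pulls; the `name`/`model_type` pair selects the frozen LLM behind the Q-Former, and nothing downstream changes. For context, here is a minimal sketch of how a model loaded this way is typically driven through LAVIS's standard BLIP-2 `generate()` interface. The image path and prompt below are illustrative, not taken from app.py:

```python
import torch
from PIL import Image
from lavis.models import load_model_and_preprocess

device = torch.device("cuda") if torch.cuda.is_available() else "cpu"

# Same call as in the diff: load BLIP-2 with a frozen OPT-2.7B language model.
model, vis_processors, _ = load_model_and_preprocess(
    name="blip2_opt", model_type="pretrain_opt2.7b", is_eval=True, device=device
)

# Hypothetical input image; the demo would receive this from the Gradio UI.
raw_image = Image.open("example.jpg").convert("RGB")
image = vis_processors["eval"](raw_image).unsqueeze(0).to(device)

# Unconditional captioning and prompted VQA go through the same generate() call.
caption = model.generate({"image": image})
answer = model.generate(
    {"image": image, "prompt": "Question: what is shown in the image? Answer:"}
)
```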