bjoernp committed on
Commit
57a4e5e
1 Parent(s): f16db55

Fixed typo in FP16 and 8bit examples

Files changed (1)
  1. README.md +2 -2
README.md CHANGED
@@ -108,7 +108,7 @@ import requests
 from PIL import Image
 from transformers import Blip2Processor, Blip2ForConditionalGeneration
 
-processor = Bli2pProcessor.from_pretrained("Salesforce/blip2-flan-t5-xxl")
+processor = Blip2Processor.from_pretrained("Salesforce/blip2-flan-t5-xxl")
 model = Blip2ForConditionalGeneration.from_pretrained("Salesforce/blip2-flan-t5-xxl", torch_dtype=torch.float16, device_map="auto")
 
 img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg'
@@ -134,7 +134,7 @@ import requests
 from PIL import Image
 from transformers import Blip2Processor, Blip2ForConditionalGeneration
 
-processor = Bli2pProcessor.from_pretrained("Salesforce/blip2-flan-t5-xxl")
+processor = Blip2Processor.from_pretrained("Salesforce/blip2-flan-t5-xxl")
 model = Blip2ForConditionalGeneration.from_pretrained("Salesforce/blip2-flan-t5-xxl", load_in_8bit=True, device_map="auto")
 
 img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg'
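For context, here is a minimal sketch of how the corrected FP16 example reads once the hunk is applied. Only the lines shown in the diff above come from the README; the continuation after `img_url` (loading the demo image, an example question, and the generate/decode calls) is assumed here from the usual BLIP-2 model-card pattern and is not part of this commit.

```python
import requests
import torch
from PIL import Image
from transformers import Blip2Processor, Blip2ForConditionalGeneration

# Processor class name fixed by this commit: Blip2Processor (was Bli2pProcessor).
processor = Blip2Processor.from_pretrained("Salesforce/blip2-flan-t5-xxl")

# Half-precision weights, placed automatically across available devices.
model = Blip2ForConditionalGeneration.from_pretrained(
    "Salesforce/blip2-flan-t5-xxl", torch_dtype=torch.float16, device_map="auto"
)

# Demo image referenced in the README.
img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg'
raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB')

# Assumed continuation: prompt, preprocess, generate, decode.
question = "how many dogs are in the picture?"
inputs = processor(raw_image, question, return_tensors="pt").to("cuda", torch.float16)

out = model.generate(**inputs)
print(processor.decode(out[0], skip_special_tokens=True))
```

The 8-bit example touched by the second hunk differs only in how the model is loaded: `load_in_8bit=True` (which requires the bitsandbytes package) replaces `torch_dtype=torch.float16`.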