ShreyMehra committed
Commit 38aa5b2
Parent: a729471

Update app.py

Files changed (1): app.py (+1 / -9)
app.py CHANGED
@@ -4,7 +4,6 @@ import requests
 import io
 from PIL import Image
 from transformers import AutoProcessor, Blip2ForConditionalGeneration
-from peft import PeftModel, PeftConfig
 import torch
 
 
@@ -57,16 +56,9 @@ class UI:
 
 class Model:
     def load_model(self):
-        peft_model_id = "Shrey23/Image-Captioning"
-        # config = PeftConfig.from_pretrained(peft_model_id)
         global model
         global processor
-        PATH = "model/"
-        model = Blip2ForConditionalGeneration.from_pretrained(PATH, local_files_only=True, low_cpu_mem_usage=True, torch_dtype=torch.float16) #, device_map="auto", load_in_8bit=True
-        print(-1)
-        model = PeftModel.from_pretrained(model, peft_model_id)
-
-        print(-2)
+        model = Blip2ForConditionalGeneration.from_pretrained("Shrey23/Image-Captioning", torch_dtype=torch.float16) #, device_map="auto", load_in_8bit=True
         processor = AutoProcessor.from_pretrained("Salesforce/blip2-opt-2.7b")
 
 def main():
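For context, after this commit the Space loads the fine-tuned captioning weights directly from the Shrey23/Image-Captioning Hub repo instead of applying a PEFT adapter on top of a local base checkpoint, so the peft import, the local model/ path, and the print(-1)/print(-2) debug statements are dropped. Below is a minimal, self-contained sketch of how the new loading path might be exercised end to end; the generate_caption helper and the sample image URL are illustrative only, and it assumes the Hub repo contains full merged BLIP-2 weights and that a CUDA GPU is available for float16 inference.

# Sketch only (not part of the commit). Assumes "Shrey23/Image-Captioning"
# holds full merged BLIP-2 weights and that a CUDA GPU is available for fp16.
import io

import requests
import torch
from PIL import Image
from transformers import AutoProcessor, Blip2ForConditionalGeneration

device = "cuda" if torch.cuda.is_available() else "cpu"

processor = AutoProcessor.from_pretrained("Salesforce/blip2-opt-2.7b")
model = Blip2ForConditionalGeneration.from_pretrained(
    "Shrey23/Image-Captioning", torch_dtype=torch.float16
).to(device)


def generate_caption(image: Image.Image) -> str:
    # Preprocess the image, generate token ids, and decode them into a caption.
    inputs = processor(images=image, return_tensors="pt").to(device, torch.float16)
    generated_ids = model.generate(**inputs, max_new_tokens=30)
    return processor.batch_decode(generated_ids, skip_special_tokens=True)[0].strip()


# Hypothetical usage with an example image URL.
url = "https://example.com/sample.jpg"
image = Image.open(io.BytesIO(requests.get(url).content)).convert("RGB")
print(generate_caption(image))

If the Hub repo held only a LoRA adapter rather than merged weights, the removed PeftModel.from_pretrained wrapping over the base BLIP-2 model would still be required.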