sooh-j committed on
Commit
254bcc9
1 Parent(s): abc079d

Create handler.py

Browse files
Files changed (1) hide show
  1. handler.py +30 -0
handler.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Dict, List, Any
+
+ import requests
+ import torch
+ from PIL import Image
+ from peft import PeftModel
+ from transformers import Blip2Processor, Blip2ForConditionalGeneration
6
+
7
+ class EndpointHandler():
8
+ def __init__(self, path=""):
9
+ self.base_model_name = "Salesforce/blip2-opt-2.7b"
10
+ self.model_name = "sooh-j/blip2-vizwizqa"
11
+ self.base_model = Blip2ForConditionalGeneration.from_pretrained(self.base_model_name,
12
+ load_in_8bit=True)
13
+ self.processor = Blip2Processor.from_pretrained(self.base_model_name)
14
+ self.model = PeftModel.from_pretrained(self.model_name, self.base_model_name)
15
+
16
+ self.device = "cuda" if torch.cuda.is_available() else "cpu"
17
+ self.model.to(self.device)
18
+
19
+ def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
20
+ data = data.pop("inputs", data)
21
+
22
+ image = data.image
23
+ question = data.question
24
+
25
+ prompt = f"Question: {question}, Answer:"
26
+ processed = self.processor(images=image, prompt, return_tensors="pt").to(self.device)
27
+
28
+ out = self.model.generate(**processed)
29
+
30
+ return self.processor.decode(out[0], skip_special_tokens=True)