Spaces:
Runtime error
Runtime error
make the pipeline simple
Browse files
- models/blip2_model.py +6 -2
- models/controlnet_model.py +2 -1
models/blip2_model.py
CHANGED
|
@@ -14,9 +14,13 @@ class ImageCaptioning:
|
|
| 14 |
self.data_type = torch.float32
|
| 15 |
else:
|
| 16 |
self.data_type = torch.float16
|
| 17 |
-
processor = Blip2Processor.from_pretrained("pretrained_models/blip2-opt-2.7b")
|
|
|
|
|
|
|
|
|
|
|
|
|
| 18 |
model = Blip2ForConditionalGeneration.from_pretrained(
|
| 19 |
-
"pretrained_models/blip2-opt-2.7b", torch_dtype=self.data_type
|
| 20 |
)
|
| 21 |
model.to(self.device)
|
| 22 |
return processor, model
|
|
|
|
| 14 |
self.data_type = torch.float32
|
| 15 |
else:
|
| 16 |
self.data_type = torch.float16
|
| 17 |
+
# processor = Blip2Processor.from_pretrained("pretrained_models/blip2-opt-2.7b")
|
| 18 |
+
# model = Blip2ForConditionalGeneration.from_pretrained(
|
| 19 |
+
# "pretrained_models/blip2-opt-2.7b", torch_dtype=self.data_type
|
| 20 |
+
# )
|
| 21 |
+
processor = Blip2Processor.from_pretrained("Salesforce/blip2-opt-2.7b")
|
| 22 |
model = Blip2ForConditionalGeneration.from_pretrained(
|
| 23 |
+
"Salesforce/blip2-opt-2.7b", torch_dtype=self.data_type
|
| 24 |
)
|
| 25 |
model.to(self.device)
|
| 26 |
return processor, model
|
models/controlnet_model.py
CHANGED
|
@@ -25,7 +25,8 @@ class TextToImage:
|
|
| 25 |
map_location=self.device, # Add this line
|
| 26 |
).to(self.device)
|
| 27 |
pipeline = StableDiffusionControlNetPipeline.from_pretrained(
|
| 28 |
-
"pretrained_models/stable-diffusion-v1-5",
|
|
|
|
| 29 |
controlnet=controlnet,
|
| 30 |
safety_checker=None,
|
| 31 |
torch_dtype=self.data_type,
|
|
|
|
| 25 |
map_location=self.device, # Add this line
|
| 26 |
).to(self.device)
|
| 27 |
pipeline = StableDiffusionControlNetPipeline.from_pretrained(
|
| 28 |
+
# "pretrained_models/stable-diffusion-v1-5",
|
| 29 |
+
"runwayml/stable-diffusion-v1-5",
|
| 30 |
controlnet=controlnet,
|
| 31 |
safety_checker=None,
|
| 32 |
torch_dtype=self.data_type,
|