Commit 45faff1 · Parent: 774d798
Update app.py
app.py CHANGED
@@ -12,7 +12,7 @@ from torch import autocast
 import cv2
 from matplotlib import pyplot as plt
 from torchvision import transforms
-
+from diffusers import DiffusionPipeline
 
 import io
 import logging
@@ -95,7 +95,7 @@ def preprocess_mask(mask):
     mask = torch.from_numpy(mask)
     return mask
 
-def
+def model_process(init_image, mask):
     global model
     '''
     input = request.files
@@ -219,7 +219,8 @@ def predict(dict, prompt=""):
     print(f'liuyz_2_', init_image.convert("RGB"))
     print(f'liuyz_3_', init_image.convert("RGB").resize((512, 512)))
     mask = dict["mask"] # .convert("RGB") #.resize((512, 512))
-    output =
+    # output = model_process(init_image, mask)
+    output = None # output.images[0]
     # output = pipe(prompt = prompt, image=init_image, mask_image=mask,guidance_scale=7.5)
 
     return output.images[0], gr.update(visible=True), gr.update(visible=True), gr.update(visible=True)
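
For context, the new model_process(init_image, mask) stub, the added diffusers import, and the commented-out pipe(...) call suggest an inpainting flow roughly like the sketch below. This is an illustrative sketch only, not part of the commit: the pipeline class (StableDiffusionInpaintPipeline), the checkpoint name, the empty prompt, and the 512x512 resizing are assumptions inferred from the surrounding code.

# Illustrative sketch only; not code from this commit.
# Assumes diffusers' StableDiffusionInpaintPipeline and a public inpainting checkpoint.
import torch
from diffusers import StableDiffusionInpaintPipeline

pipe = StableDiffusionInpaintPipeline.from_pretrained(
    "stabilityai/stable-diffusion-2-inpainting",  # assumed checkpoint, not from the commit
    torch_dtype=torch.float16,
).to("cuda")

def model_process(init_image, mask):
    # Resize to the 512x512 resolution used elsewhere in predict().
    image = init_image.convert("RGB").resize((512, 512))
    mask_image = mask.convert("RGB").resize((512, 512))
    # Empty prompt is an assumption; the commit leaves the real pipe call commented out.
    output = pipe(prompt="", image=image, mask_image=mask_image, guidance_scale=7.5)
    return output  # output.images[0] would be the inpainted PIL image

With a sketch like this, the commented line in predict() (# output = model_process(init_image, mask)) could be re-enabled, and output.images[0] returned exactly as in the existing return statement.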