Revert "debug"

This reverts commit bb8117647eb40884781b8f88fc1c2f083250767e.

app.py CHANGED
@@ -94,7 +94,6 @@ def process(
     image = image.resize((width, height), Image.LANCZOS)
 
     if segmentation_prompt_text:
-        print('FLORENCE INFERENCE STARTED')
         _, result = run_florence_inference(
             model=FLORENCE_MODEL,
             processor=FLORENCE_PROCESSOR,
@@ -108,10 +107,7 @@ def process(
             result=result,
             resolution_wh=image.size
         )
-        print('FLORENCE INFERENCE DONE')
-        print('SAM INFERENCE STARTED')
         detections = run_sam_inference(SAM_IMAGE_MODEL, image, detections)
-        print('SAM INFERENCE DONE')
 
         if len(detections) == 0:
             gr.Info(f"{segmentation_prompt_text} prompt did not return any detections.")
@@ -122,23 +118,21 @@ def process(
         mask = mask.resize((width, height), Image.LANCZOS)
         mask = mask.filter(ImageFilter.GaussianBlur(radius=10))
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    # print('INFERENCE DONE')
-    # return result, mask
+    if randomize_seed_checkbox:
+        seed_slicer = random.randint(0, MAX_SEED)
+    generator = torch.Generator().manual_seed(seed_slicer)
+    result = FLUX_INPAINTING_PIPELINE(
+        prompt=inpainting_prompt_text,
+        image=image,
+        mask_image=mask,
+        width=width,
+        height=height,
+        strength=strength_slider,
+        generator=generator,
+        num_inference_steps=num_inference_steps_slider
+    ).images[0]
+    print('INFERENCE DONE')
+    return result, mask
 
 
 with gr.Blocks() as demo:
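For context, the block restored by this revert seeds a generator and calls FLUX_INPAINTING_PIPELINE with a prompt, the input image, and the blurred segmentation mask. Below is a minimal standalone sketch of the same call pattern, assuming the pipeline is diffusers' FluxInpaintPipeline; the model id, dtype, device, file names, and parameter values are placeholder assumptions, not taken from this repo.

# Minimal sketch, not from this repo: model id, dtype, device, file names,
# and parameter values below are placeholder assumptions.
import random

import torch
from diffusers import FluxInpaintPipeline
from PIL import Image

MAX_SEED = 2**32 - 1  # assumption; the Space defines its own MAX_SEED

pipe = FluxInpaintPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16
).to("cuda")

image = Image.open("input.png").convert("RGB")
mask = Image.open("mask.png").convert("L")  # white = region to repaint

# Same seeding pattern as the restored code: pick a random seed, then build a generator.
seed = random.randint(0, MAX_SEED)
generator = torch.Generator().manual_seed(seed)

result = pipe(
    prompt="a red sports car",
    image=image,
    mask_image=mask,
    width=image.width,
    height=image.height,
    strength=0.85,
    generator=generator,
    num_inference_steps=20
).images[0]
result.save("output.png")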