Spaces: Running on Zero
ohayonguy committed
Commit: d18dfca
1 Parent(s): 3686e7e
removed annoying info
app.py
CHANGED
@@ -101,6 +101,8 @@ def enhance_face(img, face_helper, has_aligned, num_flow_steps, scale=2):
         face_helper.align_warp_face()
     if len(face_helper.cropped_faces) == 0:
         raise gr.Error("Could not identify any face in the image.")
+    if has_aligned and len(face_helper.cropped_faces) > 1:
+        raise gr.Error("You marked that the input image is aligned, but multiple faces were detected.")
 
     # face restoration
     for i, cropped_face in tqdm(enumerate(face_helper.cropped_faces)):
@@ -170,7 +172,7 @@ def inference(seed, randomize_seed, img, aligned, scale, num_flow_steps,
     for i, restored_face in enumerate(restored_faces):
         restored_faces[i] = cv2.cvtColor(restored_face, cv2.COLOR_BGR2RGB)
     torch.cuda.empty_cache()
-    return output, restored_faces
+    return output, restored_faces if len(restored_faces) > 1 else None
 
 
 intro = """
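Taken together, the two hunks tighten the Gradio UX: the first rejects an input marked as aligned when more than one face is detected, and the second stops populating the per-face gallery when only a single face was restored (the "annoying info" the commit title refers to). Below is a minimal sketch of that behavior, assuming the Space wires inference to a gr.Image plus a gr.Gallery output; the component names and inference_stub are illustrative, not the Space's actual code.

import gradio as gr

def inference_stub(img, aligned):
    # Stand-in for the real restoration pipeline: pretend one face was detected and restored.
    restored_faces = [img]

    # First hunk: an input marked as aligned must contain exactly one face.
    if aligned and len(restored_faces) > 1:
        raise gr.Error("You marked that the input image is aligned, but multiple faces were detected.")

    output = img
    # Second hunk: only fill the gallery when there is more than one face; returning None
    # leaves it empty for single-face runs (exact behavior depends on the Gradio version).
    return output, restored_faces if len(restored_faces) > 1 else None

with gr.Blocks() as demo:
    inp = gr.Image(type="numpy")
    aligned = gr.Checkbox(label="The input is an aligned face")
    out = gr.Image(type="numpy")
    faces_gallery = gr.Gallery(label="Restored faces")
    gr.Button("Restore").click(inference_stub, inputs=[inp, aligned], outputs=[out, faces_gallery])

if __name__ == "__main__":
    demo.launch()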