zseid committed on
Commit
50eb0da
1 Parent(s): e6b1f30

add checks for images without faces in app

Browse files
Files changed (1) hide show
  1. app.py +7 -5
app.py CHANGED
@@ -210,9 +210,11 @@ def adj_example(adj):
210
  return example_analysis(prompt)
211
  def example_analysis(prompt):
212
  pil_img = pipe(prompt).images[0]
213
- # pil_img = Image.open('./a_abrupt_person_photorealistic.png')
214
  df = process_image_pil(pil_img,prompt)
215
- rgb_tup = df["skin color"][0]
 
 
216
 
217
  def clamp(x):
218
  return max(0, min(int(x), 255))
@@ -222,8 +224,8 @@ def example_analysis(prompt):
222
  intensity_val = rgb_intensity(rgb_tup)
223
  intense_hex = str(hex(int(intensity_val)))
224
  intense_hex = f"#{intense_hex}{intense_hex}{intense_hex}"
225
- gender_w = float(df["gender.Woman"][0])
226
- gender_m = float(df["gender.Man"][0])
227
  gender_str = f"Male ({gender_m}%)" if gender_m>gender_w else f"Female({gender_w}%)"
228
  return pil_img,gender_str,rgb_hex,intense_hex,score_prompt(prompt)
229
 
@@ -249,7 +251,7 @@ def cached_results(model):
249
 
250
  if __name__=='__main__':
251
  disclaimerString = ""
252
- # example_analysis("a abrupt person")
253
  with gr.Blocks() as demo:
254
  gr.Markdown("# Facial Adjectival Color and Income Auditor")
255
  gr.Markdown("## Assessing the bias towards gender and skin color in text-to-image models introduced by sentiment and profession.")
 
210
  return example_analysis(prompt)
211
  def example_analysis(prompt):
212
  pil_img = pipe(prompt).images[0]
213
+ # pil_img = Image.open('./this-is-fine.0.jpg')
214
  df = process_image_pil(pil_img,prompt)
215
+ rgb_tup = (128,128,128)
216
+ if "skin color" in df:
217
+ rgb_tup = df["skin color"][0]
218
 
219
  def clamp(x):
220
  return max(0, min(int(x), 255))
 
224
  intensity_val = rgb_intensity(rgb_tup)
225
  intense_hex = str(hex(int(intensity_val)))
226
  intense_hex = f"#{intense_hex}{intense_hex}{intense_hex}"
227
+ gender_w = float(df["gender.Woman"][0]) if "gender.Woman" in df else -1
228
+ gender_m = float(df["gender.Man"][0]) if "gender.Man" in df else -1
229
  gender_str = f"Male ({gender_m}%)" if gender_m>gender_w else f"Female({gender_w}%)"
230
  return pil_img,gender_str,rgb_hex,intense_hex,score_prompt(prompt)
231
 
 
251
 
252
  if __name__=='__main__':
253
  disclaimerString = ""
254
+ example_analysis("a abrupt person")
255
  with gr.Blocks() as demo:
256
  gr.Markdown("# Facial Adjectival Color and Income Auditor")
257
  gr.Markdown("## Assessing the bias towards gender and skin color in text-to-image models introduced by sentiment and profession.")