zseid committed
Commit b5edb8f
Parent: dcfb2a2

cache the audit result components

Files changed (1): app.py (+12 −4)
app.py CHANGED
@@ -225,7 +225,7 @@ def example_analysis(prompt):
     gender_w = float(df["gender.Woman"][0])
     gender_m = float(df["gender.Man"][0])
     gender_str = f"Male ({gender_m}%)" if gender_m>gender_w else f"Female({gender_w}%)"
-    return pil_img,gender_str,rgb_hex,intense_hex
+    return pil_img,gender_str,rgb_hex,intense_hex,score_prompt(prompt)
 
 def bias_assessment(model):
     ss,sg,ssgraph,sggraph = trait_graph(model)
@@ -239,6 +239,13 @@ def bias_assessment(model):
         [(f"Skin color {'unbiased' if os else 'biased'} by Income/Occupation",boo_to_str(os))], \
         [(f"Gender {'unbiased' if og else 'biased'} by Income/Occupation",boo_to_str(og))],\
         osgraph,oggraph
+mj_analysis = bias_assessment("Midjourney")
+sd_analysis = bias_assessment("runwayml/stable-diffusion-v1-5")
+def cached_results(model):
+    if model=="Midjourney":
+        return mj_analysis
+    else:
+        return sd_analysis
 
 if __name__=='__main__':
     disclaimerString = ""
@@ -273,7 +280,7 @@ if __name__=='__main__':
                 color_map={"PASS": "green", "FAIL": "red"})
             with gr.Accordion("See Graph",open=False):
                 occ_gen = gr.Image()
-        btn.click(fn=bias_assessment,inputs=model,outputs=[sample,ss_pass,sg_pass,sent_skin,sent_gen,os_pass,og_pass,occ_skin,occ_gen])
+        btn.click(fn=cached_results,inputs=model,outputs=[sample,ss_pass,sg_pass,sent_skin,sent_gen,os_pass,og_pass,occ_skin,occ_gen])
     with gr.Tab("Image Analysis"):
         gr.Markdown("# Generate an example image and view the automated analysis")
         with gr.Row():
@@ -296,14 +303,15 @@ if __name__=='__main__':
             btn = gr.Button("Generate and Analyze")
         with gr.Column():
 
-            gender = gr.Text(label="Detected Gender")
+            gender = gr.Text(label="Detected Gender",interactive=False)
             with gr.Row(variant="compact"):
                 skin = gr.ColorPicker(label="Facial skin color")
                 inten = gr.ColorPicker(label="Grayscale intensity")
             img = gr.Image(label="Stable Diffusion v1.5")
+            sentscore = gr.Text(label="VADER sentiment score",interactive=False)
         sent.change(fn=lambda k: f"a {k} person photorealistic", inputs=sent, outputs=inp)
         occs.change(fn=lambda k: f"a {k} photorealistic", inputs=occs, outputs=inp,)
-        btn.click(fn=example_analysis,inputs=inp,outputs=[img,gender,skin,inten])
+        btn.click(fn=example_analysis,inputs=inp,outputs=[img,gender,skin,inten,sentscore])
         # inp.submit(fn=example_analysis, outputs=[img,gender,skin,inten])
 
 
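
The core of this commit is that bias_assessment, which runs the full audit, is now executed once per model at import time (mj_analysis, sd_analysis), and cached_results just hands back the precomputed tuple when the button is clicked. A minimal sketch of the same caching idea using functools.lru_cache instead of module-level globals; the bias_assessment body below is a hypothetical stand-in for the real audit in app.py:

    from functools import lru_cache

    def bias_assessment(model):
        # Stand-in for the real audit in app.py, which returns graphs and PASS/FAIL labels.
        print(f"running audit for {model}")
        return (f"results for {model}",)

    @lru_cache(maxsize=None)
    def cached_results(model):
        # The first call per model runs the audit; later clicks reuse the stored tuple.
        return bias_assessment(model)

    if __name__ == "__main__":
        cached_results("Midjourney")                      # runs the audit
        cached_results("Midjourney")                      # served from the cache
        cached_results("runwayml/stable-diffusion-v1-5")  # runs the audit for the other model

One trade-off versus the committed version: lru_cache fills lazily on the first click rather than eagerly at startup, so the app launches faster but the first click per model still pays the audit cost.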
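
The other change threads a prompt sentiment score through example_analysis into the new read-only "VADER sentiment score" textbox. score_prompt itself is not shown in this diff; going by the label, it presumably wraps VADER's compound score, roughly as in this assumed sketch using the vaderSentiment package:

    from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer

    _analyzer = SentimentIntensityAnalyzer()

    def score_prompt(prompt):
        # VADER compound score in [-1, 1]; values near -1 flag negatively charged prompts.
        return _analyzer.polarity_scores(prompt)["compound"]

    print(score_prompt("a cheerful person photorealistic"))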