zseid committed
Commit a0e2c1f
1 Parent(s): c24da7b

example analysis function

Files changed (2)
  1. README.md +1 -1
  2. app.py +32 -4
README.md CHANGED
@@ -8,7 +8,7 @@ python_version: 3.9
 sdk_version: 3.19.1
 app_file: app.py
 pinned: false
-license: MIT
+license: mit
 ---
 
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py CHANGED
@@ -15,7 +15,8 @@ import pandas as pd
 import io
 from saac.prompt_generation.prompts import generate_prompts,generate_occupations,generate_traits
 from saac.prompt_generation.prompt_utils import score_prompt
-from saac.evaluation.eval_utils import generate_countplot, lumia_violinplot, process_analysis, generate_histplot
+from saac.image_analysis.process import process_image_pil
+from saac.evaluation.eval_utils import generate_countplot, lumia_violinplot, process_analysis, generate_histplot, rgb_intensity
 from datasets import load_dataset
 from diffusers import DiffusionPipeline, PNDMScheduler
 
@@ -177,7 +178,7 @@ def trait_graph(trait,hist=True):
                             y_label = 'Skincolor Intensity',
                             title = 'Skin Color Intensity, Binned by TDA Sentiment',)
     return fig2img(fig),fig2img(fig2)
-def occ_graph(occ):
+def occ_graph(model):
     tda_res, occ_result = process_analysis()
     fig = generate_histplot(occ_result, 'a_median', 'gender_detected_val',
                             title='Gender Distribution by Median Annual Salary',
@@ -193,12 +194,39 @@ def occ_graph(occ):
                             title='Skin Color Intensity, Binned by Median Salary')
     return fig2img(fig),fig2img(fig2)
 
+def occ_example(occ):
+    prompt = f"a {occ} photorealistic"
+    return example_analysis(prompt)
+def adj_example(adj):
+    prompt = f"a {adj} person photorealistic"
+    return example_analysis(prompt)
+def example_analysis(prompt):
+    pil_img = pipe(prompt).images[0]
+    # pil_img = Image.open('./a_abrupt_person_photorealistic.png')
+    df = process_image_pil(pil_img, prompt)
+
+    rgb_tup = df["skin color"][0]
+
+    def clamp(x):
+        return max(0, min(int(x), 255))
+    def hex_from_tup(in_tup):
+        return "#{0:02x}{1:02x}{2:02x}".format(clamp(in_tup[0]), clamp(in_tup[1]), clamp(in_tup[2]))
+    rgb_hex = hex_from_tup(rgb_tup)
+    intensity_val = rgb_intensity(rgb_tup)
+    intense_channel = "{0:02x}".format(clamp(intensity_val))  # two-digit hex channel, avoids hex()'s '0x' prefix
+    intense_hex = f"#{intense_channel}{intense_channel}{intense_channel}"
+    gender_w = float(df["gender.Woman"][0])
+    gender_m = float(df["gender.Man"][0])
+    gender_str = f"Male ({gender_m})" if gender_m > gender_w else f"Female ({gender_w})"
+    return pil_img, gender_str, rgb_hex, intense_hex
+
 if __name__=='__main__':
     disclaimerString = ""
+    # example_analysis("a abrupt person")
 
-    jobInterface = gr.Interface(fn=occ_graph,
+    jobInterface = gr.Interface(fn=occ_example,
                                 inputs=[gr.Dropdown(JOBS, label="occupation")],
-                                outputs=['image','image'],
+                                outputs=['image','text','colorpicker','colorpicker'],
                                 description="Referencing a specific profession comes loaded with associations of gender and ethnicity."
                                             " Text to image models provide an opportunity to explicitly specify an underrepresented group, but first we must understand our default behavior.",
                                 title="How occupation affects txt2img gender and skin color representation",