hagenw committed on
Commit
0c25e83
1 Parent(s): 78d5044

Output expression as 3D plot

Browse files
Files changed (2) hide show
  1. app.py +59 -6
  2. requirements.txt +1 -0
app.py CHANGED
@@ -1,4 +1,5 @@
1
  import gradio as gr
 
2
  import numpy as np
3
  import spaces
4
  import torch
@@ -147,6 +148,10 @@ def process_func(x: np.ndarray, sampling_rate: int) -> dict:
147
  y = y.detach().cpu().numpy()
148
  results.append(y[0])
149
 
 
 
 
 
150
  return (
151
  f"{round(100 * results[0][0])} years", # age
152
  {
@@ -154,11 +159,12 @@ def process_func(x: np.ndarray, sampling_rate: int) -> dict:
154
  "male": results[0][2],
155
  "child": results[0][3],
156
  },
157
- {
158
- "arousal": results[1][0],
159
- "dominance": results[1][1],
160
- "valence": results[1][2],
161
- }
 
162
  )
163
 
164
 
@@ -181,6 +187,52 @@ def recognize(input_file):
181
  return process_func(signal, target_rate)
182
 
183
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
184
  description = (
185
  "Recognize "
186
  f"[age](https://huggingface.co/{age_gender_model_name}), "
@@ -204,7 +256,8 @@ with gr.Blocks() as demo:
204
  with gr.Column():
205
  output_age = gr.Textbox(label="Age")
206
  output_gender = gr.Label(label="Gender")
207
- output_expression = gr.Label(label="Expression")
 
208
 
209
  outputs = [output_age, output_gender, output_expression]
210
  submit_btn.click(recognize, input, outputs)
 
1
  import gradio as gr
2
+ import matplotlib.pyplot as plt
3
  import numpy as np
4
  import spaces
5
  import torch
 
148
  y = y.detach().cpu().numpy()
149
  results.append(y[0])
150
 
151
+ # Plot A/D/V values
152
+ plot_expression(results[1][0], results[1][1], results[1][2])
153
+ expression_file = "expression.png"
154
+ plt.savefig(expression_file)
155
  return (
156
  f"{round(100 * results[0][0])} years", # age
157
  {
 
159
  "male": results[0][2],
160
  "child": results[0][3],
161
  },
162
+ expression_file,
163
+ # {
164
+ # "arousal": results[1][0],
165
+ # "dominance": results[1][1],
166
+ # "valence": results[1][2],
167
+ # }
168
  )
169
 
170
 
 
187
  return process_func(signal, target_rate)
188
 
189
 
190
def plot_expression(arousal, dominance, valence, voxels=7):
    r"""3D voxel plot of arousal, dominance, valence.

    Highlights a single voxel at the grid position given by the three
    scores.  The plot is drawn on a newly created pyplot figure, which
    becomes the *current* figure, so a subsequent ``plt.savefig(...)``
    in the caller picks it up.

    Args:
        arousal: arousal score, expected in [0, 1] (clamped otherwise)
        dominance: dominance score, expected in [0, 1] (clamped otherwise)
        valence: valence score, expected in [0, 1] (clamped otherwise)
        voxels: number of voxels per axis, i.e. the grid resolution

    Returns:
        the matplotlib figure containing the plot

    """
    def _index(score):
        # Map a [0, 1] score onto a voxel index.  Clamp so that model
        # outputs slightly outside [0, 1] still produce a visible voxel
        # instead of silently matching nothing (empty plot).
        return min(max(round(score * voxels), 0), voxels)

    # Create voxel grid and mark the single cell for the given scores
    x, y, z = np.indices((voxels + 1, voxels + 1, voxels + 1))
    voxel = (
        (x == _index(arousal))
        & (y == _index(dominance))
        & (z == _index(valence))
    )
    colors = np.empty(voxel.shape, dtype=object)
    colors[voxel] = "#fcb06c"
    fig = plt.figure()
    ax = fig.add_subplot(projection="3d")
    ax.voxels(voxel, facecolors=colors, edgecolor="k")
    ax.set_aspect("equal")
    ax.set_xlim([0, voxels])
    ax.set_ylim([0, voxels])
    ax.set_zlim([0, voxels])
    ax.set_xlabel("arousal", fontsize="large", labelpad=0)
    ax.set_ylabel("dominance", fontsize="large", labelpad=0)
    ax.set_zlabel("valence", fontsize="large", labelpad=0)
    # Label only the two extreme ticks on each axis ("low" ... "high")
    tick_labels = ["low"] + [None] * (voxels - 1) + ["high"]
    ax.set_xticks(
        list(range(voxels + 1)),
        labels=tick_labels,
        rotation=45,
        rotation_mode="anchor",
        verticalalignment="bottom",
    )
    ax.set_yticks(
        list(range(voxels + 1)),
        labels=tick_labels,
        rotation=-25,
        rotation_mode="anchor",
        verticalalignment="bottom",
    )
    ax.set_zticks(
        list(range(voxels + 1)),
        labels=tick_labels,
        rotation=25,
        rotation_mode="default",
        verticalalignment="bottom",
    )
    return fig
233
+
234
+
235
+
236
  description = (
237
  "Recognize "
238
  f"[age](https://huggingface.co/{age_gender_model_name}), "
 
256
  with gr.Column():
257
  output_age = gr.Textbox(label="Age")
258
  output_gender = gr.Label(label="Gender")
259
+ # output_expression = gr.Label(label="Expression")
260
+ output_expression = gr.Image(label="Expression")
261
 
262
  outputs = [output_age, output_gender, output_expression]
263
  submit_btn.click(recognize, input, outputs)
requirements.txt CHANGED
@@ -1,4 +1,5 @@
1
  audiofile
2
  audresample
 
3
  torch
4
  transformers
 
1
  audiofile
2
  audresample
3
+ matplotlib
4
  torch
5
  transformers