MohamedMotaz committed
Commit 80cacbc (1 parent: 2d25f8d)

final deployment

Files changed (1): app.py (+3 -1)
app.py CHANGED
@@ -3,6 +3,8 @@ from face_emotion_pipeline import process_video, process_image
 
 def process_file(file, is_video , skip = 1 , add_audio = True):
     # print("==========>", is_video)
+    print("==========>", skip)
+    print("==========>", add_audio)
     input_path = file.name
     output_path = "output." + ("mp4" if is_video else "png")
     if is_video == True:
@@ -13,7 +15,7 @@ def process_file(file, is_video , skip = 1 , add_audio = True):
 
 iface = gr.Interface(
     fn=process_file,
-    inputs=[gr.File(label="Upload File"), gr.Checkbox(label="Is Video?") , gr.Slider(1, 20, 1, value=10, label="Frame Skip"), gr.Checkbox(label="Add Audio?")],
+    inputs=[gr.File(label="Upload File"), gr.Checkbox(label="Is Video?") , gr.Slider(1, 20, 1.0, value=5, label="Frame Skip"), gr.Checkbox(label="Add Audio?")],
     outputs=gr.File(label="Processed File"),
     title="Face Emotion Detection",
     description="""Upload an image or video to detect and annotate emotions. <br>