Alexander Fengler committed on
Commit
f3a075d
1 Parent(s): cc37e2b

comments some language and allows skipping frames

Browse files
Files changed (1) hide show
  1. app.py +38 -17
app.py CHANGED
@@ -31,34 +31,58 @@ from inference import inference_frame
31
  import os
32
  import pathlib
33
 
 
34
 
35
def analize_video(x):
    """Run object detection over every frame of a video and re-encode the result.

    Each frame is passed through ``inference_frame`` and written as a PNG
    into a fresh subdirectory of ``/tmp/test/``; the PNGs are then stitched
    into an H.264 MP4 with ffmpeg.

    Parameters
    ----------
    x : str
        Path to the input video file.

    Returns
    -------
    str
        Path to the processed output MP4. If the output already exists it
        is returned immediately without re-processing.
    """
    print(x)

    # Working directories live under /tmp/test/<n>, where <n> is the number
    # of entries already present.
    # NOTE(review): this naming scheme is racy if two requests arrive
    # concurrently — confirm the app runs with a single worker.
    path = '/tmp/test/'
    os.makedirs(path, exist_ok=True)
    videos = len(os.listdir(path))
    path = f'{path}{videos}'
    os.makedirs(path, exist_ok=True)

    outname = f'{path}_processed.mp4'
    if os.path.exists(outname):
        print('video already processed')
        return outname

    cap = cv2.VideoCapture(x)
    counter = 0
    try:
        while cap.isOpened():
            ret, frame = cap.read()
            if not ret:
                break
            name = os.path.join(path, f'{counter:05d}.png')
            frame = inference_frame(frame)
            # write the processed frame
            cv2.imwrite(name, frame)
            counter += 1
    finally:
        # BUG FIX: the original never called cap.release(), leaking the
        # decoder/file handle for every processed video. Release it even
        # if inference or imwrite raises.
        cap.release()

    # Create the output video from the numbered PNGs (-y overwrites any
    # stale partial output).
    print(path)
    os.system(f'''ffmpeg -framerate 20 -pattern_type glob -i '{path}/*.png' -c:v libx264 -pix_fmt yuv420p {outname} -y''')
    return outname
63
 
64
  def set_example_image(example: list) -> dict:
@@ -67,10 +91,9 @@ def set_example_image(example: list) -> dict:
67
def show_video(example: list) -> dict:
    """Return an update that loads the first item of *example* into the player."""
    selected = example[0]
    return gr.Video.update(value=selected)
69
 
70
-
71
  with gr.Blocks(title='Shark Patrol',theme=gr.themes.Soft(),live=True,) as demo:
72
- gr.Markdown("Initial DEMO.")
73
- with gr.Tab("Current Detections"):
74
 
75
  with gr.Row():
76
  video_example = gr.Video(source='upload',include_audio=False,stream=True)
@@ -83,28 +106,26 @@ with gr.Blocks(title='Shark Patrol',theme=gr.themes.Soft(),live=True,) as demo:
83
  inputs=example_preds,
84
  outputs=video_example)
85
 
86
- with gr.Tab("Shark Detector"):
87
  with gr.Row():
88
  video_input = gr.Video(source='upload',include_audio=False)
89
  #video_input.style(witdh='50%',height='50%')
90
  video_output = gr.Video()
91
  #video_output.style(witdh='50%',height='50%')
92
 
93
- video_button = gr.Button("Analyze")
94
  with gr.Row():
95
  paths = sorted(pathlib.Path('videos_example/').rglob('*.mp4'))
96
  example_images = gr.Dataset(components=[video_input],
97
  samples=[[path.as_posix()]
98
  for path in paths if 'videos_side_by_side' not in str(path)])
99
 
100
- video_button.click(analize_video, inputs=video_input, outputs=video_output)
101
 
102
  example_images.click(fn=set_example_image,
103
  inputs=example_images,
104
  outputs=video_input)
105
 
106
-
107
-
108
  demo.queue()
109
  #if os.getenv('SYSTEM') == 'spaces':
110
  demo.launch(width='40%',auth=(os.environ.get('SHARK_USERNAME'), os.environ.get('SHARK_PASSWORD')))
 
31
  import os
32
  import pathlib
33
 
34
+ from time import time
35
 
36
def analyze_video(x, skip_frames=5, frame_rate_out=8):
    """Run inference on every ``skip_frames``-th frame of a video, rebuild an MP4.

    Sampled frames are passed through ``inference_frame`` and written as
    PNGs into a fresh subdirectory of ``/tmp/test/``; ffmpeg then stitches
    them into an H.264 MP4 at ``frame_rate_out`` fps. Per-stage timings are
    printed for profiling.

    Parameters
    ----------
    x : str
        Path to the input video file.
    skip_frames : int, optional
        Process only every ``skip_frames``-th frame (default 5). Values
        below 1 are clamped to 1 (process every frame).
    frame_rate_out : int, optional
        Frame rate handed to ffmpeg for the output video (default 8).

    Returns
    -------
    str
        Path to the processed output MP4. If the output already exists it
        is returned immediately without re-processing.
    """
    print(x)

    # Define path to saved images.
    path = '/tmp/test/'
    os.makedirs(path, exist_ok=True)

    # Define name of current video as number of videos in path.
    # NOTE(review): racy if two requests arrive concurrently — confirm the
    # app runs with a single worker.
    n_videos_in_path = len(os.listdir(path))
    path = f'{path}{n_videos_in_path}'
    os.makedirs(path, exist_ok=True)

    # Define name of output video; serve the cached result if present.
    outname = f'{path}_processed.mp4'
    if os.path.exists(outname):
        print('video already processed')
        return outname

    # BUG FIX: skip_frames == 0 crashed with ZeroDivisionError on
    # `counter % skip_frames`; clamp instead.
    skip_frames = max(1, skip_frames)

    cap = cv2.VideoCapture(x)
    counter = 0
    try:
        while cap.isOpened():
            start = time()
            ret, frame = cap.read()
            print(f'read time: {time()-start}')

            if not ret:
                break
            if counter % skip_frames == 0:
                name = os.path.join(path, f'{counter:05d}.png')
                start = time()
                frame = inference_frame(frame)
                print(f'inference time: {time()-start}')
                # write the processed frame
                start = time()
                cv2.imwrite(name, frame)
                print(f'write time: {time()-start}')
            print(counter)
            counter += 1
    finally:
        # Release everything if job is finished — BUG FIX: now also released
        # when inference/imwrite raises, instead of leaking the handle.
        cap.release()

    # Create video from predicted images. The glob still sorts correctly
    # despite gaps in the frame numbering from skipped frames.
    print(path)
    os.system(f'''ffmpeg -framerate {frame_rate_out} -pattern_type glob -i '{path}/*.png' -c:v libx264 -pix_fmt yuv420p {outname} -y''')
    return outname
87
 
88
  def set_example_image(example: list) -> dict:
 
91
def show_video(example: list) -> dict:
    """Update the video component with the clicked dataset sample."""
    # Gradio dataset callbacks receive the sample row; the video path is
    # its first element.
    video_path = example[0]
    return gr.Video.update(value=video_path)
93
 
 
94
  with gr.Blocks(title='Shark Patrol',theme=gr.themes.Soft(),live=True,) as demo:
95
+ gr.Markdown("Alpha Demo of the Sharkpatrol Oceanlife Detector.")
96
+ with gr.Tab("Preloaded Examples"):
97
 
98
  with gr.Row():
99
  video_example = gr.Video(source='upload',include_audio=False,stream=True)
 
106
  inputs=example_preds,
107
  outputs=video_example)
108
 
109
+ with gr.Tab("Test your own Video"):
110
  with gr.Row():
111
  video_input = gr.Video(source='upload',include_audio=False)
112
  #video_input.style(witdh='50%',height='50%')
113
  video_output = gr.Video()
114
  #video_output.style(witdh='50%',height='50%')
115
 
116
+ video_button = gr.Button("Analyze your Video")
117
  with gr.Row():
118
  paths = sorted(pathlib.Path('videos_example/').rglob('*.mp4'))
119
  example_images = gr.Dataset(components=[video_input],
120
  samples=[[path.as_posix()]
121
  for path in paths if 'videos_side_by_side' not in str(path)])
122
 
123
+ video_button.click(analyze_video, inputs=video_input, outputs=video_output)
124
 
125
  example_images.click(fn=set_example_image,
126
  inputs=example_images,
127
  outputs=video_input)
128
 
 
 
129
  demo.queue()
130
  #if os.getenv('SYSTEM') == 'spaces':
131
  demo.launch(width='40%',auth=(os.environ.get('SHARK_USERNAME'), os.environ.get('SHARK_PASSWORD')))