lazarevich committed on
Commit
eb461b0
1 Parent(s): 1176ec1

add rpi5 data and HW comparison tab

Browse files
Files changed (2) hide show
  1. app.py +152 -70
  2. plotting.py +196 -34
app.py CHANGED
@@ -1,12 +1,15 @@
1
  import gradio as gr
2
 
3
- from plotting import create_yolobench_plots, get_pareto_table
4
  from utils import DEEPLITE_DARK_BLUE_GRADIO
5
 
 
6
  def get_hw_description(hw_name):
7
  HW_URLS = {
8
  'Jetson Nano (GPU, ONNX Runtime, FP32)': 'https://8074457.fs1.hubspotusercontent-na1.net/hubfs/8074457/YOLOBench%20Hardware%20product%20sheets/JetsonNano_DataSheet_DS09366001v1.1.pdf',
9
  'Raspberry Pi 4 Model B (CPU, TFLite, FP32)': 'https://8074457.fs1.hubspotusercontent-na1.net/hubfs/8074457/YOLOBench%20Hardware%20product%20sheets/raspberry-pi-4-datasheet.pdf',
 
 
10
  'Intel® Core™i7-10875H (CPU, OpenVINO, FP32)': 'https://8074457.fs1.hubspotusercontent-na1.net/hubfs/8074457/YOLOBench%20Hardware%20product%20sheets/Intel_ARK_SpecificationsChart_2023_10_11.pdf',
11
  'Khadas VIM3 (NPU, INT16)': 'https://8074457.fs1.hubspotusercontent-na1.net/hubfs/8074457/YOLOBench%20Hardware%20product%20sheets/khadas_vim3_specs.pdf',
12
  'Orange Pi 5 (NPU, FP16)': 'https://8074457.fs1.hubspotusercontent-na1.net/hubfs/8074457/YOLOBench%20Hardware%20product%20sheets/OrangePi_5_RK3588S_User%20Manual_v1.5.pdf',
@@ -15,18 +18,17 @@ def get_hw_description(hw_name):
15
 
16
  hw_url = HW_URLS[hw_name]
17
  DESC = f"""
18
-
19
  🔸 <span style="font-size:16px">Click </span>[<span style="font-size:16px">here</span>]({hw_url})<span style="font-size:16px"> for more information on the selected hardware platform.</span>
20
-
21
  🔸 <span style="font-size:16px">Refer to the [Deeplite Torch Zoo](https://github.com/Deeplite/deeplite-torch-zoo/tree/develop/results/yolobench) for details about latency measurement experiments.</span>
22
-
23
  """
24
  return DESC
25
 
26
 
27
- with gr.Blocks(theme=gr.themes.Default(secondary_hue=DEEPLITE_DARK_BLUE_GRADIO),
28
- css="table { width: 100%; }", analytics_enabled=True) as demo:
29
-
 
 
30
  gr.HTML(
31
  """
32
  <div align="center">
@@ -73,7 +75,7 @@ with gr.Blocks(theme=gr.themes.Default(secondary_hue=DEEPLITE_DARK_BLUE_GRADIO),
73
 
74
  with gr.Row():
75
  gr.Markdown(
76
- """
77
  <span style="font-size:16px">
78
 
79
  🚀 <b>YOLOBench</b> 🚀 is a latency-accuracy benchmark of popular single-stage detectors from the YOLO series. Major highlights of this work are:
@@ -93,65 +95,118 @@ with gr.Blocks(theme=gr.themes.Default(secondary_hue=DEEPLITE_DARK_BLUE_GRADIO),
93
  """
94
  )
95
 
96
- with gr.Row(equal_height=True):
97
- with gr.Column():
98
- hardware_name = gr.Dropdown(
99
- choices=[
100
- 'Jetson Nano (GPU, ONNX Runtime, FP32)',
101
- 'Raspberry Pi 4 Model B (CPU, TFLite, FP32)',
102
- 'Intel® Core™i7-10875H (CPU, OpenVINO, FP32)',
103
- 'Khadas VIM3 (NPU, INT16)',
104
- 'Orange Pi 5 (NPU, FP16)',
105
- 'NVIDIA A40 (GPU, TensorRT, FP32)',
106
- ],
107
- value='Jetson Nano (GPU, ONNX Runtime, FP32)',
108
- label='Hardware',
109
- )
110
- with gr.Column():
111
- dataset_name = gr.Dropdown(
112
- choices=['COCO', 'PASCAL VOC', 'SKU-110K', 'WIDERFACE'],
113
- value='COCO',
114
- label='Dataset',
115
- )
116
-
117
- with gr.Row(equal_height=True):
118
- with gr.Column():
119
- hardware_desc = gr.Markdown(get_hw_description(hardware_name.value))
120
-
121
- with gr.Column():
122
- metric_name = gr.Radio(
123
- ['mAP@0.5:0.95', 'mAP@0.5', 'Precision', 'Recall'],
124
- value='mAP@0.5:0.95',
125
- label='Accuracy metric to plot',
126
- )
127
-
128
- with gr.Row(equal_height=True):
129
- with gr.Column():
130
- gr.Markdown("""
131
- <span style="font-size:16px">
132
-
133
- 🚀 <span style="font-weight:bold">Want to add your own hardware benchmarks to YOLOBench?</span> 🚀
134
-
135
- Contact us [here](https://info.deeplite.ai/add_yolobench_data) for your benchmarking kit and we'll set you up!
136
-
137
- </span>
138
- """)
139
-
140
- with gr.Column():
141
- vis_options = gr.CheckboxGroup(
142
- [
143
- 'Model family',
144
- 'Highlight Pareto',
145
- 'Show Pareto only',
146
- 'Log x-axis'
147
- ],
148
- value=['Model family',],
149
- label='Visualization options',
150
- )
151
-
152
-
153
- with gr.Row():
154
- upper_panel_fig = gr.Plot(show_label=False)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
155
 
156
  gr.Markdown(
157
  """
@@ -211,7 +266,7 @@ with gr.Blocks(theme=gr.themes.Default(secondary_hue=DEEPLITE_DARK_BLUE_GRADIO),
211
  table_mode = gr.Radio(
212
  ['Show top-10 models', 'Show all'],
213
  value='Show top-10 models',
214
- label='Pareto model table'
215
  )
216
 
217
  with gr.Row():
@@ -231,9 +286,10 @@ with gr.Blocks(theme=gr.themes.Default(secondary_hue=DEEPLITE_DARK_BLUE_GRADIO),
231
  }
232
  ```
233
  """
234
- )
235
 
236
  inputs = [dataset_name, hardware_name, metric_name, vis_options, table_mode]
 
237
 
238
  # plot by default (VOC, Raspi4)
239
  demo.load(
@@ -242,6 +298,12 @@ with gr.Blocks(theme=gr.themes.Default(secondary_hue=DEEPLITE_DARK_BLUE_GRADIO),
242
  outputs=[upper_panel_fig, pareto_table],
243
  )
244
 
 
 
 
 
 
 
245
  demo.load(
246
  fn=get_pareto_table,
247
  inputs=[dataset_name, hardware_name, metric_name],
@@ -286,6 +348,26 @@ with gr.Blocks(theme=gr.themes.Default(secondary_hue=DEEPLITE_DARK_BLUE_GRADIO),
286
  outputs=[hardware_desc],
287
  )
288
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
289
 
290
  if __name__ == "__main__":
291
- demo.launch()
 
1
  import gradio as gr
2
 
3
+ from plotting import create_yolobench_plots, get_pareto_table, create_comparison_plot
4
  from utils import DEEPLITE_DARK_BLUE_GRADIO
5
 
6
+
7
  def get_hw_description(hw_name):
8
  HW_URLS = {
9
  'Jetson Nano (GPU, ONNX Runtime, FP32)': 'https://8074457.fs1.hubspotusercontent-na1.net/hubfs/8074457/YOLOBench%20Hardware%20product%20sheets/JetsonNano_DataSheet_DS09366001v1.1.pdf',
10
  'Raspberry Pi 4 Model B (CPU, TFLite, FP32)': 'https://8074457.fs1.hubspotusercontent-na1.net/hubfs/8074457/YOLOBench%20Hardware%20product%20sheets/raspberry-pi-4-datasheet.pdf',
11
+ 'Raspberry Pi 4 Model B (CPU, ONNX Runtime, FP32)': 'https://8074457.fs1.hubspotusercontent-na1.net/hubfs/8074457/YOLOBench%20Hardware%20product%20sheets/raspberry-pi-4-datasheet.pdf',
12
+ 'Raspberry Pi 5 Model B (CPU, ONNX Runtime, FP32)': 'https://8074457.fs1.hubspotusercontent-na1.net/hubfs/8074457/YOLOBench%20Assets/Hardware%20Product%20Assets/raspberry-pi-5-product-brief.pdf',
13
  'Intel® Core™i7-10875H (CPU, OpenVINO, FP32)': 'https://8074457.fs1.hubspotusercontent-na1.net/hubfs/8074457/YOLOBench%20Hardware%20product%20sheets/Intel_ARK_SpecificationsChart_2023_10_11.pdf',
14
  'Khadas VIM3 (NPU, INT16)': 'https://8074457.fs1.hubspotusercontent-na1.net/hubfs/8074457/YOLOBench%20Hardware%20product%20sheets/khadas_vim3_specs.pdf',
15
  'Orange Pi 5 (NPU, FP16)': 'https://8074457.fs1.hubspotusercontent-na1.net/hubfs/8074457/YOLOBench%20Hardware%20product%20sheets/OrangePi_5_RK3588S_User%20Manual_v1.5.pdf',
 
18
 
19
  hw_url = HW_URLS[hw_name]
20
  DESC = f"""
 
21
  🔸 <span style="font-size:16px">Click </span>[<span style="font-size:16px">here</span>]({hw_url})<span style="font-size:16px"> for more information on the selected hardware platform.</span>
 
22
  🔸 <span style="font-size:16px">Refer to the [Deeplite Torch Zoo](https://github.com/Deeplite/deeplite-torch-zoo/tree/develop/results/yolobench) for details about latency measurement experiments.</span>
 
23
  """
24
  return DESC
25
 
26
 
27
+ with gr.Blocks(
28
+ theme=gr.themes.Default(secondary_hue=DEEPLITE_DARK_BLUE_GRADIO),
29
+ css="table { width: 100%; }",
30
+ analytics_enabled=True,
31
+ ) as demo:
32
  gr.HTML(
33
  """
34
  <div align="center">
 
75
 
76
  with gr.Row():
77
  gr.Markdown(
78
+ """
79
  <span style="font-size:16px">
80
 
81
  🚀 <b>YOLOBench</b> 🚀 is a latency-accuracy benchmark of popular single-stage detectors from the YOLO series. Major highlights of this work are:
 
95
  """
96
  )
97
 
98
+ with gr.Tab("YOLO model comparision"):
99
+ with gr.Row(equal_height=True):
100
+ with gr.Column():
101
+ hardware_name = gr.Dropdown(
102
+ choices=[
103
+ 'Jetson Nano (GPU, ONNX Runtime, FP32)',
104
+ 'Raspberry Pi 4 Model B (CPU, TFLite, FP32)',
105
+ 'Raspberry Pi 4 Model B (CPU, ONNX Runtime, FP32)',
106
+ 'Raspberry Pi 5 Model B (CPU, ONNX Runtime, FP32)',
107
+ 'Intel® Core™i7-10875H (CPU, OpenVINO, FP32)',
108
+ 'Khadas VIM3 (NPU, INT16)',
109
+ 'Orange Pi 5 (NPU, FP16)',
110
+ 'NVIDIA A40 (GPU, TensorRT, FP32)',
111
+ ],
112
+ value='Jetson Nano (GPU, ONNX Runtime, FP32)',
113
+ label='Hardware target',
114
+ )
115
+ with gr.Column():
116
+ dataset_name = gr.Dropdown(
117
+ choices=['COCO', 'PASCAL VOC', 'SKU-110K', 'WIDERFACE'],
118
+ value='COCO',
119
+ label='Dataset',
120
+ )
121
+
122
+ with gr.Row(equal_height=True):
123
+ with gr.Column():
124
+ hardware_desc = gr.Markdown(get_hw_description(hardware_name.value))
125
+
126
+ with gr.Column():
127
+ metric_name = gr.Radio(
128
+ ['mAP@0.5:0.95', 'mAP@0.5', 'Precision', 'Recall'],
129
+ value='mAP@0.5:0.95',
130
+ label='Accuracy metric to plot',
131
+ )
132
+
133
+ with gr.Row(equal_height=True):
134
+ with gr.Column():
135
+ gr.Markdown(
136
+ """
137
+ <span style="font-size:16px">
138
+
139
+ 🚀 <span style="font-weight:bold">Want to add your own hardware benchmarks to YOLOBench?</span> 🚀
140
+ Contact us [here](https://info.deeplite.ai/add_yolobench_data) for your benchmarking kit and we'll set you up!
141
+
142
+ </span>
143
+ """
144
+ )
145
+
146
+ with gr.Column():
147
+ vis_options = gr.CheckboxGroup(
148
+ [
149
+ 'Model family',
150
+ 'Highlight Pareto',
151
+ 'Show Pareto only',
152
+ 'Log x-axis',
153
+ ],
154
+ value=[
155
+ 'Model family',
156
+ ],
157
+ label='Visualization options',
158
+ )
159
+
160
+ with gr.Row(equal_height=True):
161
+ upper_panel_fig = gr.Plot(show_label=False)
162
+
163
+ with gr.Tab("Hardware platform comparison"):
164
+ with gr.Row(equal_height=True):
165
+ with gr.Column():
166
+ comp_hw = gr.Dropdown(
167
+ [
168
+ 'Jetson Nano (GPU, ONNX Runtime, FP32)',
169
+ 'Raspberry Pi 4 Model B (CPU, TFLite, FP32)',
170
+ 'Raspberry Pi 4 Model B (CPU, ONNX Runtime, FP32)',
171
+ 'Raspberry Pi 5 Model B (CPU, ONNX Runtime, FP32)',
172
+ 'Intel® Core™i7-10875H (CPU, OpenVINO, FP32)',
173
+ 'Khadas VIM3 (NPU, INT16)',
174
+ 'Orange Pi 5 (NPU, FP16)',
175
+ 'NVIDIA A40 (GPU, TensorRT, FP32)',
176
+ ],
177
+ value=[
178
+ 'Jetson Nano (GPU, ONNX Runtime, FP32)',
179
+ 'Intel® Core™i7-10875H (CPU, OpenVINO, FP32)',
180
+ ],
181
+ label='Hardware',
182
+ multiselect=True,
183
+ )
184
+ with gr.Column():
185
+ comp_data = gr.Dropdown(
186
+ choices=['COCO', 'PASCAL VOC', 'SKU-110K', 'WIDERFACE'],
187
+ value='COCO',
188
+ label='Dataset',
189
+ )
190
+
191
+ with gr.Row(equal_height=True):
192
+ with gr.Column():
193
+ comp_metric = gr.Radio(
194
+ ['mAP@0.5:0.95', 'mAP@0.5', 'Precision', 'Recall'],
195
+ value='mAP@0.5:0.95',
196
+ label='Accuracy metric to plot',
197
+ )
198
+
199
+ with gr.Column():
200
+ comp_vis_opt = gr.CheckboxGroup(
201
+ ['Log x-axis', 'Remove datapoint markers'],
202
+ value=[
203
+ 'Log x-axis',
204
+ ],
205
+ label='Visualization options',
206
+ )
207
+
208
+ with gr.Row(equal_height=True):
209
+ comp_plot = gr.Plot(show_label=False)
210
 
211
  gr.Markdown(
212
  """
 
266
  table_mode = gr.Radio(
267
  ['Show top-10 models', 'Show all'],
268
  value='Show top-10 models',
269
+ label='Pareto model table',
270
  )
271
 
272
  with gr.Row():
 
286
  }
287
  ```
288
  """
289
+ )
290
 
291
  inputs = [dataset_name, hardware_name, metric_name, vis_options, table_mode]
292
+ inputs_comparison = [comp_data, comp_hw, comp_metric, comp_vis_opt]
293
 
294
  # plot by default (VOC, Raspi4)
295
  demo.load(
 
298
  outputs=[upper_panel_fig, pareto_table],
299
  )
300
 
301
+ demo.load(
302
+ fn=create_comparison_plot,
303
+ inputs=inputs_comparison,
304
+ outputs=[comp_plot],
305
+ )
306
+
307
  demo.load(
308
  fn=get_pareto_table,
309
  inputs=[dataset_name, hardware_name, metric_name],
 
348
  outputs=[hardware_desc],
349
  )
350
 
351
+ comp_data.change(
352
+ fn=create_comparison_plot,
353
+ inputs=inputs_comparison,
354
+ outputs=[comp_plot],
355
+ )
356
+ comp_hw.change(
357
+ fn=create_comparison_plot,
358
+ inputs=inputs_comparison,
359
+ outputs=[comp_plot],
360
+ )
361
+ comp_metric.change(
362
+ fn=create_comparison_plot,
363
+ inputs=inputs_comparison,
364
+ outputs=[comp_plot],
365
+ )
366
+ comp_vis_opt.change(
367
+ fn=create_comparison_plot,
368
+ inputs=inputs_comparison,
369
+ outputs=[comp_plot],
370
+ )
371
 
372
  if __name__ == "__main__":
373
+ demo.launch()
plotting.py CHANGED
@@ -6,7 +6,6 @@ from utils import DEEPLITE_LIGHT_BLUE_HEX, load_yolobench_data
6
 
7
  df, pareto_indices = load_yolobench_data()
8
 
9
-
10
  METRIC_NAME_MAPPING = {
11
  'mAP@0.5': 'mAP_0.5',
12
  'mAP@0.5:0.95': 'mAP_0.5:0.95',
@@ -19,6 +18,8 @@ METRIC_KEYS_TO_NAMES = {v: k for k, v in METRIC_NAME_MAPPING.items()}
19
 
20
  LATENCY_KEYS = {
21
  'Raspberry Pi 4 Model B (CPU, TFLite, FP32)': 'raspi4_tflite_latency',
 
 
22
  'Jetson Nano (GPU, ONNX Runtime, FP32)': 'nano_gpu_latency',
23
  'Intel® Core™i7-10875H (CPU, OpenVINO, FP32)': 'openvino_latency',
24
  'Khadas VIM3 (NPU, INT16)': 'vim3_latency',
@@ -39,15 +40,18 @@ DATASET_TAGS_TO_NAMES = {v: k for k, v in DATASET_TAGS.items()}
39
 
40
 
41
  def get_scatter_plot(
42
- dataset_tag,
43
- metric_tag,
44
- latency_key,
45
- model_family_coloring=True,
46
- add_pareto_frontier=False,
47
- plot_pareto_only=False,
48
- log_axis=False,
49
- ):
50
- fig_opts, layout_opts = {'opacity': 0.5, 'color_discrete_sequence': [DEEPLITE_LIGHT_BLUE_HEX]}, {}
 
 
 
51
  if model_family_coloring:
52
  fig_opts = {
53
  'color': 'model_family',
@@ -89,7 +93,14 @@ def get_scatter_plot(
89
 
90
  fig.update_layout(
91
  height=600,
92
- modebar_remove=['lasso', 'autoscale', 'zoomin', 'zoomout', 'select2d', 'select'],
 
 
 
 
 
 
 
93
  xaxis_title=f'{LATENCY_KEYS_TO_NAMES[latency_key]} latency, ms',
94
  yaxis_title=f"{METRIC_KEYS_TO_NAMES[metric_tag]}",
95
  xaxis=dict(
@@ -105,22 +116,140 @@ def get_scatter_plot(
105
  hoverlabel=dict(
106
  # bgcolor="white",
107
  font_size=14,
108
- font_family='Source Sans Pro'
109
  ),
110
  **layout_opts,
111
  )
112
  if add_pareto_frontier:
113
  fig = pareto_frontier_layer(fig, dataset_tag, metric_tag, latency_key)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
114
  return fig
115
 
116
 
117
  def create_yolobench_plots(
118
- dataset_name,
119
- hardware_name,
120
- metric_name,
121
- vis_options,
122
- table_mode,
123
- ):
124
  model_family_coloring = 'Model family' in vis_options
125
  add_pareto_frontier = 'Highlight Pareto' in vis_options
126
  plot_pareto_only = 'Show Pareto only' in vis_options
@@ -141,11 +270,11 @@ def create_yolobench_plots(
141
 
142
 
143
  def pareto_frontier_layer(
144
- fig,
145
- dataset_tag,
146
- metric_tag,
147
- latency_key,
148
- ):
149
  metric_key = f'{metric_tag}_{dataset_tag}'
150
  frontier = pareto_indices[metric_key][latency_key]
151
  fig.add_trace(
@@ -162,8 +291,29 @@ def pareto_frontier_layer(
162
  return fig
163
 
164
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
165
  def get_pareto_table(
166
- dataset_name, hardware_name, metric_name, expand_table=False,
 
 
 
167
  ):
168
  dataset_tag = DATASET_TAGS[dataset_name]
169
  metric_tag = METRIC_NAME_MAPPING[metric_name]
@@ -175,32 +325,44 @@ def get_pareto_table(
175
 
176
  frontier = pareto_indices[metric_key][latency_key]
177
  table_df = df.iloc[frontier, :][['model_name', metric_key, latency_key]]
178
- table_df['Input resolution (px)'] = table_df['model_name'].apply(lambda name: name.split('_')[-1])
179
- table_df['Model name'] = table_df['model_name'].apply(lambda name: name.split('_')[0])
 
 
 
 
180
  table_df[metric_key_final] = table_df[metric_key].apply(lambda val: round(val, 3))
181
  table_df[latency_key_final] = table_df[latency_key].apply(lambda val: round(val, 2))
182
 
183
  def make_clickable(url, name):
184
  return f'<a href="{url}">{name}</a>'
185
 
186
-
187
  if dataset_name == 'COCO':
188
  table_df['Download link'] = table_df['model_name'].apply(
189
  lambda name: f'https://download.deeplite.ai/zoo/models/YOLOBench/{name.split("_")[0]}_640.pt'
190
  )
191
- table_df['Download link'] = table_df.apply(lambda x: make_clickable(x['Download link'], 'Weights download'), axis=1)
 
 
192
  else:
193
- table_df['Download link'] = table_df['model_name'].apply(lambda s: 'Coming soon')
194
-
 
195
 
196
- table_df = table_df[['Model name', 'Input resolution (px)',
197
- metric_key_final, latency_key_final, 'Download link']].sort_values(by=metric_key_final, ascending=False)
 
 
 
 
 
 
 
198
  if not expand_table:
199
  table_df = table_df.iloc[:10, :]
200
 
201
  table_df = table_df.to_html(
202
- classes='table',
203
- escape=False, render_links=True, index=False
204
  )
205
 
206
  return table_df
 
6
 
7
  df, pareto_indices = load_yolobench_data()
8
 
 
9
  METRIC_NAME_MAPPING = {
10
  'mAP@0.5': 'mAP_0.5',
11
  'mAP@0.5:0.95': 'mAP_0.5:0.95',
 
18
 
19
  LATENCY_KEYS = {
20
  'Raspberry Pi 4 Model B (CPU, TFLite, FP32)': 'raspi4_tflite_latency',
21
+ 'Raspberry Pi 4 Model B (CPU, ONNX Runtime, FP32)': 'pi4_ort_latency',
22
+ 'Raspberry Pi 5 Model B (CPU, ONNX Runtime, FP32)': 'pi5_ort_latency',
23
  'Jetson Nano (GPU, ONNX Runtime, FP32)': 'nano_gpu_latency',
24
  'Intel® Core™i7-10875H (CPU, OpenVINO, FP32)': 'openvino_latency',
25
  'Khadas VIM3 (NPU, INT16)': 'vim3_latency',
 
40
 
41
 
42
  def get_scatter_plot(
43
+ dataset_tag,
44
+ metric_tag,
45
+ latency_key,
46
+ model_family_coloring=True,
47
+ add_pareto_frontier=False,
48
+ plot_pareto_only=False,
49
+ log_axis=False,
50
+ ):
51
+ fig_opts, layout_opts = {
52
+ 'opacity': 0.5,
53
+ 'color_discrete_sequence': [DEEPLITE_LIGHT_BLUE_HEX],
54
+ }, {}
55
  if model_family_coloring:
56
  fig_opts = {
57
  'color': 'model_family',
 
93
 
94
  fig.update_layout(
95
  height=600,
96
+ modebar_remove=[
97
+ 'lasso',
98
+ 'autoscale',
99
+ 'zoomin',
100
+ 'zoomout',
101
+ 'select2d',
102
+ 'select',
103
+ ],
104
  xaxis_title=f'{LATENCY_KEYS_TO_NAMES[latency_key]} latency, ms',
105
  yaxis_title=f"{METRIC_KEYS_TO_NAMES[metric_tag]}",
106
  xaxis=dict(
 
116
  hoverlabel=dict(
117
  # bgcolor="white",
118
  font_size=14,
119
+ font_family='Source Sans Pro',
120
  ),
121
  **layout_opts,
122
  )
123
  if add_pareto_frontier:
124
  fig = pareto_frontier_layer(fig, dataset_tag, metric_tag, latency_key)
125
+ fig.update_layout(autosize=True)
126
+ return fig
127
+
128
+
129
def get_comparison_plot(
    dataset_tag,
    metric_tag,
    latency_keys,
    log_axis=False,
    remove_marker=False,
):
    """Build a Plotly figure comparing Pareto-optimal models across hardware targets.

    Draws one accuracy-vs-latency Pareto frontier trace per selected hardware
    target so their trade-off curves can be compared on a single plot.

    Args:
        dataset_tag: dataset key suffix used to select metric columns
            (a value from ``DATASET_TAGS``).
        metric_tag: metric key prefix (a value from ``METRIC_NAME_MAPPING``).
        latency_keys: latency column keys, one per selected hardware target
            (values from ``LATENCY_KEYS``). May be empty.
        log_axis: if True, use a logarithmic latency (x) axis.
        remove_marker: if True, draw frontiers as plain lines without
            per-datapoint markers.

    Returns:
        A ``plotly.graph_objects.Figure``; when ``latency_keys`` is empty the
        figure carries only an instructional annotation instead of traces.
    """
    if len(latency_keys) == 0:
        # Nothing selected: show a hint annotation instead of an empty plot.
        layout_opts = {
            "annotations": [
                {
                    "text": "Please select at least 2 hardware targets to compare!",
                    "showarrow": False,
                    "font": {"size": 28},
                }
            ]
        }
    else:
        layout_opts = {
            'legend': dict(
                title='Hardware targets selected<br>(click to toggle)',
            )
        }

    fig = go.Figure(
        layout=go.Layout(
            title=go.layout.Title(
                text=f'{METRIC_KEYS_TO_NAMES[metric_tag]}-latency Pareto Optimal Models'
            )
        )
    )

    mode = 'lines' if remove_marker else 'lines+markers'

    # The metric column is the same for every hardware target; compute it once
    # instead of rebuilding the key string on each loop iteration.
    metric_key = f'{metric_tag}_{dataset_tag}'
    for latency_key in latency_keys:
        frontier = pareto_indices[metric_key][latency_key]
        df_pareto = df.iloc[frontier, :]
        fig.add_trace(
            go.Scatter(
                x=df_pareto[latency_key],
                y=df_pareto[metric_key],
                name=LATENCY_KEYS_TO_NAMES[latency_key],
                mode=mode,
                # model names surface in the hover tooltip via customdata
                customdata=df_pareto['model_name'],
                hovertemplate='<b>model name:%{customdata}</b><br>latency:%{x:.2f} <br>metric: %{y:.2f} ',
            )
        )

    if log_axis:
        fig.update_xaxes(type='log')
        x_axis_title = 'Inference latency, ms (BS=1, log scale)'
    else:
        x_axis_title = 'Inference latency, ms (BS=1)'

    fig.update_layout(
        height=600,
        plot_bgcolor='white',
        modebar_remove=[
            'lasso',
            'autoscale',
            'zoomin',
            'zoomout',
            'select2d',
            'select',
        ],
        xaxis_title=x_axis_title,
        yaxis_title=f"{METRIC_KEYS_TO_NAMES[metric_tag]}",
        xaxis=dict(
            rangeslider=dict(
                visible=True,
                bgcolor=DEEPLITE_LIGHT_BLUE_HEX,
                thickness=0.02,
            ),
        ),
        yaxis=dict(
            fixedrange=False,
        ),
        hoverlabel=dict(
            # bgcolor="white",
            font_size=14,
            font_family='Source Sans Pro',
        ),
        **layout_opts,
    )
    fig.update_layout(autosize=True, template="plotly_white")

    return fig
221
+
222
+
223
def pareto_frontier_layer(
    fig,
    dataset_tag,
    metric_tag,
    latency_key,
):
    """Overlay the accuracy-latency Pareto frontier on an existing figure.

    Args:
        fig: the ``plotly.graph_objects.Figure`` to draw onto (mutated in place).
        dataset_tag: dataset key suffix used to select the metric column.
        metric_tag: metric key prefix used to select the metric column.
        latency_key: latency column key identifying the hardware target.

    Returns:
        The same figure with a semi-transparent grey frontier trace added.
    """
    metric_key = f'{metric_tag}_{dataset_tag}'
    # Pre-computed Pareto-optimal row indices for this metric/hardware pair.
    pareto_points = df.iloc[pareto_indices[metric_key][latency_key], :]
    frontier_trace = go.Scatter(
        x=pareto_points[latency_key],
        y=pareto_points[metric_key],
        mode='lines+markers',
        opacity=0.5,
        line=go.scatter.Line(color='grey'),
        showlegend=False,
        name=metric_key,
        marker=dict(symbol=['circle']),
    )
    fig.add_trace(frontier_trace)
    return fig
244
 
245
 
246
  def create_yolobench_plots(
247
+ dataset_name,
248
+ hardware_name,
249
+ metric_name,
250
+ vis_options,
251
+ table_mode,
252
+ ):
253
  model_family_coloring = 'Model family' in vis_options
254
  add_pareto_frontier = 'Highlight Pareto' in vis_options
255
  plot_pareto_only = 'Show Pareto only' in vis_options
 
270
 
271
 
272
  def pareto_frontier_layer(
273
+ fig,
274
+ dataset_tag,
275
+ metric_tag,
276
+ latency_key,
277
+ ):
278
  metric_key = f'{metric_tag}_{dataset_tag}'
279
  frontier = pareto_indices[metric_key][latency_key]
280
  fig.add_trace(
 
291
  return fig
292
 
293
 
294
def create_comparison_plot(dataset_name, hardware_list, metric_name, vis_options):
    """Gradio callback: build the hardware-comparison Pareto plot.

    Args:
        dataset_name: human-readable dataset name (key of ``DATASET_TAGS``).
        hardware_list: human-readable hardware names (keys of ``LATENCY_KEYS``);
            may be empty, in which case the plot shows a selection hint.
        metric_name: human-readable metric name (key of ``METRIC_NAME_MAPPING``).
        vis_options: selected checkbox labels controlling log axis and markers.

    Returns:
        The ``plotly.graph_objects.Figure`` produced by ``get_comparison_plot``.
    """
    log_axis = 'Log x-axis' in vis_options
    remove_marker = 'Remove datapoint markers' in vis_options

    # Map human-readable hardware names to their latency column keys.
    latency_keys = [LATENCY_KEYS[hardware_name] for hardware_name in hardware_list]

    return get_comparison_plot(
        DATASET_TAGS[dataset_name],
        METRIC_NAME_MAPPING[metric_name],
        latency_keys,
        log_axis,
        remove_marker,
    )
310
+
311
+
312
  def get_pareto_table(
313
+ dataset_name,
314
+ hardware_name,
315
+ metric_name,
316
+ expand_table=False,
317
  ):
318
  dataset_tag = DATASET_TAGS[dataset_name]
319
  metric_tag = METRIC_NAME_MAPPING[metric_name]
 
325
 
326
  frontier = pareto_indices[metric_key][latency_key]
327
  table_df = df.iloc[frontier, :][['model_name', metric_key, latency_key]]
328
+ table_df['Input resolution (px)'] = table_df['model_name'].apply(
329
+ lambda name: name.split('_')[-1]
330
+ )
331
+ table_df['Model name'] = table_df['model_name'].apply(
332
+ lambda name: name.split('_')[0]
333
+ )
334
  table_df[metric_key_final] = table_df[metric_key].apply(lambda val: round(val, 3))
335
  table_df[latency_key_final] = table_df[latency_key].apply(lambda val: round(val, 2))
336
 
337
  def make_clickable(url, name):
338
  return f'<a href="{url}">{name}</a>'
339
 
 
340
  if dataset_name == 'COCO':
341
  table_df['Download link'] = table_df['model_name'].apply(
342
  lambda name: f'https://download.deeplite.ai/zoo/models/YOLOBench/{name.split("_")[0]}_640.pt'
343
  )
344
+ table_df['Download link'] = table_df.apply(
345
+ lambda x: make_clickable(x['Download link'], 'Weights download'), axis=1
346
+ )
347
  else:
348
+ table_df['Download link'] = table_df['model_name'].apply(
349
+ lambda s: 'Coming soon'
350
+ )
351
 
352
+ table_df = table_df[
353
+ [
354
+ 'Model name',
355
+ 'Input resolution (px)',
356
+ metric_key_final,
357
+ latency_key_final,
358
+ 'Download link',
359
+ ]
360
+ ].sort_values(by=metric_key_final, ascending=False)
361
  if not expand_table:
362
  table_df = table_df.iloc[:10, :]
363
 
364
  table_df = table_df.to_html(
365
+ classes='table', escape=False, render_links=True, index=False
 
366
  )
367
 
368
  return table_df