kevinconka committed on
Commit 1b40095 · verified · 1 Parent(s): 90ee270

Update README.md

Files changed (1)
  1. README.md +194 -122
README.md CHANGED
@@ -13,26 +13,33 @@ pinned: false
  emoji: 🕵️
  ---

- # Metric Card for Detection Metric

- ## Metric Description
  This metric calculates object detection metrics. It can optionally compute the metrics at different bounding box size levels, giving more insight into performance on objects of different sizes. It is adapted from the pycocotools metrics.

  ## How to Use
  ```
- >>> import evaluate
- >>> from seametrics.fo_to_payload.utils import fo_to_payload
- >>> b = fo_to_payload(
- >>>     dataset="SAILING_DATASET_QA",
- >>>     gt_field="ground_truth_det",
- >>>     models=["yolov5n6_RGB_D2304-v1_9C"],
- >>>     sequence_list=["Trip_14_Seq_1"],
- >>>     data_type="rgb"
- >>> )
- >>> module = evaluate.load("SEA-AI/det-metrics.py")
- >>> module.add_batch(b)
- >>> res = module.compute()
- >>> print(res)
  {'all': {'range': [0, 10000000000.0],
  'iouThr': '0.00',
  'maxDets': 100,
@@ -62,56 +69,103 @@ The format of payload should be as returned by function `fo_to_payload()` define
  An example of what a payload might look like:

  ```
- test_payload = {
      'dataset': 'SAILING_DATASET_QA',
-     'models': ['yolov5n6_RGB_D2304-v1_9C'],
      'gt_field_name': 'ground_truth_det',
      'sequences': {
-         # sequence 1, 1 frame with 1 pred and 1 gt
          'Trip_14_Seq_1': {
-             'resolution': (720, 1280),
-             'yolov5n6_RGB_D2304-v1_9C': [[fo.Detection(
                  label='FAR_AWAY_OBJECT',
-                 bounding_box=[0.35107421875, 0.274658203125, 0.0048828125, 0.009765625],  # tp nr1
                  confidence=0.153076171875
-             )]],
-             'ground_truth_det': [[fo.Detection(
                  label='FAR_AWAY_OBJECT',
-                 bounding_box=[0.35107421875, 0.274658203125, 0.0048828125, 0.009765625]
-             )]]
-         },
-         # sequence 2, 2 frames with frame 1: 2 pred, 1 gt; frame 2: 1 pred 1 gt
          'Trip_14_Seq_2': {
-             'resolution': (720, 1280),
              'yolov5n6_RGB_D2304-v1_9C': [
                  [
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
-                         bounding_box=[0.389404296875, 0.306640625, 0.005126953125, 0.0146484375],  # tp nr 2
                          confidence=0.153076171875
                      ),
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
-                         bounding_box=[0.50390625, 0.357666015625, 0.0048828125, 0.00976562],  # fp nr 1
                          confidence=0.153076171875
                      ),
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
-                         bounding_box=[0.455078125, 0.31494140625, 0.00390625, 0.0087890625],  # fp nr 2
                          confidence=0.153076171875
                      )
                  ],
                  [
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
-                         bounding_box=[0.455078125, 0.31494140625, 0.00390625, 0.0087890625],  # tp nr 3
                          confidence=0.153076171875
                      )
                  ],
                  [
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
-                         bounding_box=[0.455078125, 0.31494140625, 0.00390625, 0.0087890625],  # fp nr 3
                          confidence=0.153076171875
                      )
                  ]
@@ -121,29 +175,45 @@ test_payload = {
                  [
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
-                         bounding_box=[0.389404296875, 0.306640625, 0.005126953125, 0.0146484375],
                      )
                  ],
                  # frame nr 2
                  [
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
-                         bounding_box=[0.455078125, 0.31494140625, 0.00390625, 0.0087890625],
                          confidence=0.153076171875
                      ),
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
-                         bounding_box=[0.35107421875, 0.274658203125, 0.0048828125, 0.009765625],  # missed nr 1
                          confidence=0.153076171875
                      )
                  ],
                  # frame nr3
-                 [
-                 ],
              ]
-         }
-     },
-     "sequence_list": ["Trip_14_Seq_1", 'Trip_14_Seq_2']
  }
  ```
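The nesting above is frame-oriented: for every sequence, the model field and the ground-truth field each hold one inner list of `fo.Detection` objects per frame (an empty list means no boxes in that frame). A small illustrative check against the `test_payload` above, using nothing beyond the structure already shown:

```python
# Illustrative only: both fields are indexed frame by frame, so for a given
# sequence they should contain the same number of inner lists.
seq = test_payload["sequences"]["Trip_14_Seq_2"]
preds_per_frame = seq["yolov5n6_RGB_D2304-v1_9C"]
gts_per_frame = seq["ground_truth_det"]
assert len(preds_per_frame) == len(gts_per_frame)

for i, (preds, gts) in enumerate(zip(preds_per_frame, gts_per_frame)):
    print(f"frame {i}: {len(preds)} predictions, {len(gts)} ground-truth boxes")
```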
 
@@ -169,88 +239,90 @@ Each sub-dictionary holds performance metrics at the specific area range level:

  ### Examples
  We can specify the area range levels at which we would like to compute the metrics.
- ```
- >>> import evaluate
- >>> from seametrics.fo_to_payload.utils import fo_to_payload
- >>> area_ranges_tuples = [
-     ("all", [0, 1e5 ** 2]),
-     ("small", [0 ** 2, 6 ** 2]),
-     ("medium", [6 ** 2, 12 ** 2]),
-     ("large", [12 ** 2, 1e5 ** 2])
- ]
- >>> payload = fo_to_payload(
-     dataset=dataset,
-     gt_field=gt_field,
-     models=model_list
- )
- >>> module = evaluate.load(
-     "./detection_metric.py",
-     iou_threshold=0.9,
-     area_ranges_tuples=area_ranges_tuples
  )
- >>> module.add_batch(payload)
- >>> result = module.compute()
- >>> print(result)
- {'all': {
-     'range': [0, 10000000000.0],
-     'iouThr': '0.9',
-     'maxDets': 100,
-     'tp': 0,
-     'fp': 3,
-     'fn': 1,
-     'duplicates': 0,
-     'precision': 0.0,
-     'recall': 0.0,
-     'f1': 0,
-     'support': 1,
-     'fpi': 1,
-     'nImgs': 2
- },
- 'small': {
-     'range': [0, 36],
-     'iouThr': '0.9',
-     'maxDets': 100,
-     'tp': 0,
-     'fp': 1,
-     'fn': 1,
-     'duplicates': 0,
-     'precision': 0.0,
-     'recall': 0.0,
-     'f1': 0,
-     'support': 1,
-     'fpi': 1,
-     'nImgs': 2
- },
- 'medium': {
-     'range': [36, 144],
-     'iouThr': '0.9',
-     'maxDets': 100,
-     'tp': 0,
-     'fp': 2,
-     'fn': 0,
-     'duplicates': 0,
-     'precision': 0.0,
-     'recall': 0,
-     'f1': 0,
-     'support': 0,
-     'fpi': 2,
-     'nImgs': 2
- },
- 'large': {
-     'range': [144, 10000000000.0],
-     'iouThr': '0.9',
-     'maxDets': 100,
-     'tp': -1,
-     'fp': -1,
-     'fn': -1,
-     'duplicates': -1,
-     'precision': -1,
-     'recall': -1,
-     'f1': -1,
-     'support': 0,
-     'fpi': 0,
-     'nImgs': 2
- }
- }
  ```

  ## Further References
 
  emoji: 🕵️
  ---

+ # Detection Metrics

+ ## Description
  This metric calculates object detection metrics. It can optionally compute the metrics at different bounding box size levels, giving more insight into performance on objects of different sizes. It is adapted from the pycocotools metrics.

  ## How to Use
+ ```python
+ import evaluate
+ import logging
+ from seametrics.payload import PayloadProcessor
+
+ logging.basicConfig(level=logging.WARNING)
+
+ processor = PayloadProcessor(
+     dataset_name="SAILING_DATASET_QA",
+     gt_field="ground_truth_det",
+     models=["yolov5n6_RGB_D2304-v1_9C"],
+     sequence_list=["Trip_14_Seq_1"],
+     data_type="rgb",
+ )
+
+ module = evaluate.load("SEA-AI/det-metrics")
+ module.add_from_payload(processor.payload)
+ module.compute()
  ```
+
+ ```console
  {'all': {'range': [0, 10000000000.0],
  'iouThr': '0.00',
  'maxDets': 100,
 
  An example of what a payload might look like:

  ```
+ {
      'dataset': 'SAILING_DATASET_QA',
+     'models': ['yolov5n6_RGB_D2304-v1_9C'],
      'gt_field_name': 'ground_truth_det',
      'sequences': {
+         # sequence 1, 1 frame with 1 pred and 1 gt
          'Trip_14_Seq_1': {
+             'resolution': (720, 1280),
+             'yolov5n6_RGB_D2304-v1_9C': [
+                 [fo.Detection(
                      label='FAR_AWAY_OBJECT',
+                     bounding_box=[0.35107421875, 0.274658203125, 0.0048828125, 0.009765625],  # tp nr1
                      confidence=0.153076171875
+                 )]
+             ],
+             'ground_truth_det': [
+                 [fo.Detection(
                      label='FAR_AWAY_OBJECT',
+                     bounding_box=[0.35107421875, 0.274658203125, 0.0048828125, 0.009765625]
+                 )]
+             ]
+         },
+         # sequence 2, 2 frames with frame 1: 2 pred, 1 gt; frame 2: 1 pred 1 gt
          'Trip_14_Seq_2': {
+             'resolution': (720, 1280),
              'yolov5n6_RGB_D2304-v1_9C': [
                  [
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
+                         bounding_box=[0.389404296875, 0.306640625, 0.005126953125, 0.0146484375],  # tp nr 2
                          confidence=0.153076171875
                      ),
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
+                         bounding_box=[0.50390625, 0.357666015625, 0.0048828125, 0.00976562],  # fp nr 1
                          confidence=0.153076171875
                      ),
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
+                         bounding_box=[0.455078125, 0.31494140625, 0.00390625, 0.0087890625],  # fp nr 2
                          confidence=0.153076171875
                      )
                  ],
                  [
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
+                         bounding_box=[0.455078125, 0.31494140625, 0.00390625, 0.0087890625],  # tp nr 3
                          confidence=0.153076171875
                      )
                  ],
                  [
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
+                         bounding_box=[0.455078125, 0.31494140625, 0.00390625, 0.0087890625],  # fp nr 3
                          confidence=0.153076171875
                      )
                  ]
              ]

                  [
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
+                         bounding_box=[0.389404296875, 0.306640625, 0.005126953125, 0.0146484375],
                      )
                  ],
                  # frame nr 2
                  [
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
+                         bounding_box=[0.455078125, 0.31494140625, 0.00390625, 0.0087890625],
                          confidence=0.153076171875
                      ),
                      fo.Detection(
                          label='FAR_AWAY_OBJECT',
+                         bounding_box=[0.35107421875, 0.274658203125, 0.0048828125, 0.009765625],  # missed nr 1
                          confidence=0.153076171875
                      )
                  ],
                  # frame nr3
+                 [],
              ]
+         }
+     },
+     "sequence_list": ["Trip_14_Seq_1", 'Trip_14_Seq_2']
  }
  ```
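To make the structure concrete, here is a minimal hand-written payload in the same shape. The values and sequence name are made up for illustration; in practice the payload object comes from `PayloadProcessor` as in the How to Use section, so treat this purely as a sketch of the fields rather than a guaranteed drop-in input:

```python
import fiftyone as fo

# Hypothetical, minimal payload following the structure shown above:
# one sequence, one frame, one prediction and one ground-truth box.
toy_payload = {
    "dataset": "SAILING_DATASET_QA",
    "models": ["yolov5n6_RGB_D2304-v1_9C"],
    "gt_field_name": "ground_truth_det",
    "sequences": {
        "Trip_14_Seq_1": {
            "resolution": (720, 1280),
            # one inner list of detections per frame
            "yolov5n6_RGB_D2304-v1_9C": [
                [fo.Detection(label="FAR_AWAY_OBJECT",
                              bounding_box=[0.35, 0.27, 0.005, 0.010],
                              confidence=0.15)]
            ],
            "ground_truth_det": [
                [fo.Detection(label="FAR_AWAY_OBJECT",
                              bounding_box=[0.35, 0.27, 0.005, 0.010])]
            ],
        }
    },
    "sequence_list": ["Trip_14_Seq_1"],
}
```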
  ### Examples
  We can specify the area range levels at which we would like to compute the metrics.
+ ```python
+ import evaluate
+ import logging
+ from seametrics.payload import PayloadProcessor
+
+ logging.basicConfig(level=logging.WARNING)
+
+ processor = PayloadProcessor(
+     dataset_name="SAILING_DATASET_QA",
+     gt_field="ground_truth_det",
+     models=["yolov5n6_RGB_D2304-v1_9C"],
+     sequence_list=["Trip_14_Seq_1"],
+     data_type="rgb",
  )
+
+ area_ranges_tuples = [
+     ("all", [0, 1e5**2]),
+     ("small", [0**2, 6**2]),
+     ("medium", [6**2, 12**2]),
+     ("large", [12**2, 1e5**2]),
+ ]
+
+ module = evaluate.load(
+     "SEA-AI/det-metrics",
+     iou_thresholds=[0.00001],
+     area_ranges_tuples=area_ranges_tuples,
+ )
+ module.add_from_payload(processor.payload)
+ module.compute()
+ ```
+
+ ```
+ {'all': {'range': [0, 10000000000.0],
+          'iouThr': '0.00',
+          'maxDets': 100,
+          'tp': 89,
+          'fp': 13,
+          'fn': 15,
+          'duplicates': 1,
+          'precision': 0.8725490196078431,
+          'recall': 0.8557692307692307,
+          'f1': 0.8640776699029126,
+          'support': 104,
+          'fpi': 0,
+          'nImgs': 22},
+  'small': {'range': [0, 36],
+            'iouThr': '0.00',
+            'maxDets': 100,
+            'tp': 12,
+            'fp': 3,
+            'fn': 8,
+            'duplicates': 0,
+            'precision': 0.8,
+            'recall': 0.6,
+            'f1': 0.6857142857142857,
+            'support': 20,
+            'fpi': 0,
+            'nImgs': 22},
+  'medium': {'range': [36, 144],
+             'iouThr': '0.00',
+             'maxDets': 100,
+             'tp': 50,
+             'fp': 10,
+             'fn': 7,
+             'duplicates': 1,
+             'precision': 0.8333333333333334,
+             'recall': 0.8771929824561403,
+             'f1': 0.8547008547008548,
+             'support': 57,
+             'fpi': 0,
+             'nImgs': 22},
+  'large': {'range': [144, 10000000000.0],
+            'iouThr': '0.00',
+            'maxDets': 100,
+            'tp': 27,
+            'fp': 0,
+            'fn': 0,
+            'duplicates': 0,
+            'precision': 1.0,
+            'recall': 1.0,
+            'f1': 1.0,
+            'support': 27,
+            'fpi': 0,
+            'nImgs': 22}}
  ```
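The nested result is a plain `{level: {metric: value}}` dictionary, so it is easy to post-process. As an optional illustration (pandas is not required by the metric, and `results` simply names the return value of the `module.compute()` call above):

```python
import pandas as pd

results = module.compute()  # same call as above, keeping the return value

# One row per area-range level, one column per metric of interest.
df = pd.DataFrame(results).T[["tp", "fp", "fn", "precision", "recall", "f1", "support"]]
print(df)

# Individual values can be read directly, e.g. the overall F1 score:
print(results["all"]["f1"])
```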

  ## Further References