pinned: false
emoji: 🕵️
---

# Detection Metrics

## Description
This metric computes object detection metrics (precision, recall, F1, and the underlying counts). Optionally, the metrics can be broken down by bounding-box size, giving more insight into performance on objects of different sizes. It is adapted from the pycocotools metrics.

## How to Use

```python
import evaluate
import logging
from seametrics.payload import PayloadProcessor

logging.basicConfig(level=logging.WARNING)

processor = PayloadProcessor(
    dataset_name="SAILING_DATASET_QA",
    gt_field="ground_truth_det",
    models=["yolov5n6_RGB_D2304-v1_9C"],
    sequence_list=["Trip_14_Seq_1"],
    data_type="rgb",
)

module = evaluate.load("SEA-AI/det-metrics")
module.add_from_payload(processor.payload)
module.compute()
```

```console
{'all': {'range': [0, 10000000000.0],
         'iouThr': '0.00',
         'maxDets': 100,
         ...
```

The payload format should be as returned by the `fo_to_payload()` function. An example of what a payload might look like:

```python
{
    'dataset': 'SAILING_DATASET_QA',
    'models': ['yolov5n6_RGB_D2304-v1_9C'],
    'gt_field_name': 'ground_truth_det',
    'sequences': {
        # sequence 1: 1 frame with 1 pred and 1 gt
        'Trip_14_Seq_1': {
            'resolution': (720, 1280),
            'yolov5n6_RGB_D2304-v1_9C': [
                [fo.Detection(
                    label='FAR_AWAY_OBJECT',
                    bounding_box=[
                        0.35107421875,
                        0.274658203125,
                        0.0048828125,
                        0.009765625
                    ],  # tp nr 1
                    confidence=0.153076171875
                )]
            ],
            'ground_truth_det': [
                [fo.Detection(
                    label='FAR_AWAY_OBJECT',
                    bounding_box=[
                        0.35107421875,
                        0.274658203125,
                        0.0048828125,
                        0.009765625
                    ]
                )]
            ]
        },
        # sequence 2: 2 frames; frame 1: 2 pred, 1 gt; frame 2: 1 pred, 1 gt
        'Trip_14_Seq_2': {
            'resolution': (720, 1280),
            'yolov5n6_RGB_D2304-v1_9C': [
                [
                    fo.Detection(
                        label='FAR_AWAY_OBJECT',
                        bounding_box=[
                            0.389404296875,
                            0.306640625,
                            0.005126953125,
                            0.0146484375
                        ],  # tp nr 2
                        confidence=0.153076171875
                    ),
                    fo.Detection(
                        label='FAR_AWAY_OBJECT',
                        bounding_box=[
                            0.50390625,
                            0.357666015625,
                            0.0048828125,
                            0.00976562
                        ],  # fp nr 1
                        confidence=0.153076171875
                    ),
                    fo.Detection(
                        label='FAR_AWAY_OBJECT',
                        bounding_box=[
                            0.455078125,
                            0.31494140625,
                            0.00390625,
                            0.0087890625
                        ],  # fp nr 2
                        confidence=0.153076171875
                    )
                ],
                [
                    fo.Detection(
                        label='FAR_AWAY_OBJECT',
                        bounding_box=[
                            0.455078125,
                            0.31494140625,
                            0.00390625,
                            0.0087890625
                        ],  # tp nr 3
                        confidence=0.153076171875
                    )
                ],
                [
                    fo.Detection(
                        label='FAR_AWAY_OBJECT',
                        bounding_box=[
                            0.455078125,
                            0.31494140625,
                            0.00390625,
                            0.0087890625
                        ],  # fp nr 3
                        confidence=0.153076171875
                    )
                ]
            ],
            'ground_truth_det': [
                # frame nr 1
                [
                    fo.Detection(
                        label='FAR_AWAY_OBJECT',
                        bounding_box=[
                            0.389404296875,
                            0.306640625,
                            0.005126953125,
                            0.0146484375
                        ],
                    )
                ],
                # frame nr 2
                [
                    fo.Detection(
                        label='FAR_AWAY_OBJECT',
                        bounding_box=[
                            0.455078125,
                            0.31494140625,
                            0.00390625,
                            0.0087890625
                        ],
                        confidence=0.153076171875
                    ),
                    fo.Detection(
                        label='FAR_AWAY_OBJECT',
                        bounding_box=[
                            0.35107421875,
                            0.274658203125,
                            0.0048828125,
                            0.009765625
                        ],  # missed nr 1
                        confidence=0.153076171875
                    )
                ],
                # frame nr 3
                []
            ]
        }
    },
    'sequence_list': ['Trip_14_Seq_1', 'Trip_14_Seq_2']
}
```
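
If predictions and ground truth are already available outside of a FiftyOne dataset, a payload can in principle be assembled by hand. The sketch below is illustrative only: it assumes that `add_from_payload` accepts a plain dict with the structure above (the `PayloadProcessor` route shown in How to Use is the documented path), and the model and sequence names are hypothetical.

```python
# Minimal sketch (assumption: a hand-built dict matching the structure
# above is accepted by add_from_payload; PayloadProcessor is the
# documented path).
import evaluate
import fiftyone as fo

payload = {
    'dataset': 'SAILING_DATASET_QA',
    'models': ['my_model'],              # hypothetical model name
    'gt_field_name': 'ground_truth_det',
    'sequences': {
        'Seq_1': {                       # hypothetical sequence name
            'resolution': (720, 1280),
            # one frame, one prediction ...
            'my_model': [[fo.Detection(
                label='FAR_AWAY_OBJECT',
                bounding_box=[0.35, 0.27, 0.005, 0.01],
                confidence=0.15,
            )]],
            # ... and one matching ground-truth box
            'ground_truth_det': [[fo.Detection(
                label='FAR_AWAY_OBJECT',
                bounding_box=[0.35, 0.27, 0.005, 0.01],
            )]],
        }
    },
    'sequence_list': ['Seq_1'],
}

module = evaluate.load("SEA-AI/det-metrics")
module.add_from_payload(payload)
print(module.compute())
```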

The result of `compute()` is a dictionary keyed by area range name. Each sub-dictionary holds the performance metrics at that specific area range level: `tp` (true positives), `fp` (false positives), `fn` (false negatives), `duplicates`, `precision`, `recall`, `f1`, `support` (number of ground-truth boxes), `fpi`, and `nImgs` (number of images).
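
The aggregate scores follow the standard definitions (precision = tp / (tp + fp), recall = tp / (tp + fn), F1 as their harmonic mean), so any entry can be sanity-checked from its raw counts. A quick check against the `'all'` entry of the example output in the next section:

```python
# Recompute the aggregate scores of the 'all' entry from its raw counts.
tp, fp, fn = 89, 13, 15

precision = tp / (tp + fp)                          # 0.8725490196078431
recall = tp / (tp + fn)                             # 0.8557692307692307
f1 = 2 * precision * recall / (precision + recall)  # 0.8640776699029126
support = tp + fn                                   # 104 ground-truth boxes

print(precision, recall, f1, support)
```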
### Examples
We can specify different area range levels at which to compute the metrics.
```python
import evaluate
import logging
from seametrics.payload import PayloadProcessor

logging.basicConfig(level=logging.WARNING)

processor = PayloadProcessor(
    dataset_name="SAILING_DATASET_QA",
    gt_field="ground_truth_det",
    models=["yolov5n6_RGB_D2304-v1_9C"],
    sequence_list=["Trip_14_Seq_1"],
    data_type="rgb",
)

area_ranges_tuples = [
    ("all", [0, 1e5**2]),
    ("small", [0**2, 6**2]),
    ("medium", [6**2, 12**2]),
    ("large", [12**2, 1e5**2]),
]

module = evaluate.load(
    "SEA-AI/det-metrics",
    iou_thresholds=[0.00001],
    area_ranges_tuples=area_ranges_tuples,
)
module.add_from_payload(processor.payload)
module.compute()
```

```console
{'all': {'range': [0, 10000000000.0],
         'iouThr': '0.00',
         'maxDets': 100,
         'tp': 89,
         'fp': 13,
         'fn': 15,
         'duplicates': 1,
         'precision': 0.8725490196078431,
         'recall': 0.8557692307692307,
         'f1': 0.8640776699029126,
         'support': 104,
         'fpi': 0,
         'nImgs': 22},
 'small': {'range': [0, 36],
           'iouThr': '0.00',
           'maxDets': 100,
           'tp': 12,
           'fp': 3,
           'fn': 8,
           'duplicates': 0,
           'precision': 0.8,
           'recall': 0.6,
           'f1': 0.6857142857142857,
           'support': 20,
           'fpi': 0,
           'nImgs': 22},
 'medium': {'range': [36, 144],
            'iouThr': '0.00',
            'maxDets': 100,
            'tp': 50,
            'fp': 10,
            'fn': 7,
            'duplicates': 1,
            'precision': 0.8333333333333334,
            'recall': 0.8771929824561403,
            'f1': 0.8547008547008548,
            'support': 57,
            'fpi': 0,
            'nImgs': 22},
 'large': {'range': [144, 10000000000.0],
           'iouThr': '0.00',
           'maxDets': 100,
           'tp': 27,
           'fp': 0,
           'fn': 0,
           'duplicates': 0,
           'precision': 1.0,
           'recall': 1.0,
           'f1': 1.0,
           'support': 27,
           'fpi': 0,
           'nImgs': 22}}
```
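
Note that the area range bounds are given in squared pixels (e.g. `("small", [0**2, 6**2])` covers boxes up to roughly 6×6 px). Since payload bounding boxes are normalized, a box's pixel area depends on the sequence resolution. The helper below is an illustration of the bucketing, under the assumption that pixel area = (w × image width) × (h × image height); it is not the metric's internal code.

```python
# Illustrative helper (an assumption, not the metric's internal code):
# map a normalized [x, y, w, h] box to its area-range bucket.
def area_bucket(bbox, resolution=(720, 1280)):
    img_h, img_w = resolution
    _, _, w, h = bbox
    area = (w * img_w) * (h * img_h)  # box area in squared pixels
    for name, (lo, hi) in [('small', (0, 36)),
                           ('medium', (36, 144)),
                           ('large', (144, 1e10))]:
        if lo <= area < hi:
            return name, area

print(area_bucket([0.35107421875, 0.274658203125, 0.0048828125, 0.009765625]))
# ('medium', 43.9453125) -- a 6.25 x 7.03 px box
```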

## Further References
|