tomaarsen (HF staff) committed on
Commit c4cce95
1 Parent(s): 9b77503

Upload all_results.json with huggingface_hub

Files changed (1)
  1. all_results.json +407 -0
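
The commit message records that the file was pushed with the huggingface_hub library. A minimal sketch of how such a single-file upload is typically done with that library's upload_file API (the repo_id below is a hypothetical placeholder, not taken from this page):

from huggingface_hub import upload_file

# Push a local evaluation-results file to a repository on the Hugging Face Hub.
# NOTE: repo_id is an assumed placeholder for illustration; substitute the real repo.
upload_file(
    path_or_fileobj="all_results.json",  # local file to upload
    path_in_repo="all_results.json",     # destination path inside the repo
    repo_id="tomaarsen/example-model",   # hypothetical
    commit_message="Upload all_results.json with huggingface_hub",
)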
all_results.json ADDED
@@ -0,0 +1,407 @@
+ {
+     "epoch": 3.0,
+     "test_art-broadcastprogram": {
+         "f1": 0.6051364365971107,
+         "number": 603,
+         "precision": 0.5863141524105754,
+         "recall": 0.6252072968490879
+     },
+     "test_art-film": {
+         "f1": 0.7647457627118643,
+         "number": 750,
+         "precision": 0.7779310344827586,
+         "recall": 0.752
+     },
+     "test_art-music": {
+         "f1": 0.7786106946526736,
+         "number": 1029,
+         "precision": 0.801440329218107,
+         "recall": 0.7570456754130224
+     },
+     "test_art-other": {
+         "f1": 0.3649193548387097,
+         "number": 562,
+         "precision": 0.42093023255813955,
+         "recall": 0.3220640569395018
+     },
+     "test_art-painting": {
+         "f1": 0.628099173553719,
+         "number": 57,
+         "precision": 0.59375,
+         "recall": 0.6666666666666666
+     },
+     "test_art-writtenart": {
+         "f1": 0.662753468516542,
+         "number": 968,
+         "precision": 0.6854304635761589,
+         "recall": 0.6415289256198347
+     },
+     "test_building-airport": {
+         "f1": 0.821917808219178,
+         "number": 364,
+         "precision": 0.819672131147541,
+         "recall": 0.8241758241758241
+     },
+     "test_building-hospital": {
+         "f1": 0.767052767052767,
+         "number": 364,
+         "precision": 0.7215496368038741,
+         "recall": 0.8186813186813187
+     },
+     "test_building-hotel": {
+         "f1": 0.7065637065637066,
+         "number": 265,
+         "precision": 0.7233201581027668,
+         "recall": 0.690566037735849
+     },
+     "test_building-library": {
+         "f1": 0.7424460431654676,
+         "number": 355,
+         "precision": 0.7588235294117647,
+         "recall": 0.7267605633802817
+     },
+     "test_building-other": {
+         "f1": 0.58483896307934,
+         "number": 2543,
+         "precision": 0.5841506473126716,
+         "recall": 0.5855289028706252
+     },
+     "test_building-restaurant": {
+         "f1": 0.5195402298850574,
+         "number": 232,
+         "precision": 0.5566502463054187,
+         "recall": 0.4870689655172414
+     },
+     "test_building-sportsfacility": {
+         "f1": 0.7052401746724892,
+         "number": 420,
+         "precision": 0.6512096774193549,
+         "recall": 0.7690476190476191
+     },
+     "test_building-theater": {
+         "f1": 0.7245762711864406,
+         "number": 455,
+         "precision": 0.6993865030674846,
+         "recall": 0.7516483516483516
+     },
+     "test_event-attack/battle/war/militaryconflict": {
+         "f1": 0.755868544600939,
+         "number": 1098,
+         "precision": 0.7800387596899225,
+         "recall": 0.7331511839708561
+     },
+     "test_event-disaster": {
+         "f1": 0.5505050505050505,
+         "number": 207,
+         "precision": 0.5767195767195767,
+         "recall": 0.5265700483091788
+     },
+     "test_event-election": {
+         "f1": 0.2096069868995633,
+         "number": 182,
+         "precision": 0.5106382978723404,
+         "recall": 0.13186813186813187
+     },
+     "test_event-other": {
+         "f1": 0.4504391468005019,
+         "number": 866,
+         "precision": 0.49313186813186816,
+         "recall": 0.41454965357967666
+     },
+     "test_event-protest": {
+         "f1": 0.39999999999999997,
+         "number": 166,
+         "precision": 0.3711340206185567,
+         "recall": 0.43373493975903615
+     },
+     "test_event-sportsevent": {
+         "f1": 0.6155810983397191,
+         "number": 1566,
+         "precision": 0.6155810983397191,
+         "recall": 0.6155810983397191
+     },
+     "test_location-GPE": {
+         "f1": 0.8338255420298207,
+         "number": 20409,
+         "precision": 0.8175141242937853,
+         "recall": 0.8508011171541967
+     },
+     "test_location-bodiesofwater": {
+         "f1": 0.7456066945606695,
+         "number": 1169,
+         "precision": 0.7297297297297297,
+         "recall": 0.7621899059024807
+     },
+     "test_location-island": {
+         "f1": 0.6995153473344103,
+         "number": 646,
+         "precision": 0.731418918918919,
+         "recall": 0.6702786377708978
+     },
+     "test_location-mountain": {
+         "f1": 0.7408513816280807,
+         "number": 681,
+         "precision": 0.7537993920972644,
+         "recall": 0.7283406754772394
+     },
+     "test_location-other": {
+         "f1": 0.3585464333781965,
+         "number": 2191,
+         "precision": 0.43700787401574803,
+         "recall": 0.3039707895937928
+     },
+     "test_location-park": {
+         "f1": 0.6969026548672567,
+         "number": 458,
+         "precision": 0.7062780269058296,
+         "recall": 0.6877729257641921
+     },
+     "test_location-road/railway/highway/transit": {
+         "f1": 0.7174418604651162,
+         "number": 1700,
+         "precision": 0.7091954022988506,
+         "recall": 0.7258823529411764
+     },
+     "test_loss": 0.022720418870449066,
+     "test_organization-company": {
+         "f1": 0.6927016645326505,
+         "number": 3896,
+         "precision": 0.6911088400613183,
+         "recall": 0.6943018480492813
+     },
+     "test_organization-education": {
+         "f1": 0.7885167464114833,
+         "number": 2067,
+         "precision": 0.7799337434926644,
+         "recall": 0.7972907595549105
+     },
+     "test_organization-government/governmentagency": {
+         "f1": 0.4941520467836257,
+         "number": 1511,
+         "precision": 0.5518367346938775,
+         "recall": 0.44738583719391134
+     },
+     "test_organization-media/newspaper": {
+         "f1": 0.6505271378367826,
+         "number": 1232,
+         "precision": 0.6267870579382995,
+         "recall": 0.6761363636363636
+     },
+     "test_organization-other": {
+         "f1": 0.5563115908024402,
+         "number": 4439,
+         "precision": 0.5804161566707466,
+         "recall": 0.5341293084027934
+     },
+     "test_organization-politicalparty": {
+         "f1": 0.6949458483754513,
+         "number": 1054,
+         "precision": 0.6626506024096386,
+         "recall": 0.7305502846299811
+     },
+     "test_organization-religion": {
+         "f1": 0.5933756166314307,
+         "number": 672,
+         "precision": 0.5635876840696118,
+         "recall": 0.6264880952380952
+     },
+     "test_organization-showorganization": {
+         "f1": 0.6054333764553688,
+         "number": 769,
+         "precision": 0.6023166023166023,
+         "recall": 0.6085825747724317
+     },
+     "test_organization-sportsleague": {
+         "f1": 0.6544831524842947,
+         "number": 882,
+         "precision": 0.6593785960874569,
+         "recall": 0.6496598639455783
+     },
+     "test_organization-sportsteam": {
+         "f1": 0.7517758484609314,
+         "number": 2473,
+         "precision": 0.7341040462427746,
+         "recall": 0.770319450060655
+     },
+     "test_other-astronomything": {
+         "f1": 0.8040057224606582,
+         "number": 678,
+         "precision": 0.7805555555555556,
+         "recall": 0.8289085545722714
+     },
+     "test_other-award": {
+         "f1": 0.6956521739130435,
+         "number": 919,
+         "precision": 0.7230046948356808,
+         "recall": 0.6702937976060935
+     },
+     "test_other-biologything": {
+         "f1": 0.6544157981349424,
+         "number": 1874,
+         "precision": 0.6732505643340858,
+         "recall": 0.6366061899679829
+     },
+     "test_other-chemicalthing": {
+         "f1": 0.5899352267065271,
+         "number": 1014,
+         "precision": 0.5961732124874118,
+         "recall": 0.5838264299802761
+     },
+     "test_other-currency": {
+         "f1": 0.746268656716418,
+         "number": 799,
+         "precision": 0.7134703196347032,
+         "recall": 0.7822277847309136
+     },
+     "test_other-disease": {
+         "f1": 0.6637390213299874,
+         "number": 749,
+         "precision": 0.6260355029585799,
+         "recall": 0.7062750333778371
+     },
+     "test_other-educationaldegree": {
+         "f1": 0.6016483516483516,
+         "number": 363,
+         "precision": 0.6,
+         "recall": 0.6033057851239669
+     },
+     "test_other-god": {
+         "f1": 0.7084639498432602,
+         "number": 635,
+         "precision": 0.7051482059282371,
+         "recall": 0.7118110236220473
+     },
+     "test_other-language": {
+         "f1": 0.736648250460405,
+         "number": 753,
+         "precision": 0.684931506849315,
+         "recall": 0.796812749003984
+     },
+     "test_other-law": {
+         "f1": 0.6828752642706131,
+         "number": 472,
+         "precision": 0.6814345991561181,
+         "recall": 0.684322033898305
+     },
+     "test_other-livingthing": {
+         "f1": 0.6191536748329621,
+         "number": 863,
+         "precision": 0.5959271168274384,
+         "recall": 0.6442641946697567
+     },
+     "test_other-medical": {
+         "f1": 0.5019710906701709,
+         "number": 397,
+         "precision": 0.5247252747252747,
+         "recall": 0.4811083123425693
+     },
+     "test_overall_accuracy": 0.9248186428918111,
+     "test_overall_f1": 0.7006507253689264,
+     "test_overall_precision": 0.7040676584045078,
+     "test_overall_recall": 0.6972667978051558,
+     "test_person-actor": {
+         "f1": 0.8146295717411691,
+         "number": 1637,
+         "precision": 0.8341869398207427,
+         "recall": 0.7959682345754429
+     },
+     "test_person-artist/author": {
+         "f1": 0.7260753818130867,
+         "number": 3463,
+         "precision": 0.7052259118127382,
+         "recall": 0.74819520646838
+     },
+     "test_person-athlete": {
+         "f1": 0.8462332301341589,
+         "number": 2884,
+         "precision": 0.8395904436860068,
+         "recall": 0.8529819694868238
+     },
+     "test_person-director": {
+         "f1": 0.7289048473967685,
+         "number": 554,
+         "precision": 0.725,
+         "recall": 0.7328519855595668
+     },
+     "test_person-other": {
+         "f1": 0.6767326159898183,
+         "number": 8767,
+         "precision": 0.6865829322690457,
+         "recall": 0.6671609444507813
+     },
+     "test_person-politician": {
+         "f1": 0.6835310537334263,
+         "number": 2859,
+         "precision": 0.6818656456665506,
+         "recall": 0.6852046169989506
+     },
+     "test_person-scholar": {
+         "f1": 0.5197740112994349,
+         "number": 743,
+         "precision": 0.5468053491827637,
+         "recall": 0.4952893674293405
+     },
+     "test_person-soldier": {
+         "f1": 0.5496987951807228,
+         "number": 647,
+         "precision": 0.5359765051395007,
+         "recall": 0.5641421947449768
+     },
+     "test_product-airplane": {
+         "f1": 0.6776859504132232,
+         "number": 792,
+         "precision": 0.6824583866837388,
+         "recall": 0.672979797979798
+     },
+     "test_product-car": {
+         "f1": 0.7109144542772862,
+         "number": 687,
+         "precision": 0.7204783258594918,
+         "recall": 0.7016011644832606
+     },
+     "test_product-food": {
+         "f1": 0.5696821515892421,
+         "number": 432,
+         "precision": 0.6036269430051814,
+         "recall": 0.5393518518518519
+     },
+     "test_product-game": {
+         "f1": 0.728249194414608,
+         "number": 493,
+         "precision": 0.773972602739726,
+         "recall": 0.6876267748478702
+     },
+     "test_product-other": {
+         "f1": 0.4614848379226211,
+         "number": 1608,
+         "precision": 0.5249801744647106,
+         "recall": 0.4116915422885572
+     },
+     "test_product-ship": {
+         "f1": 0.6772068511198946,
+         "number": 380,
+         "precision": 0.6781002638522428,
+         "recall": 0.6763157894736842
+     },
+     "test_product-software": {
+         "f1": 0.6651558073654391,
+         "number": 889,
+         "precision": 0.6700913242009132,
+         "recall": 0.6602924634420697
+     },
+     "test_product-train": {
+         "f1": 0.5984251968503936,
+         "number": 314,
+         "precision": 0.5919003115264797,
+         "recall": 0.6050955414012739
+     },
+     "test_product-weapon": {
+         "f1": 0.5921397379912663,
+         "number": 624,
+         "precision": 0.6506717850287908,
+         "recall": 0.5432692307692307
+     },
+     "test_runtime": 1023.4462,
+     "test_samples_per_second": 45.081,
+     "test_steps_per_second": 2.818
+ }
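
Each per-type block above reports precision, recall, F1, and support ("number" of gold entities), with the aggregate metrics in the test_overall_* fields. F1 is the harmonic mean of precision and recall, F1 = 2PR / (P + R), and the overall figures are micro-averages over all entity types. A small sketch that checks both relationships against this file (assuming the JSON above is saved locally as all_results.json):

import json

with open("all_results.json") as f:
    results = json.load(f)

# Per-type check: F1 should equal the harmonic mean of precision and recall.
for key, value in results.items():
    if isinstance(value, dict):  # only the per-type metric blocks are dicts
        p, r = value["precision"], value["recall"]
        assert abs(2 * p * r / (p + r) - value["f1"]) < 1e-9, key

# Overall F1 from overall precision and recall.
p, r = results["test_overall_precision"], results["test_overall_recall"]
print(2 * p * r / (p + r))  # ~0.70065, matching test_overall_f1

# Micro-averaged recall: total true positives over total gold entities.
tp = gold = 0
for value in results.values():
    if isinstance(value, dict):
        tp += round(value["recall"] * value["number"])  # recover integer TP count
        gold += value["number"]
print(tp / gold)  # should be close to test_overall_recall (~0.69727)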