tomaarsen committed
Commit 2af80f5
Parent: 278472a

Upload all_results.json with huggingface_hub
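
For reference, such uploads are typically done programmatically; a minimal sketch using huggingface_hub's HfApi.upload_file (the repo_id below is a placeholder, not taken from this page):

from huggingface_hub import HfApi

api = HfApi()
# Push the local metrics file to the root of the target repo on the Hub.
api.upload_file(
    path_or_fileobj="all_results.json",
    path_in_repo="all_results.json",
    repo_id="your-username/your-model",  # placeholder; the actual repo is not shown on this page
    commit_message="Upload all_results.json with huggingface_hub",
)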

Files changed (1)
  1. all_results.json +404 -8
all_results.json CHANGED
@@ -1,11 +1,407 @@
 {
     "epoch": 3.0,
-    "test_loss": 0.023421578109264374,
-    "test_overall_accuracy": 0.9225603373768171,
-    "test_overall_f1": 0.6859873927870208,
-    "test_overall_precision": 0.6847278953665883,
-    "test_overall_recall": 0.6872515322179891,
-    "test_runtime": 301.9559,
-    "test_samples_per_second": 153.923,
-    "test_steps_per_second": 4.812
+    "test_art-broadcastprogram": {
+        "f1": 0.588334742180896,
+        "number": 603,
+        "precision": 0.6,
+        "recall": 0.5771144278606966
+    },
+    "test_art-film": {
+        "f1": 0.7418712674187126,
+        "number": 750,
+        "precision": 0.7384412153236459,
+        "recall": 0.7453333333333333
+    },
+    "test_art-music": {
+        "f1": 0.755849440488301,
+        "number": 1029,
+        "precision": 0.7929562433297759,
+        "recall": 0.7220602526724975
+    },
+    "test_art-other": {
+        "f1": 0.3446088794926005,
+        "number": 562,
+        "precision": 0.4244791666666667,
+        "recall": 0.2900355871886121
+    },
+    "test_art-painting": {
+        "f1": 0.4646464646464647,
+        "number": 57,
+        "precision": 0.5476190476190477,
+        "recall": 0.40350877192982454
+    },
+    "test_art-writtenart": {
+        "f1": 0.6469085334695963,
+        "number": 968,
+        "precision": 0.6400404448938322,
+        "recall": 0.6539256198347108
+    },
+    "test_building-airport": {
+        "f1": 0.8230452674897119,
+        "number": 364,
+        "precision": 0.821917808219178,
+        "recall": 0.8241758241758241
+    },
+    "test_building-hospital": {
+        "f1": 0.7525510204081632,
+        "number": 364,
+        "precision": 0.7023809523809523,
+        "recall": 0.8104395604395604
+    },
+    "test_building-hotel": {
+        "f1": 0.7228464419475654,
+        "number": 265,
+        "precision": 0.7174721189591078,
+        "recall": 0.7283018867924528
+    },
+    "test_building-library": {
+        "f1": 0.7347517730496453,
+        "number": 355,
+        "precision": 0.74,
+        "recall": 0.7295774647887324
+    },
+    "test_building-other": {
+        "f1": 0.5868801249511909,
+        "number": 2543,
+        "precision": 0.58278402481582,
+        "recall": 0.5910342115611482
+    },
+    "test_building-restaurant": {
+        "f1": 0.5365853658536586,
+        "number": 232,
+        "precision": 0.5525114155251142,
+        "recall": 0.521551724137931
+    },
+    "test_building-sportsfacility": {
+        "f1": 0.6931937172774869,
+        "number": 420,
+        "precision": 0.6186915887850467,
+        "recall": 0.7880952380952381
+    },
+    "test_building-theater": {
+        "f1": 0.733615221987315,
+        "number": 455,
+        "precision": 0.7067209775967414,
+        "recall": 0.7626373626373626
+    },
+    "test_event-attack/battle/war/militaryconflict": {
+        "f1": 0.7505720823798627,
+        "number": 1098,
+        "precision": 0.7543698252069917,
+        "recall": 0.7468123861566485
+    },
+    "test_event-disaster": {
+        "f1": 0.5583756345177665,
+        "number": 207,
+        "precision": 0.5882352941176471,
+        "recall": 0.5314009661835749
+    },
+    "test_event-election": {
+        "f1": 0.2877697841726619,
+        "number": 182,
+        "precision": 0.4166666666666667,
+        "recall": 0.21978021978021978
+    },
+    "test_event-other": {
+        "f1": 0.44303797468354433,
+        "number": 866,
+        "precision": 0.49019607843137253,
+        "recall": 0.40415704387990764
+    },
+    "test_event-protest": {
+        "f1": 0.3186440677966102,
+        "number": 166,
+        "precision": 0.3643410852713178,
+        "recall": 0.28313253012048195
+    },
+    "test_event-sportsevent": {
+        "f1": 0.6181588105030055,
+        "number": 1566,
+        "precision": 0.6125391849529781,
+        "recall": 0.623882503192848
+    },
+    "test_location-GPE": {
+        "f1": 0.8321414950419527,
+        "number": 20409,
+        "precision": 0.810239985145987,
+        "recall": 0.855259934342692
+    },
+    "test_location-bodiesofwater": {
+        "f1": 0.7282258064516128,
+        "number": 1169,
+        "precision": 0.6887871853546911,
+        "recall": 0.7724550898203593
+    },
+    "test_location-island": {
+        "f1": 0.6836483155299917,
+        "number": 646,
+        "precision": 0.7285464098073555,
+        "recall": 0.6439628482972136
+    },
+    "test_location-mountain": {
+        "f1": 0.7226647356987691,
+        "number": 681,
+        "precision": 0.7128571428571429,
+        "recall": 0.7327459618208517
+    },
+    "test_location-other": {
+        "f1": 0.32306363374604086,
+        "number": 2191,
+        "precision": 0.437597503900156,
+        "recall": 0.25604746691008673
+    },
+    "test_location-park": {
+        "f1": 0.6945054945054945,
+        "number": 458,
+        "precision": 0.6991150442477876,
+        "recall": 0.6899563318777293
+    },
+    "test_location-road/railway/highway/transit": {
+        "f1": 0.70939925265881,
+        "number": 1700,
+        "precision": 0.6936481169196178,
+        "recall": 0.7258823529411764
+    },
+    "test_loss": 0.023223718628287315,
+    "test_organization-company": {
+        "f1": 0.6916655965069989,
+        "number": 3896,
+        "precision": 0.6921099974299666,
+        "recall": 0.6912217659137577
+    },
+    "test_organization-education": {
+        "f1": 0.7900167986561075,
+        "number": 2067,
+        "precision": 0.7838095238095238,
+        "recall": 0.7963231736816643
+    },
+    "test_organization-government/governmentagency": {
+        "f1": 0.48308475809385226,
+        "number": 1511,
+        "precision": 0.5363489499192245,
+        "recall": 0.43944407677035074
+    },
+    "test_organization-media/newspaper": {
+        "f1": 0.645060523233112,
+        "number": 1232,
+        "precision": 0.6215199398043642,
+        "recall": 0.6704545454545454
+    },
+    "test_organization-other": {
+        "f1": 0.5444166963967176,
+        "number": 4439,
+        "precision": 0.5765743073047859,
+        "recall": 0.5156566794323045
+    },
+    "test_organization-politicalparty": {
+        "f1": 0.6859173700577521,
+        "number": 1054,
+        "precision": 0.6449456975772765,
+        "recall": 0.7324478178368121
+    },
+    "test_organization-religion": {
+        "f1": 0.5560109289617486,
+        "number": 672,
+        "precision": 0.5138888888888888,
+        "recall": 0.6056547619047619
+    },
+    "test_organization-showorganization": {
+        "f1": 0.5638366817887231,
+        "number": 769,
+        "precision": 0.562015503875969,
+        "recall": 0.5656697009102731
+    },
+    "test_organization-sportsleague": {
+        "f1": 0.6443327749860414,
+        "number": 882,
+        "precision": 0.6347634763476347,
+        "recall": 0.6541950113378685
+    },
+    "test_organization-sportsteam": {
+        "f1": 0.7345897133882999,
+        "number": 2473,
+        "precision": 0.7138496756962991,
+        "recall": 0.7565709664375253
+    },
+    "test_other-astronomything": {
+        "f1": 0.752,
+        "number": 678,
+        "precision": 0.7417503586800573,
+        "recall": 0.7625368731563422
+    },
+    "test_other-award": {
+        "f1": 0.7002262443438915,
+        "number": 919,
+        "precision": 0.7290930506478209,
+        "recall": 0.6735582154515778
+    },
+    "test_other-biologything": {
+        "f1": 0.6497237569060773,
+        "number": 1874,
+        "precision": 0.6735395189003437,
+        "recall": 0.6275346851654215
+    },
+    "test_other-chemicalthing": {
+        "f1": 0.583206106870229,
+        "number": 1014,
+        "precision": 0.6025236593059937,
+        "recall": 0.5650887573964497
+    },
+    "test_other-currency": {
+        "f1": 0.7546322290847838,
+        "number": 799,
+        "precision": 0.6843177189409368,
+        "recall": 0.8410513141426783
+    },
+    "test_other-disease": {
+        "f1": 0.6662484316185696,
+        "number": 749,
+        "precision": 0.6284023668639053,
+        "recall": 0.7089452603471295
+    },
+    "test_other-educationaldegree": {
+        "f1": 0.5943012211668929,
+        "number": 363,
+        "precision": 0.5855614973262032,
+        "recall": 0.6033057851239669
+    },
+    "test_other-god": {
+        "f1": 0.6474926253687314,
+        "number": 635,
+        "precision": 0.608876560332871,
+        "recall": 0.6913385826771653
+    },
+    "test_other-language": {
+        "f1": 0.7224563515954245,
+        "number": 753,
+        "precision": 0.6607929515418502,
+        "recall": 0.796812749003984
+    },
+    "test_other-law": {
+        "f1": 0.6958290946083417,
+        "number": 472,
+        "precision": 0.6692759295499021,
+        "recall": 0.7245762711864406
+    },
+    "test_other-livingthing": {
+        "f1": 0.6041909196740396,
+        "number": 863,
+        "precision": 0.6070175438596491,
+        "recall": 0.6013904982618772
+    },
+    "test_other-medical": {
+        "f1": 0.5087719298245613,
+        "number": 397,
+        "precision": 0.5062344139650873,
+        "recall": 0.5113350125944585
+    },
+    "test_overall_accuracy": 0.9227814069042201,
+    "test_overall_f1": 0.6884821229658107,
+    "test_overall_precision": 0.6890426017339362,
+    "test_overall_recall": 0.6879225552622042,
+    "test_person-actor": {
+        "f1": 0.7961965134706814,
+        "number": 1637,
+        "precision": 0.8274044795783926,
+        "recall": 0.7672571777642028
+    },
+    "test_person-artist/author": {
+        "f1": 0.7017641339074872,
+        "number": 3463,
+        "precision": 0.6761241970021413,
+        "recall": 0.7294253537395322
+    },
+    "test_person-athlete": {
+        "f1": 0.823791566678094,
+        "number": 2879,
+        "precision": 0.8131979695431472,
+        "recall": 0.8346648141715873
+    },
+    "test_person-director": {
+        "f1": 0.6786355475763016,
+        "number": 554,
+        "precision": 0.675,
+        "recall": 0.6823104693140795
+    },
+    "test_person-other": {
+        "f1": 0.6429391504018369,
+        "number": 8767,
+        "precision": 0.6471743903848376,
+        "recall": 0.6387589825481921
+    },
+    "test_person-politician": {
+        "f1": 0.6607080266386259,
+        "number": 2859,
+        "precision": 0.6621004566210046,
+        "recall": 0.6593214410633088
+    },
+    "test_person-scholar": {
+        "f1": 0.5092402464065708,
+        "number": 743,
+        "precision": 0.5181058495821727,
+        "recall": 0.5006729475100942
+    },
+    "test_person-soldier": {
+        "f1": 0.49331352154531943,
+        "number": 647,
+        "precision": 0.474964234620887,
+        "recall": 0.5131375579598145
+    },
+    "test_product-airplane": {
+        "f1": 0.646415552855407,
+        "number": 792,
+        "precision": 0.6229508196721312,
+        "recall": 0.6717171717171717
+    },
+    "test_product-car": {
+        "f1": 0.7234042553191489,
+        "number": 687,
+        "precision": 0.7292899408284024,
+        "recall": 0.7176128093158661
+    },
+    "test_product-food": {
+        "f1": 0.5456760048721071,
+        "number": 432,
+        "precision": 0.5758354755784062,
+        "recall": 0.5185185185185185
+    },
+    "test_product-game": {
+        "f1": 0.6887966804979253,
+        "number": 493,
+        "precision": 0.7048832271762208,
+        "recall": 0.6734279918864098
+    },
+    "test_product-other": {
+        "f1": 0.4668094218415418,
+        "number": 1608,
+        "precision": 0.5477386934673367,
+        "recall": 0.40671641791044777
+    },
+    "test_product-ship": {
+        "f1": 0.6319895968790638,
+        "number": 380,
+        "precision": 0.6246786632390745,
+        "recall": 0.6394736842105263
+    },
+    "test_product-software": {
+        "f1": 0.6626240352811467,
+        "number": 889,
+        "precision": 0.6497297297297298,
+        "recall": 0.6760404949381328
+    },
+    "test_product-train": {
+        "f1": 0.5616224648985959,
+        "number": 314,
+        "precision": 0.5504587155963303,
+        "recall": 0.5732484076433121
+    },
+    "test_product-weapon": {
+        "f1": 0.5299910474485229,
+        "number": 624,
+        "precision": 0.6004056795131846,
+        "recall": 0.47435897435897434
+    },
+    "test_runtime": 632.5801,
+    "test_samples_per_second": 73.524,
+    "test_steps_per_second": 4.595
 }
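
The structure of the new file (per-type "precision"/"recall"/"f1" with support under "number", plus "overall_*" aggregates) matches what the seqeval metric returns for token classification; a minimal sketch, assuming the evaluate and seqeval packages and illustrative labels (not the actual predictions from this run):

import evaluate

# Toy IOB2-tagged sequences; a real evaluation passes the model's predictions.
references = [["O", "B-person-actor", "I-person-actor", "O"]]
predictions = [["O", "B-person-actor", "I-person-actor", "O"]]

seqeval = evaluate.load("seqeval")
results = seqeval.compute(predictions=predictions, references=references)
# results holds per-type dicts, e.g.
#   {"person-actor": {"precision": 1.0, "recall": 1.0, "f1": 1.0, "number": 1}, ...}
# plus "overall_precision", "overall_recall", "overall_f1" and "overall_accuracy".
# A Trainer run evaluated with metric_key_prefix="test" then writes these keys
# into all_results.json with the "test_" prefix seen above.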