{
    "epoch": 3.0,
    "test_art-broadcastprogram": {
        "f1": 0.588334742180896,
        "number": 603,
        "precision": 0.6,
        "recall": 0.5771144278606966
    },
    "test_art-film": {
        "f1": 0.7418712674187126,
        "number": 750,
        "precision": 0.7384412153236459,
        "recall": 0.7453333333333333
    },
    "test_art-music": {
        "f1": 0.755849440488301,
        "number": 1029,
        "precision": 0.7929562433297759,
        "recall": 0.7220602526724975
    },
    "test_art-other": {
        "f1": 0.3446088794926005,
        "number": 562,
        "precision": 0.4244791666666667,
        "recall": 0.2900355871886121
    },
    "test_art-painting": {
        "f1": 0.4646464646464647,
        "number": 57,
        "precision": 0.5476190476190477,
        "recall": 0.40350877192982454
    },
    "test_art-writtenart": {
        "f1": 0.6469085334695963,
        "number": 968,
        "precision": 0.6400404448938322,
        "recall": 0.6539256198347108
    },
    "test_building-airport": {
        "f1": 0.8230452674897119,
        "number": 364,
        "precision": 0.821917808219178,
        "recall": 0.8241758241758241
    },
    "test_building-hospital": {
        "f1": 0.7525510204081632,
        "number": 364,
        "precision": 0.7023809523809523,
        "recall": 0.8104395604395604
    },
    "test_building-hotel": {
        "f1": 0.7228464419475654,
        "number": 265,
        "precision": 0.7174721189591078,
        "recall": 0.7283018867924528
    },
    "test_building-library": {
        "f1": 0.7347517730496453,
        "number": 355,
        "precision": 0.74,
        "recall": 0.7295774647887324
    },
    "test_building-other": {
        "f1": 0.5868801249511909,
        "number": 2543,
        "precision": 0.58278402481582,
        "recall": 0.5910342115611482
    },
    "test_building-restaurant": {
        "f1": 0.5365853658536586,
        "number": 232,
        "precision": 0.5525114155251142,
        "recall": 0.521551724137931
    },
    "test_building-sportsfacility": {
        "f1": 0.6931937172774869,
        "number": 420,
        "precision": 0.6186915887850467,
        "recall": 0.7880952380952381
    },
    "test_building-theater": {
        "f1": 0.733615221987315,
        "number": 455,
        "precision": 0.7067209775967414,
        "recall": 0.7626373626373626
    },
    "test_event-attack/battle/war/militaryconflict": {
        "f1": 0.7505720823798627,
        "number": 1098,
        "precision": 0.7543698252069917,
        "recall": 0.7468123861566485
    },
    "test_event-disaster": {
        "f1": 0.5583756345177665,
        "number": 207,
        "precision": 0.5882352941176471,
        "recall": 0.5314009661835749
    },
    "test_event-election": {
        "f1": 0.2877697841726619,
        "number": 182,
        "precision": 0.4166666666666667,
        "recall": 0.21978021978021978
    },
    "test_event-other": {
        "f1": 0.44303797468354433,
        "number": 866,
        "precision": 0.49019607843137253,
        "recall": 0.40415704387990764
    },
    "test_event-protest": {
        "f1": 0.3186440677966102,
        "number": 166,
        "precision": 0.3643410852713178,
        "recall": 0.28313253012048195
    },
    "test_event-sportsevent": {
        "f1": 0.6181588105030055,
        "number": 1566,
        "precision": 0.6125391849529781,
        "recall": 0.623882503192848
    },
    "test_location-GPE": {
        "f1": 0.8321414950419527,
        "number": 20409,
        "precision": 0.810239985145987,
        "recall": 0.855259934342692
    },
    "test_location-bodiesofwater": {
        "f1": 0.7282258064516128,
        "number": 1169,
        "precision": 0.6887871853546911,
        "recall": 0.7724550898203593
    },
    "test_location-island": {
        "f1": 0.6836483155299917,
        "number": 646,
        "precision": 0.7285464098073555,
        "recall": 0.6439628482972136
    },
    "test_location-mountain": {
        "f1": 0.7226647356987691,
        "number": 681,
        "precision": 0.7128571428571429,
        "recall": 0.7327459618208517
    },
    "test_location-other": {
        "f1": 0.32306363374604086,
        "number": 2191,
        "precision": 0.437597503900156,
        "recall": 0.25604746691008673
    },
    "test_location-park": {
        "f1": 0.6945054945054945,
        "number": 458,
        "precision": 0.6991150442477876,
        "recall": 0.6899563318777293
    },
    "test_location-road/railway/highway/transit": {
        "f1": 0.70939925265881,
        "number": 1700,
        "precision": 0.6936481169196178,
        "recall": 0.7258823529411764
    },
    "test_loss": 0.023223718628287315,
    "test_organization-company": {
        "f1": 0.6916655965069989,
        "number": 3896,
        "precision": 0.6921099974299666,
        "recall": 0.6912217659137577
    },
    "test_organization-education": {
        "f1": 0.7900167986561075,
        "number": 2067,
        "precision": 0.7838095238095238,
        "recall": 0.7963231736816643
    },
    "test_organization-government/governmentagency": {
        "f1": 0.48308475809385226,
        "number": 1511,
        "precision": 0.5363489499192245,
        "recall": 0.43944407677035074
    },
    "test_organization-media/newspaper": {
        "f1": 0.645060523233112,
        "number": 1232,
        "precision": 0.6215199398043642,
        "recall": 0.6704545454545454
    },
    "test_organization-other": {
        "f1": 0.5444166963967176,
        "number": 4439,
        "precision": 0.5765743073047859,
        "recall": 0.5156566794323045
    },
    "test_organization-politicalparty": {
        "f1": 0.6859173700577521,
        "number": 1054,
        "precision": 0.6449456975772765,
        "recall": 0.7324478178368121
    },
    "test_organization-religion": {
        "f1": 0.5560109289617486,
        "number": 672,
        "precision": 0.5138888888888888,
        "recall": 0.6056547619047619
    },
    "test_organization-showorganization": {
        "f1": 0.5638366817887231,
        "number": 769,
        "precision": 0.562015503875969,
        "recall": 0.5656697009102731
    },
    "test_organization-sportsleague": {
        "f1": 0.6443327749860414,
        "number": 882,
        "precision": 0.6347634763476347,
        "recall": 0.6541950113378685
    },
    "test_organization-sportsteam": {
        "f1": 0.7345897133882999,
        "number": 2473,
        "precision": 0.7138496756962991,
        "recall": 0.7565709664375253
    },
    "test_other-astronomything": {
        "f1": 0.752,
        "number": 678,
        "precision": 0.7417503586800573,
        "recall": 0.7625368731563422
    },
    "test_other-award": {
        "f1": 0.7002262443438915,
        "number": 919,
        "precision": 0.7290930506478209,
        "recall": 0.6735582154515778
    },
    "test_other-biologything": {
        "f1": 0.6497237569060773,
        "number": 1874,
        "precision": 0.6735395189003437,
        "recall": 0.6275346851654215
    },
    "test_other-chemicalthing": {
        "f1": 0.583206106870229,
        "number": 1014,
        "precision": 0.6025236593059937,
        "recall": 0.5650887573964497
    },
    "test_other-currency": {
        "f1": 0.7546322290847838,
        "number": 799,
        "precision": 0.6843177189409368,
        "recall": 0.8410513141426783
    },
    "test_other-disease": {
        "f1": 0.6662484316185696,
        "number": 749,
        "precision": 0.6284023668639053,
        "recall": 0.7089452603471295
    },
    "test_other-educationaldegree": {
        "f1": 0.5943012211668929,
        "number": 363,
        "precision": 0.5855614973262032,
        "recall": 0.6033057851239669
    },
    "test_other-god": {
        "f1": 0.6474926253687314,
        "number": 635,
        "precision": 0.608876560332871,
        "recall": 0.6913385826771653
    },
    "test_other-language": {
        "f1": 0.7224563515954245,
        "number": 753,
        "precision": 0.6607929515418502,
        "recall": 0.796812749003984
    },
    "test_other-law": {
        "f1": 0.6958290946083417,
        "number": 472,
        "precision": 0.6692759295499021,
        "recall": 0.7245762711864406
    },
    "test_other-livingthing": {
        "f1": 0.6041909196740396,
        "number": 863,
        "precision": 0.6070175438596491,
        "recall": 0.6013904982618772
    },
    "test_other-medical": {
        "f1": 0.5087719298245613,
        "number": 397,
        "precision": 0.5062344139650873,
        "recall": 0.5113350125944585
    },
    "test_overall_accuracy": 0.9227814069042201,
    "test_overall_f1": 0.6884821229658107,
    "test_overall_precision": 0.6890426017339362,
    "test_overall_recall": 0.6879225552622042,
    "test_person-actor": {
        "f1": 0.7961965134706814,
        "number": 1637,
        "precision": 0.8274044795783926,
        "recall": 0.7672571777642028
    },
    "test_person-artist/author": {
        "f1": 0.7017641339074872,
        "number": 3463,
        "precision": 0.6761241970021413,
        "recall": 0.7294253537395322
    },
    "test_person-athlete": {
        "f1": 0.823791566678094,
        "number": 2879,
        "precision": 0.8131979695431472,
        "recall": 0.8346648141715873
    },
    "test_person-director": {
        "f1": 0.6786355475763016,
        "number": 554,
        "precision": 0.675,
        "recall": 0.6823104693140795
    },
    "test_person-other": {
        "f1": 0.6429391504018369,
        "number": 8767,
        "precision": 0.6471743903848376,
        "recall": 0.6387589825481921
    },
    "test_person-politician": {
        "f1": 0.6607080266386259,
        "number": 2859,
        "precision": 0.6621004566210046,
        "recall": 0.6593214410633088
    },
    "test_person-scholar": {
        "f1": 0.5092402464065708,
        "number": 743,
        "precision": 0.5181058495821727,
        "recall": 0.5006729475100942
    },
    "test_person-soldier": {
        "f1": 0.49331352154531943,
        "number": 647,
        "precision": 0.474964234620887,
        "recall": 0.5131375579598145
    },
    "test_product-airplane": {
        "f1": 0.646415552855407,
        "number": 792,
        "precision": 0.6229508196721312,
        "recall": 0.6717171717171717
    },
    "test_product-car": {
        "f1": 0.7234042553191489,
        "number": 687,
        "precision": 0.7292899408284024,
        "recall": 0.7176128093158661
    },
    "test_product-food": {
        "f1": 0.5456760048721071,
        "number": 432,
        "precision": 0.5758354755784062,
        "recall": 0.5185185185185185
    },
    "test_product-game": {
        "f1": 0.6887966804979253,
        "number": 493,
        "precision": 0.7048832271762208,
        "recall": 0.6734279918864098
    },
    "test_product-other": {
        "f1": 0.4668094218415418,
        "number": 1608,
        "precision": 0.5477386934673367,
        "recall": 0.40671641791044777
    },
    "test_product-ship": {
        "f1": 0.6319895968790638,
        "number": 380,
        "precision": 0.6246786632390745,
        "recall": 0.6394736842105263
    },
    "test_product-software": {
        "f1": 0.6626240352811467,
        "number": 889,
        "precision": 0.6497297297297298,
        "recall": 0.6760404949381328
    },
    "test_product-train": {
        "f1": 0.5616224648985959,
        "number": 314,
        "precision": 0.5504587155963303,
        "recall": 0.5732484076433121
    },
    "test_product-weapon": {
        "f1": 0.5299910474485229,
        "number": 624,
        "precision": 0.6004056795131846,
        "recall": 0.47435897435897434
    },
    "test_runtime": 632.5801,
    "test_samples_per_second": 73.524,
    "test_steps_per_second": 4.595
}
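These look like per-entity-type test metrics (precision, recall, F1, and support under `"number"`) plus overall aggregates, in the shape produced by a seqeval-style evaluation of a Hugging Face token-classification run; the fine-grained type names match Few-NERD's taxonomy. Below is a minimal sketch of how one might load and summarize such a file; the filename `test_results.json` is an assumption, not something the results themselves state.

```python
import json

# Assumed filename for the JSON above; adjust to the actual path.
with open("test_results.json") as f:
    results = json.load(f)

# Per-type entries are dicts holding precision/recall/f1/number (support);
# scalars such as "epoch", "test_loss", and the "test_overall_*" aggregates
# are filtered out here. (str.removeprefix requires Python 3.9+.)
per_type = {
    key.removeprefix("test_"): metrics
    for key, metrics in results.items()
    if isinstance(metrics, dict)
}

# Rank types by F1 to surface the weakest classes
# (here: event-election, event-protest, location-other, art-other).
for name, m in sorted(per_type.items(), key=lambda kv: kv[1]["f1"]):
    print(f"{name:<46} f1={m['f1']:.3f}  support={m['number']}")

# Support-weighted average of per-type F1, for comparison with
# test_overall_f1. The two need not match exactly: the overall score is
# computed over all predicted spans jointly, not by averaging per-type
# scores.
total = sum(m["number"] for m in per_type.values())
weighted_f1 = sum(m["f1"] * m["number"] for m in per_type.values()) / total
print(f"support-weighted per-type F1: {weighted_f1:.4f}")
```

One caveat when reading these numbers: `test_overall_accuracy` (0.923) is a token-level score and is typically inflated by the dominant non-entity tag, so the span-level `test_overall_f1` (0.688) and the per-type scores are the more informative measures of entity-recognition quality.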