{"GEM/web_nlg_en": {"PALM_prompt": {"bleu": 0.36573098571999374, "bleu_stderr": 0.04360246560901712, "rouge1_fmeasure": 0.11134690296906873, "rouge1_fmeasure_stderr": 0.002791584141599169, "rouge1_precision": 0.09072268249056871, "rouge1_precision_stderr": 0.003309127881928078, "rouge1_recall": 0.2567858530968384, "rouge1_recall_stderr": 0.004292381643415595, "rouge2_fmeasure": 0.05139255155957873, "rouge2_fmeasure_stderr": 0.0017044945884763974, "rouge2_precision": 0.04188174110659927, "rouge2_precision_stderr": 0.00201017336799391, "rouge2_recall": 0.12225529287536878, "rouge2_recall_stderr": 0.0029565428717282195, "rougeL_fmeasure": 0.1024427565171009, "rougeL_fmeasure_stderr": 0.002361887124438967, "rougeL_precision": 0.0820576144802433, "rougeL_precision_stderr": 0.002849358571939298, "rougeL_recall": 0.24398283648869387, "rougeL_recall_stderr": 0.004010077159680049, "rougeLsum_fmeasure": 0.10363166429373906, "rougeLsum_fmeasure_stderr": 0.002442180833294991, "rougeLsum_precision": 0.08363859011376361, "rougeLsum_precision_stderr": 0.0029610562342101215, "rougeLsum_recall": 0.24452543427182877, "rougeLsum_recall_stderr": 0.00401534422341755}}, "e2e_nlg_cleaned": {"generate_text_restaurant": {"bleu": 10.728282207198331, "bleu_stderr": 0.24011799063293282, "rouge1_fmeasure": 0.4068134876287919, "rouge1_fmeasure_stderr": 0.0022557400359672, "rouge1_precision": 0.477647664692683, "rouge1_precision_stderr": 0.0031248017700348297, "rouge1_recall": 0.39135016208440804, "rouge1_recall_stderr": 0.0027569384858509, "rouge2_fmeasure": 0.18288067492070217, "rouge2_fmeasure_stderr": 0.0018865611354475435, "rouge2_precision": 0.21801326437861637, "rouge2_precision_stderr": 0.0024632223317407284, "rouge2_recall": 0.17630740013929216, "rouge2_recall_stderr": 0.0020437181517989683, "rougeL_fmeasure": 0.30792019503460466, "rougeL_fmeasure_stderr": 0.0019572611259853316, "rougeL_precision": 0.36356480318205375, "rougeL_precision_stderr": 0.002775946018030203, "rougeL_recall": 0.2960996158626088, "rougeL_recall_stderr": 0.002317240262095146, "rougeLsum_fmeasure": 0.34545652721327375, "rougeLsum_fmeasure_stderr": 0.0022151340769871386, "rougeLsum_precision": 0.40657650575499105, "rougeLsum_precision_stderr": 0.003027829983953915, "rougeLsum_recall": 0.3321667166863464, "rougeLsum_recall_stderr": 0.002593003012673271}}}