{"GEM/web_nlg_en": {"PALM_prompt": {"bleu": 0.32359241899151747, "bleu_stderr": 0.02402979694298941, "rouge1_fmeasure": 0.10561184879910503, "rouge1_fmeasure_stderr": 0.0025642659603447846, "rouge1_precision": 0.08608348405841157, "rouge1_precision_stderr": 0.0030889568803165366, "rouge1_recall": 0.24581368650128066, "rouge1_recall_stderr": 0.0042101313562470394, "rouge2_fmeasure": 0.04745672201410211, "rouge2_fmeasure_stderr": 0.0014569648711692605, "rouge2_precision": 0.03877151013301891, "rouge2_precision_stderr": 0.0018686193069556473, "rouge2_recall": 0.11616825070098583, "rouge2_recall_stderr": 0.002832186555639384, "rougeL_fmeasure": 0.09789371482528919, "rougeL_fmeasure_stderr": 0.00220103630816279, "rougeL_precision": 0.07844420087825354, "rougeL_precision_stderr": 0.002692441142953267, "rougeL_recall": 0.23451408639630753, "rougeL_recall_stderr": 0.003961883955493253, "rougeLsum_fmeasure": 0.09830813287691423, "rougeLsum_fmeasure_stderr": 0.0022462680004158276, "rougeLsum_precision": 0.07915325102565371, "rougeLsum_precision_stderr": 0.0027526142780926856, "rougeLsum_recall": 0.23412111056208834, "rougeLsum_recall_stderr": 0.003946618325848973}}, "e2e_nlg_cleaned": {"generate_text_restaurant": {"bleu": 10.486924146893084, "bleu_stderr": 0.1879978902522849, "rouge1_fmeasure": 0.40652468037996703, "rouge1_fmeasure_stderr": 0.0022669994108304747, "rouge1_precision": 0.478367908606433, "rouge1_precision_stderr": 0.0031354672753373947, "rouge1_recall": 0.390350749308519, "rouge1_recall_stderr": 0.002801854940488885, "rouge2_fmeasure": 0.18108294134986383, "rouge2_fmeasure_stderr": 0.0018807245099643842, "rouge2_precision": 0.21544418371661336, "rouge2_precision_stderr": 0.002433255884136804, "rouge2_recall": 0.17472231572324518, "rouge2_recall_stderr": 0.002057064466752639, "rougeL_fmeasure": 0.3060829327052773, "rougeL_fmeasure_stderr": 0.0019203124081712175, "rougeL_precision": 0.3619072629236733, "rougeL_precision_stderr": 0.0027220547153319315, "rougeL_recall": 0.2939266536878585, "rougeL_recall_stderr": 0.002311156416614425, "rougeLsum_fmeasure": 0.34225992780755277, "rougeLsum_fmeasure_stderr": 0.002203497378125526, "rougeLsum_precision": 0.40337393321175263, "rougeLsum_precision_stderr": 0.0029962931876702597, "rougeLsum_recall": 0.32865230989492134, "rougeLsum_recall_stderr": 0.00260768739886733}}}