{"GEM/web_nlg_en": {"PALM_prompt": {"bleu": 0.294397302224118, "bleu_stderr": 0.03180020978248308, "rouge1_fmeasure": 0.09327639689125222, "rouge1_fmeasure_stderr": 0.0023221008649308332, "rouge1_precision": 0.07227299460032628, "rouge1_precision_stderr": 0.002724334423830787, "rouge1_recall": 0.22804674781838755, "rouge1_recall_stderr": 0.004198588087398287, "rouge2_fmeasure": 0.041838265087868864, "rouge2_fmeasure_stderr": 0.0013598421719273656, "rouge2_precision": 0.03215453861806292, "rouge2_precision_stderr": 0.0015988164849269554, "rouge2_recall": 0.10594236000175299, "rouge2_recall_stderr": 0.0027374156958961545, "rougeL_fmeasure": 0.08794733350967848, "rougeL_fmeasure_stderr": 0.0020909652906707936, "rougeL_precision": 0.0671362857152771, "rougeL_precision_stderr": 0.002432702789118092, "rougeL_recall": 0.2199426029207181, "rougeL_recall_stderr": 0.004048168624147004, "rougeLsum_fmeasure": 0.08851003890238592, "rougeLsum_fmeasure_stderr": 0.0021375180109742087, "rougeLsum_precision": 0.06792716569285503, "rougeLsum_precision_stderr": 0.0024928034182697552, "rougeLsum_recall": 0.2192671006402645, "rougeLsum_recall_stderr": 0.004000455487925256}}, "GEM/wiki_lingua_en": {"tldr_en": {"bleu": 0.6684044638312031, "bleu_stderr": 0.04187470618449945, "rouge1_fmeasure": 0.10763826159933514, "rouge1_fmeasure_stderr": 0.0013525324871299627, "rouge1_precision": 0.12308182097233723, "rouge1_precision_stderr": 0.0018481502410378418, "rouge1_recall": 0.12449112001433235, "rouge1_recall_stderr": 0.001825226039254451, "rouge2_fmeasure": 0.008130248785767194, "rouge2_fmeasure_stderr": 0.00045500400898887644, "rouge2_precision": 0.009790871714963267, "rouge2_precision_stderr": 0.0007348150994641933, "rouge2_recall": 0.010222269667937045, "rouge2_recall_stderr": 0.0006436041200909552, "rougeL_fmeasure": 0.08575832818443441, "rougeL_fmeasure_stderr": 0.0010004793820906812, "rougeL_precision": 0.09890502496301032, "rougeL_precision_stderr": 0.0014972754172167691, "rougeL_recall": 0.10004004635352211, "rougeL_recall_stderr": 0.0014274970250568245, "rougeLsum_fmeasure": 0.10330396931538684, "rougeLsum_fmeasure_stderr": 0.0012724265721607867, "rougeLsum_precision": 0.11802395204372415, "rougeLsum_precision_stderr": 0.0017510472325683625, "rougeLsum_recall": 0.11973835837821764, "rougeLsum_recall_stderr": 0.0017361045661803502}}, "e2e_nlg_cleaned": {"generate_text_restaurant": {"bleu": 8.823096779660515, "bleu_stderr": 0.11690653415613038, "rouge1_fmeasure": 0.3933483684246514, "rouge1_fmeasure_stderr": 0.002223340123438031, "rouge1_precision": 0.46823513585686943, "rouge1_precision_stderr": 0.003048586808321214, "rouge1_recall": 0.3744402407613394, "rouge1_recall_stderr": 0.0027362585254064632, "rouge2_fmeasure": 0.16466434977054223, "rouge2_fmeasure_stderr": 0.001783331988731105, "rouge2_precision": 0.19862111365997734, "rouge2_precision_stderr": 0.0022804160473922914, "rouge2_recall": 0.1567691503171535, "rouge2_recall_stderr": 0.0019052405845258668, "rougeL_fmeasure": 0.2874028952318806, "rougeL_fmeasure_stderr": 0.001851572010814936, "rougeL_precision": 0.34452736640518433, "rougeL_precision_stderr": 0.0026260646469064633, "rougeL_recall": 0.2731355581022275, "rougeL_recall_stderr": 0.002192220161659619, "rougeLsum_fmeasure": 0.3219621299960541, "rougeLsum_fmeasure_stderr": 0.0020970190351953305, "rougeLsum_precision": 0.38452681092769214, "rougeLsum_precision_stderr": 0.0028645263440308702, "rougeLsum_recall": 0.3061584330285605, "rougeLsum_recall_stderr": 0.002457979540915117}}, "gem_xsum": 
{"article_DOC_summary": {"bleu": 0.8454754025100735, "bleu_stderr": 0.1076186112658887, "rouge1_fmeasure": 0.15203240352186698, "rouge1_fmeasure_stderr": 0.002308658375593066, "rouge1_precision": 0.12113951797315028, "rouge1_precision_stderr": 0.0018830952193454589, "rouge1_recall": 0.23806957065750983, "rouge1_recall_stderr": 0.004206003400223407, "rouge2_fmeasure": 0.019938218722607128, "rouge2_fmeasure_stderr": 0.001057767139147975, "rouge2_precision": 0.014684549430517613, "rouge2_precision_stderr": 0.0007903875881572367, "rouge2_recall": 0.034654175781707264, "rouge2_recall_stderr": 0.0018699275977704895, "rougeL_fmeasure": 0.1157471734098351, "rougeL_fmeasure_stderr": 0.0016180551266214967, "rougeL_precision": 0.09245797918691692, "rougeL_precision_stderr": 0.001365895920099929, "rougeL_recall": 0.18210706545486832, "rougeL_recall_stderr": 0.0030918589040844742, "rougeLsum_fmeasure": 0.11978857826813652, "rougeLsum_fmeasure_stderr": 0.001872584027844624, "rougeLsum_precision": 0.09534512345985657, "rougeLsum_precision_stderr": 0.0015122187391096182, "rougeLsum_recall": 0.18894695865315375, "rougeLsum_recall_stderr": 0.0035149754701327618}}}