{ "results": [ { "task_name": "mlsum_es", "prompt_name": "layman_summ_es", "bleu": 3.606126125049115, "dataset_path": "GEM/mlsum", "dataset_name": "es", "subset": "", "bleu_stderr": 0.1777428611437274 }, { "task_name": "mlsum_es", "prompt_name": "layman_summ_es", "rouge1_precision": 0.24897524860913609, "dataset_path": "GEM/mlsum", "dataset_name": "es", "subset": "", "rouge1_precision_stderr": 0.002769352726963559 }, { "task_name": "mlsum_es", "prompt_name": "layman_summ_es", "rouge1_recall": 0.24862486574088613, "dataset_path": "GEM/mlsum", "dataset_name": "es", "subset": "", "rouge1_recall_stderr": 0.0030999097459434703 }, { "task_name": "mlsum_es", "prompt_name": "layman_summ_es", "rouge1_fmeasure": 0.22369539928629967, "dataset_path": "GEM/mlsum", "dataset_name": "es", "subset": "", "rouge1_fmeasure_stderr": 0.002355115682657273 }, { "task_name": "mlsum_es", "prompt_name": "layman_summ_es", "rouge2_precision": 0.06634301490578277, "dataset_path": "GEM/mlsum", "dataset_name": "es", "subset": "", "rouge2_precision_stderr": 0.002048301218559081 }, { "task_name": "mlsum_es", "prompt_name": "layman_summ_es", "rouge2_recall": 0.06986753000720762, "dataset_path": "GEM/mlsum", "dataset_name": "es", "subset": "", "rouge2_recall_stderr": 0.002064436828187958 }, { "task_name": "mlsum_es", "prompt_name": "layman_summ_es", "rouge2_fmeasure": 0.06070627110770783, "dataset_path": "GEM/mlsum", "dataset_name": "es", "subset": "", "rouge2_fmeasure_stderr": 0.001726497816324924 }, { "task_name": "mlsum_es", "prompt_name": "layman_summ_es", "rougeL_precision": 0.1940954836427296, "dataset_path": "GEM/mlsum", "dataset_name": "es", "subset": "", "rougeL_precision_stderr": 0.002431855538295837 }, { "task_name": "mlsum_es", "prompt_name": "layman_summ_es", "rougeL_recall": 0.18959437921291328, "dataset_path": "GEM/mlsum", "dataset_name": "es", "subset": "", "rougeL_recall_stderr": 0.0025078934952384247 }, { "task_name": "mlsum_es", "prompt_name": "layman_summ_es", "rougeL_fmeasure": 0.1716590640884715, "dataset_path": "GEM/mlsum", "dataset_name": "es", "subset": "", "rougeL_fmeasure_stderr": 0.001979894431520568 }, { "task_name": "mlsum_es", "prompt_name": "layman_summ_es", "rougeLsum_precision": 0.19939821083949505, "dataset_path": "GEM/mlsum", "dataset_name": "es", "subset": "", "rougeLsum_precision_stderr": 0.0024800156414648273 }, { "task_name": "mlsum_es", "prompt_name": "layman_summ_es", "rougeLsum_recall": 0.19543176650107444, "dataset_path": "GEM/mlsum", "dataset_name": "es", "subset": "", "rougeLsum_recall_stderr": 0.0025892786548937794 }, { "task_name": "mlsum_es", "prompt_name": "layman_summ_es", "rougeLsum_fmeasure": 0.17657777023351298, "dataset_path": "GEM/mlsum", "dataset_name": "es", "subset": "", "rougeLsum_fmeasure_stderr": 0.002018383139682419 } ], "config": { "model": "hf-causal", "model_args": "pretrained=/gpfsscratch/rech/six/commun/commun/experiments/muennighoff/bloomckpt/176bt0/xp3capmixnewcodelonglossseq_global_step498,use_accelerate=True,tokenizer=/gpfsscratch/rech/six/commun/commun/experiments/muennighoff/bloomckpt/176bt0/xp3capmixnewcodelonglossseq_global_step498,dtype=bfloat16", "num_fewshot": 0, "batch_size": 4, "device": "cuda", "use_cache": false, "limit": 3000, "bootstrap_iters": 10, "seed": 1234 } }