{
  "results": [
    {
      "task_name": "gem_xsum",
      "prompt_name": "article_DOC_summary",
      "rouge1_precision": 0.0021529907822185413,
      "dataset_path": "GEM/xsum",
      "dataset_name": null,
      "subset": "",
      "rouge1_precision_stderr": 0.0006324831746531657
    },
    {
      "task_name": "gem_xsum",
      "prompt_name": "article_DOC_summary",
      "rouge1_recall": 0.001764574432238271,
      "dataset_path": "GEM/xsum",
      "dataset_name": null,
      "subset": "",
      "rouge1_recall_stderr": 0.0005172389485732619
    },
    {
      "task_name": "gem_xsum",
      "prompt_name": "article_DOC_summary",
      "rouge1_fmeasure": 0.0019051009413407058,
      "dataset_path": "GEM/xsum",
      "dataset_name": null,
      "subset": "",
      "rouge1_fmeasure_stderr": 0.0005593343846105789
    },
    {
      "task_name": "gem_xsum",
      "prompt_name": "article_DOC_summary",
      "rouge2_precision": 0.0001457415441877151,
      "dataset_path": "GEM/xsum",
      "dataset_name": null,
      "subset": "",
      "rouge2_precision_stderr": 8.410282821934284e-05
    },
    {
      "task_name": "gem_xsum",
      "prompt_name": "article_DOC_summary",
      "rouge2_recall": 0.00011802662746058974,
      "dataset_path": "GEM/xsum",
      "dataset_name": null,
      "subset": "",
      "rouge2_recall_stderr": 6.837858900511585e-05
    },
    {
      "task_name": "gem_xsum",
      "prompt_name": "article_DOC_summary",
      "rouge2_fmeasure": 0.0001299594149643802,
      "dataset_path": "GEM/xsum",
      "dataset_name": null,
      "subset": "",
      "rouge2_fmeasure_stderr": 7.503370260000825e-05
    },
    {
      "task_name": "gem_xsum",
      "prompt_name": "article_DOC_summary",
      "rougeL_precision": 0.0017522974182971845,
      "dataset_path": "GEM/xsum",
      "dataset_name": null,
      "subset": "",
      "rougeL_precision_stderr": 0.000503946684193751
    },
    {
      "task_name": "gem_xsum",
      "prompt_name": "article_DOC_summary",
      "rougeL_recall": 0.0014399120468157657,
      "dataset_path": "GEM/xsum",
      "dataset_name": null,
      "subset": "",
      "rougeL_recall_stderr": 0.0004056116387189239
    },
    {
      "task_name": "gem_xsum",
      "prompt_name": "article_DOC_summary",
      "rougeL_fmeasure": 0.0015504765957310557,
      "dataset_path": "GEM/xsum",
      "dataset_name": null,
      "subset": "",
      "rougeL_fmeasure_stderr": 0.0004402385576724586
    },
    {
      "task_name": "gem_xsum",
      "prompt_name": "article_DOC_summary",
      "rougeLsum_precision": 0.0018058994766162238,
      "dataset_path": "GEM/xsum",
      "dataset_name": null,
      "subset": "",
      "rougeLsum_precision_stderr": 0.0005178578773672206
    },
    {
      "task_name": "gem_xsum",
      "prompt_name": "article_DOC_summary",
      "rougeLsum_recall": 0.001474217364139951,
      "dataset_path": "GEM/xsum",
      "dataset_name": null,
      "subset": "",
      "rougeLsum_recall_stderr": 0.0004127036649374616
    },
    {
      "task_name": "gem_xsum",
      "prompt_name": "article_DOC_summary",
      "rougeLsum_fmeasure": 0.0015923123485654278,
      "dataset_path": "GEM/xsum",
      "dataset_name": null,
      "subset": "",
      "rougeLsum_fmeasure_stderr": 0.00044995087651658854
    },
    {
      "task_name": "gem_xsum",
      "prompt_name": "article_DOC_summary",
      "bleu": 2.3013943780107486e-40,
      "dataset_path": "GEM/xsum",
      "dataset_name": null,
      "subset": "",
      "bleu_stderr": 6.513754776072693e-35
    }
  ],
  "config": {
    "model": "hf-causal",
    "model_args": "pretrained=/pfs/lustrep4/scratch/project_462000119/muennighoff/nov-2022-bettercom/lm1-4b2-84b-c4-repetitions/4b284b28bc4/transformers,use_accelerate=True,tokenizer=/pfs/lustrep4/scratch/project_462000119/muennighoff/nov-2022-bettercom/gpt2,dtype=bfloat16",
    "task_args": "",
    "num_fewshot": 5,
    "batch_size": 16,
    "device": "cuda",
    "use_cache": false,
    "limit": 3000,
    "bootstrap_iters": 10,
    "seed": 1234
  }
}