evaluation / bloom_176B_2shot / bloom_176B_bigbench_sentence_ambiguity_2shot.json
Muennighoff's picture
Add eval
1e119e1
raw
history blame
549 Bytes
{
"results": {
"bigbench_sentence_ambiguity": {
"multiple_choice_grade": 0.5166666666666667,
"multiple_choice_grade_stderr": 0.06505828185300304
}
},
"versions": {
"bigbench_sentence_ambiguity": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/pfs/lustrep2/scratch/project_462000185/muennighoff/bloom",
"num_fewshot": 2,
"batch_size": null,
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}