{
"results": {
"bigbench_empirical_judgments": {
"multiple_choice_grade": 0.3333333333333333,
"multiple_choice_grade_stderr": 0.04761904761904759
}
},
"versions": {
"bigbench_empirical_judgments": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/pfs/lustrep2/scratch/project_462000185/muennighoff/bloom",
"num_fewshot": 0,
"batch_size": null,
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}