evaluation / bloom_176B_2shot / bloom_176B_bigbench_empirical_judgments_2shot.json
Muennighoff's picture
Add eval
1e119e1
raw
history blame
550 Bytes
{
"results": {
"bigbench_empirical_judgments": {
"multiple_choice_grade": 0.3434343434343434,
"multiple_choice_grade_stderr": 0.0479675905875748
}
},
"versions": {
"bigbench_empirical_judgments": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/pfs/lustrep2/scratch/project_462000185/muennighoff/bloom",
"num_fewshot": 2,
"batch_size": null,
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}