evaluation / bloom_176B_1shot / bloom_176B_bigbench_misconceptions_1shot.json
Muennighoff's picture
Add eval
1e119e1
raw
history blame
541 Bytes
{
"results": {
"bigbench_misconceptions": {
"multiple_choice_grade": 0.5074626865671642,
"multiple_choice_grade_stderr": 0.04335066912520505
}
},
"versions": {
"bigbench_misconceptions": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/pfs/lustrep2/scratch/project_462000185/muennighoff/bloom",
"num_fewshot": 1,
"batch_size": null,
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}