{
  "results": {
    "mmlu_pt": {
      "acc": 0.2809216451516061,
      "acc_stderr": 0.0038938542873620118,
      "acc_norm": 0.287676373461423,
      "acc_norm_stderr": 0.0039218389764563225
    }
  },
  "versions": {
    "mmlu_pt": 0
  },
  "config": {
    "model": "hf-auto",
    "model_args": "pretrained=bigscience/bloom-7b1",
    "batch_size": 1,
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
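
The layout above matches the results files emitted by EleutherAI's lm-evaluation-harness (an assumption based on the results/versions/config keys and the bootstrap_iters setting). A minimal Python sketch for reading the accuracy figures back out, assuming the file is saved locally as mmlu_pt.json (hypothetical path):

import json

# Load the results file (the path is an assumption, not part of the repo).
with open("mmlu_pt.json") as f:
    data = json.load(f)

for task, metrics in data["results"].items():
    # acc_stderr is the harness's bootstrap standard-error estimate
    # (see bootstrap_iters in the config block above); 1.96 * stderr
    # gives an approximate 95% confidence half-width.
    half_width = 1.96 * metrics["acc_stderr"]
    print(f"{task}: acc = {metrics['acc']:.4f} ± {half_width:.4f}")

With the numbers above, this prints roughly: mmlu_pt: acc = 0.2809 ± 0.0076.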