{
  "results": {
    "mmlu_ro": {
      "acc": 0.2555891238670695,
      "acc_stderr": 0.003790966515146354,
      "acc_norm": 0.2737160120845921,
      "acc_norm_stderr": 0.0038750360364507622
    }
  },
  "versions": {
    "mmlu_ro": 0
  },
  "config": {
    "model": "hf-auto",
    "model_args": "pretrained=bigscience/bloom-7b1",
    "batch_size": 1,
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
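
The structure above matches the output format of EleutherAI's lm-evaluation-harness (per-task metrics under "results", task versions, and the run "config"). Below is a minimal sketch of reading such a file with Python's standard library; the filename "results.json" is an assumption, as the file's actual path is not shown.

import json

# Load the eval results file; "results.json" is a placeholder path.
with open("results.json") as f:
    data = json.load(f)

# Print accuracy and normalized accuracy for each task,
# alongside their bootstrap standard errors.
for task, metrics in data["results"].items():
    print(
        f"{task}: acc = {metrics['acc']:.4f} (+/- {metrics['acc_stderr']:.4f}), "
        f"acc_norm = {metrics['acc_norm']:.4f} (+/- {metrics['acc_norm_stderr']:.4f})"
    )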