{
  "results": {
    "arc_challenge": {
      "acc": 0.16723549488054607,
      "acc_stderr": 0.01090553272460121,
      "acc_norm": 0.22184300341296928,
      "acc_norm_stderr": 0.012141659068147884
    }
  },
  "versions": {
    "arc_challenge": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=BEE-spoke-data/smol_llama-81M-tied,revision=main,trust_remote_code=True,dtype='float'",
    "num_fewshot": 25,
    "batch_size": "16",
    "batch_sizes": [],
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
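
For reference, a minimal sketch of reading a results file with this shape and printing each metric alongside its standard error. The filename `results.json` is an assumption; point it at wherever this file is saved.

```python
import json

# Assumed path; adjust to the location of the results file above.
with open("results.json") as f:
    data = json.load(f)

# Each task (here, arc_challenge at 25-shot) reports metrics such as
# acc and acc_norm, each paired with a *_stderr entry.
for task, metrics in data["results"].items():
    for name, value in metrics.items():
        if name.endswith("_stderr"):
            continue  # stderr values are printed next to their metric
        stderr = metrics.get(f"{name}_stderr")
        if stderr is not None:
            print(f"{task} {name}: {value:.4f} ± {stderr:.4f}")
        else:
            print(f"{task} {name}: {value:.4f}")
```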