{
  "results": {
    "hellaswag": {
      "acc": 0.27450199203187253,
      "acc_stderr": 0.008909237404005179,
      "acc_norm": 0.28884462151394424,
      "acc_norm_stderr": 0.009048238955347484
    }
  },
  "versions": {
    "hellaswag": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=BEE-spoke-data/smol_llama-81M-tied,revision=main,trust_remote_code=True,dtype='float'",
    "num_fewshot": 10,
    "batch_size": "16",
    "batch_sizes": [],
    "device": "cuda",
    "no_cache": false,
    "limit": 0.25,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}
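
For reference, a minimal Python sketch that loads a results file like the one above and prints each task's metrics with their bootstrap standard errors; the filename results.json is an assumption, not part of the file itself.

import json

# Load the harness results file (filename assumed; adjust to the actual path).
with open("results.json") as f:
    data = json.load(f)

# Print each task's accuracy metrics alongside their standard errors.
for task, metrics in data["results"].items():
    print(f"{task}: acc={metrics['acc']:.4f} ± {metrics['acc_stderr']:.4f}, "
          f"acc_norm={metrics['acc_norm']:.4f} ± {metrics['acc_norm_stderr']:.4f}")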