{
  "results": {
    "truthfulqa_bn": {
      "mc1": 0.2765685019206146,
      "mc1_stderr": 0.016015952210618845,
      "mc2": 0.5123820777474262,
      "mc2_stderr": 0.01680032112327857
    }
  },
  "versions": {
    "truthfulqa_bn": 1
  },
  "config": {
    "model": "hf-auto",
    "model_args": "pretrained=/sensei-fs/users/daclai/uoChatGPT/llama-7B",
    "batch_size": 1,
    "device": "cuda",
    "no_cache": false,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}