{ "results": { "truthfulqa_mc": { "mc1": 0.2631578947368421, "mc1_stderr": 0.015415241740237024, "mc2": 0.4225443383057251, "mc2_stderr": 0.014864051479547237 } }, "versions": { "truthfulqa_mc": 1 }, "config": { "model": "sparseml", "model_args": "pretrained=/cerebras/experiments/spft-retrained_sparse70_llama2_DATAUpdated_KDFalse_GCTrue_LR1e-4_E6-/combined/,trust_remote_code=True", "num_fewshot": 0, "batch_size": "64", "batch_sizes": [], "device": "cuda:0", "no_cache": true, "limit": null, "bootstrap_iters": 100000, "description_dict": {} } }