{
  "config": {
    "model_dtype": "torch.float16",
    "model_name": "huggyllama/llama-7b"
  },
  "results": {
    "MMLU": {
      "metric_name": 0.2588922576351401
    },
    "Truthful_qa": {
      "metric_name": 0.43169172402480593
    },
    "ARC": {
      "metric_name": 0.2508532423208191
    },
    "HellaSwag": {
      "metric_name": 0.2931015016371232
    },
    "GSM8K": {
      "metric_name": 0.016704631738800303
    },
    "Winogrande": {
      "metric_name": 0.48973143759873616
    }
  }
}