{ "config": { "model_dtype": "torch.float16", "model_name": "notlober/llama3-8b-tr" }, "results": { "MMLU": { "metric_name": 0.4597352658433779 }, "Truthful_qa": { "metric_name": 0.4523010631732335 }, "ARC": { "metric_name": 0.4112627986348123 }, "HellaSwag": { "metric_name": 0.47081404538782884 }, "GSM8K": { "metric_name": 0.38724373576309795 }, "Winogrande": { "metric_name": 0.5339652448657188 } } }