{
  "config": {
    "model_dtype": "torch.float16",
    "model_name": "aerdincdal/CBDDO-LLM-8B-Instruct-v1"
  },
  "results": {
    "MMLU": {
      "metric_name": 0.4370
    },
    "Truthful_qa": {
      "metric_name": 0.4656
    },
    "ARC": {
      "metric_name": 0.4215
    },
    "HellaSwag": {
      "metric_name": 0.4526
    },
    "GSM8K": {
      "metric_name": 0.0091
    },
    "Winogrande": {
      "metric_name": 0.5379
    }
  }
}