{
  "config": {
    "model_dtype": "torch.float16",
    "model_name": "aerdincdal/CBDDO-LLM-8B-Instruct-v1"
  },
  "results": {
    "MMLU": {
      "metric_name": 0.4380684759298972
    },
    "Truthful_qa": {
      "metric_name": 0.45898189506718756
    },
    "ARC": {
      "metric_name": 0.41638225255972694
    },
    "HellaSwag": {
      "metric_name": 0.44507169470475333
    },
    "GSM8K": {
      "metric_name": 0.008352315869400152
    },
    "Winogrande": {
      "metric_name": 0.5315955766192733
    }
  }
}