llm_pt_leaderboard_requests/NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO_eval_request_False_float16_Original.json
{
  "model": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
  "base_model": "",
  "revision": "main",
  "private": false,
  "precision": "float16",
  "params": 46.703,
  "architectures": "MixtralForCausalLM",
  "weight_type": "Original",
  "status": "FINISHED",
  "submitted_time": "2024-02-21T13:34:22Z",
  "model_type": "🔶 : fine-tuned/fp on domain-specific datasets",
  "source": "leaderboard",
  "job_id": 295,
  "job_start_time": "2024-03-06T22-07-16.186340",
  "eval_version": "1.1.0",
  "result_metrics": {
    "enem_challenge": 0.655703289013296,
    "bluex": 0.5535465924895688,
    "oab_exams": 0.4710706150341686,
    "assin2_rte": 0.9011405575094769,
    "assin2_sts": 0.7346929104749711,
    "faquad_nli": 0.7626485982066783,
    "hatebr_offensive": 0.7640680874353314,
    "portuguese_hate_speech": 0.5811439239646979,
    "tweetsentbr": 0.6217084995395291
  },
  "result_metrics_average": 0.6717470081853019,
  "result_metrics_npm": 0.5055825901432658,
  "hide": true
}
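For a quick sanity check, the minimal sketch below (assuming the request file has been downloaded locally; the filename in `path` is only an illustrative assumption) loads the JSON and recomputes `result_metrics_average` as the plain arithmetic mean of the nine task scores, which matches the stored value. The normalized score `result_metrics_npm` depends on per-task baselines that are not stored in this file, so it is not recomputed here.

import json
from statistics import mean

# Hypothetical local filename; point this at wherever the request file was saved.
path = "Nous-Hermes-2-Mixtral-8x7B-DPO_eval_request_False_float16_Original.json"

with open(path, encoding="utf-8") as fh:
    request = json.load(fh)

# result_metrics_average is the unweighted mean of the nine task scores.
scores = request["result_metrics"]
recomputed = mean(scores.values())

print(f"stored:     {request['result_metrics_average']:.10f}")
print(f"recomputed: {recomputed:.10f}")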