llm_pt_leaderboard_requests / NousResearch / Nous-Hermes-2-Yi-34B_eval_request_False_bfloat16_Original.json
Update status of NousResearch/Nous-Hermes-2-Yi-34B_eval_request_False_bfloat16_Original to FINISHED
Commit c5bc427 (verified)
{
    "model": "NousResearch/Nous-Hermes-2-Yi-34B",
    "base_model": "",
    "revision": "main",
    "private": false,
    "precision": "bfloat16",
    "params": 34.389,
    "architectures": "LlamaForCausalLM",
    "weight_type": "Original",
    "status": "FINISHED",
    "submitted_time": "2024-02-27T00:37:39Z",
    "model_type": "💬 : chat models (RLHF, DPO, IFT, ...)",
    "source": "leaderboard",
    "job_id": 268,
    "job_start_time": "2024-02-27T03-50-28.297314",
    "eval_version": "1.1.0",
    "result_metrics": {
        "enem_challenge": 0.7312806158152554,
        "bluex": 0.6578581363004172,
        "oab_exams": 0.5599088838268793,
        "assin2_rte": 0.9215044447012628,
        "assin2_sts": 0.7985401560561216,
        "faquad_nli": 0.7605236777394121,
        "hatebr_offensive": 0.7703803469511286,
        "portuguese_hate_speech": 0.6607502875554572,
        "tweetsentbr": 0.6569392825486907
    },
    "result_metrics_average": 0.7241873146105138,
    "result_metrics_npm": 0.5795863393852321
}
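As a quick sanity check on the reported figures, the sketch below recomputes the aggregate score from the per-task results. It assumes result_metrics_average is the plain unweighted arithmetic mean of the nine task scores (the file itself does not state this), and it does not attempt result_metrics_npm, whose normalization baselines are not included here; the file path is also an assumption to adjust for your local copy.

    import json

    # Load the eval request file (path assumed; adjust as needed).
    with open("Nous-Hermes-2-Yi-34B_eval_request_False_bfloat16_Original.json") as f:
        request = json.load(f)

    scores = request["result_metrics"]

    # Assumption: the average is the unweighted mean of the nine task scores.
    average = sum(scores.values()) / len(scores)
    print(f"recomputed average: {average:.16f}")                              # ~0.7241873146105138
    print(f"reported average:   {request['result_metrics_average']:.16f}")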