llm_pt_leaderboard_requests
/
NousResearch
/Hermes-2-Theta-Llama-3-8B_eval_request_False_bfloat16_Original.json
Update status of NousResearch/Hermes-2-Theta-Llama-3-8B_eval_request_False_bfloat16_Original to FINISHED
37c5006
verified
{
    "model": "NousResearch/Hermes-2-Theta-Llama-3-8B",
    "base_model": "",
    "revision": "main",
    "private": false,
    "precision": "bfloat16",
    "params": 8.03,
    "architectures": "LlamaForCausalLM",
    "weight_type": "Original",
    "main_language": "English",
    "status": "FINISHED",
    "submitted_time": "2024-05-27T04:35:42Z",
    "model_type": "💬 : chat (RLHF, DPO, IFT, ...)",
    "source": "leaderboard",
    "job_id": 743,
    "job_start_time": "2024-05-27T07-29-46.242035",
    "eval_version": "1.1.0",
    "result_metrics": {
        "enem_challenge": 0.6997900629811057,
        "bluex": 0.5702364394993046,
        "oab_exams": 0.47198177676537584,
        "assin2_rte": 0.9207473491333994,
        "assin2_sts": 0.7468943971678241,
        "faquad_nli": 0.7622538924128841,
        "hatebr_offensive": 0.7992468869348173,
        "portuguese_hate_speech": 0.6630305662353587,
        "tweetsentbr": 0.6659324957483715
    },
    "result_metrics_average": 0.7000126518753824,
    "result_metrics_npm": 0.5524589589565919
}