llm_pt_leaderboard_requests / Danielbrdz / Barcenas-Llama3-8b-ORPO_eval_request_False_float16_Original.json
Update status of Danielbrdz/Barcenas-Llama3-8b-ORPO_eval_request_False_float16_Original to FINISHED (commit e12c044, verified)
{
    "model": "Danielbrdz/Barcenas-Llama3-8b-ORPO",
    "base_model": "",
    "revision": "main",
    "private": false,
    "precision": "float16",
    "params": 8.03,
    "architectures": "LlamaForCausalLM",
    "weight_type": "Original",
    "main_language": "English",
    "status": "FINISHED",
    "submitted_time": "2024-05-13T16:38:54Z",
    "model_type": "🔶 : fine-tuned/fp on domain-specific datasets",
    "source": "leaderboard",
    "job_id": 636,
    "job_start_time": "2024-05-18T00-12-52.690138",
    "eval_version": "1.1.0",
    "result_metrics": {
        "enem_challenge": 0.7102869139258222,
        "bluex": 0.5827538247566064,
        "oab_exams": 0.508883826879271,
        "assin2_rte": 0.9178150146340144,
        "assin2_sts": 0.7260402501200387,
        "faquad_nli": 0.7308849598805747,
        "hatebr_offensive": 0.8698828946051447,
        "portuguese_hate_speech": 0.5958643988009942,
        "tweetsentbr": 0.4996436497852127
    },
    "result_metrics_average": 0.6824506370430754,
    "result_metrics_npm": 0.525680465043327
}