{
"model": "Danielbrdz/Barcenas-Llama3-8b-ORPO",
"base_model": "",
"revision": "main",
"private": false,
"precision": "float16",
"params": 8.03,
"architectures": "LlamaForCausalLM",
"weight_type": "Original",
"main_language": "English",
"status": "FINISHED",
"submitted_time": "2024-05-13T16:38:54Z",
"model_type": "🔶 : fine-tuned/fp on domain-specific datasets",
"source": "leaderboard",
"job_id": 636,
"job_start_time": "2024-05-18T00-12-52.690138",
"eval_version": "1.1.0",
"result_metrics": {
"enem_challenge": 0.7102869139258222,
"bluex": 0.5827538247566064,
"oab_exams": 0.508883826879271,
"assin2_rte": 0.9178150146340144,
"assin2_sts": 0.7260402501200387,
"faquad_nli": 0.7308849598805747,
"hatebr_offensive": 0.8698828946051447,
"portuguese_hate_speech": 0.5958643988009942,
"tweetsentbr": 0.4996436497852127
},
"result_metrics_average": 0.6824506370430754,
"result_metrics_npm": 0.525680465043327
}