{
"model": "MaziyarPanahi/Mistral-7B-Instruct-Aya-101",
"base_model": "",
"revision": "main",
"private": false,
"precision": "bfloat16",
"params": 7.242,
"architectures": "MistralForCausalLM",
"weight_type": "Original",
"main_language": "English",
"status": "FINISHED",
"submitted_time": "2024-04-17T06:11:16Z",
"model_type": "💬 : chat models (RLHF, DPO, IFT, ...)",
"source": "leaderboard",
"job_id": 474,
"job_start_time": "2024-04-17T09-07-30.140283",
"eval_version": "1.1.0",
"result_metrics": {
"enem_challenge": 0.6060181945416375,
"bluex": 0.5438108484005564,
"oab_exams": 0.39362186788154896,
"assin2_rte": 0.9072398971802695,
"assin2_sts": 0.7641692139433879,
"faquad_nli": 0.6218181818181818,
"hatebr_offensive": 0.8004209608305171,
"portuguese_hate_speech": 0.6762940852684385,
"tweetsentbr": 0.5030635127570277
},
"result_metrics_average": 0.6462729736246184,
"result_metrics_npm": 0.4704317569510121
}