{
"model": "NousResearch/Hermes-2-Pro-Llama-3-8B",
"base_model": "",
"revision": "main",
"private": false,
"precision": "float16",
"params": 8.031,
"architectures": "LlamaForCausalLM",
"weight_type": "Original",
"main_language": "English",
"status": "FINISHED",
"submitted_time": "2024-05-06T23:04:05Z",
"model_type": "💬 : chat (RLHF, DPO, IFT, ...)",
"source": "leaderboard",
"job_id": 593,
"job_start_time": "2024-05-07T04-01-33.854422",
"eval_version": "1.1.0",
"result_metrics": {
"enem_challenge": 0.6787963610916725,
"bluex": 0.5702364394993046,
"oab_exams": 0.44738041002277906,
"assin2_rte": 0.9223739628332301,
"assin2_sts": 0.7575480918675715,
"faquad_nli": 0.7486659964426572,
"hatebr_offensive": 0.821316847945847,
"portuguese_hate_speech": 0.6324128242225997,
"tweetsentbr": 0.6706448057731071
},
"result_metrics_average": 0.6943750821887522,
"result_metrics_npm": 0.5438223202296597
}