{
"model": "fernandosola/bluearara-7B",
"base_model": "",
"revision": "main",
"private": false,
"precision": "float16",
"params": 7.0,
"architectures": "MistralForCausalLM",
"weight_type": "Original",
"status": "FINISHED",
"submitted_time": "2024-02-17T01:32:38Z",
"model_type": "🆎 : language adapted models (FP, FT, ...)",
"source": "leaderboard",
"job_id": 253,
"job_start_time": "2024-02-22T13-31-58.902845",
"eval_version": "1.1.0",
"result_metrics": {
"enem_challenge": 0.18754373687893633,
"bluex": 0.22948539638386647,
"oab_exams": 0.26013667425968107,
"assin2_rte": 0.7104594402253265,
"assin2_sts": 0.1302382514093839,
"faquad_nli": 0.4396551724137931,
"hatebr_offensive": 0.5745222209657356,
"portuguese_hate_speech": 0.5425911592707258,
"tweetsentbr": 0.4674139206717111
},
"result_metrics_average": 0.3935606636087955,
"result_metrics_npm": 0.11148954880306625
}