llm_pt_leaderboard_requests/JJhooww/MistralReloadBR_v2_ptbr_eval_request_False_bfloat16_Original.json
{
  "model": "JJhooww/MistralReloadBR_v2_ptbr",
  "base_model": "",
  "revision": "main",
  "private": false,
  "precision": "bfloat16",
  "params": 7.242,
  "architectures": "MistralForCausalLM",
  "weight_type": "Original",
  "status": "FINISHED",
  "submitted_time": "2024-03-08T02:22:06Z",
  "model_type": "🆎 : language adapted models (FP, FT, ...)",
  "source": "leaderboard",
  "job_id": 320,
  "job_start_time": "2024-03-09T04-58-37.486266",
  "eval_version": "1.1.0",
  "result_metrics": {
    "enem_challenge": 0.6081175647305809,
    "bluex": 0.47983310152990266,
    "oab_exams": 0.40728929384965834,
    "assin2_rte": 0.9101172201226876,
    "assin2_sts": 0.745635698648774,
    "faquad_nli": 0.4760412001791312,
    "hatebr_offensive": 0.7982678280152018,
    "portuguese_hate_speech": 0.6632432143375528,
    "tweetsentbr": 0.6700347269707226
  },
  "result_metrics_average": 0.6398422053760237,
  "result_metrics_npm": 0.4567270236874747,
  "main_language": "Portuguese"
}
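
Note: "result_metrics_average" is the unweighted mean of the nine task scores in "result_metrics". The sketch below (not part of the leaderboard codebase; it assumes it is run from the directory containing this JSON) re-derives that value from the file. "result_metrics_npm" is a baseline-normalized mean whose per-task baselines are not stored in this file, so it is not recomputed here.

# verify_average.py -- minimal sketch, assuming the filename shown above.
import json

with open("MistralReloadBR_v2_ptbr_eval_request_False_bfloat16_Original.json") as f:
    request = json.load(f)

# Unweighted mean over the nine per-task scores.
scores = request["result_metrics"]
average = sum(scores.values()) / len(scores)

print(f"recomputed average: {average:.16f}")
print(f"stored average:     {request['result_metrics_average']:.16f}")

# The two agree up to floating-point rounding (0.6398422053760237).
assert abs(average - request["result_metrics_average"]) < 1e-12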