llm_pt_leaderboard_requests/croissantllm/CroissantLLMBase_eval_request_False_float16_Original.json
{
    "model": "croissantllm/CroissantLLMBase",
    "base_model": "",
    "revision": "main",
    "private": false,
    "precision": "float16",
    "params": 0,
    "architectures": "LlamaForCausalLM",
    "weight_type": "Original",
    "status": "FINISHED",
    "submitted_time": "2024-02-17T04:01:24Z",
    "model_type": "🆎 : language adapted models (FP, FT, ...)",
    "source": "leaderboard",
    "job_id": 253,
    "job_start_time": "2024-02-24T19-45-27.123677",
    "eval_version": "1.1.0",
    "result_metrics": {
        "enem_challenge": 0.2001399580125962,
        "bluex": 0.2517385257301808,
        "oab_exams": 0.25375854214123006,
        "assin2_rte": 0.3333333333333333,
        "assin2_sts": 0.13488403072117375,
        "faquad_nli": 0.4396551724137931,
        "hatebr_offensive": 0.3349186726374015,
        "portuguese_hate_speech": 0.37796265667381035,
        "tweetsentbr": 0.1506866897702477
    },
    "result_metrics_average": 0.2752308423815297,
    "result_metrics_npm": -0.10852874111412539,
    "main_language": "English"
}