{
"model": "EleutherAI/gpt-j-6b",
"base_model": "",
"revision": "main",
"private": false,
"precision": "float16",
"params": 6.0,
"architectures": "GPTJForCausalLM",
"weight_type": "Original",
"status": "FINISHED",
"submitted_time": "2024-02-05T23:12:19Z",
"model_type": "🟢 : pretrained",
"source": "script",
"job_id": 387,
"job_start_time": "2024-04-05T04-41-08.855450",
"main_language": "English",
"eval_version": "1.1.0",
"result_metrics": {
"enem_challenge": 0.21973407977606718,
"bluex": 0.2364394993045897,
"oab_exams": 0.25466970387243737,
"assin2_rte": 0.3582588385476761,
"assin2_sts": 0.14562487212003206,
"faquad_nli": 0.4396551724137931,
"hatebr_offensive": 0.6588376162844248,
"portuguese_hate_speech": 0.5468502264582175,
"tweetsentbr": 0.3534145441122185
},
"result_metrics_average": 0.3570538392099396,
"result_metrics_npm": 0.04038629796455701
}