{
"model": "openlm-research/open_llama_7b",
"base_model": "",
"revision": "main",
"private": false,
"precision": "float16",
"params": 7.0,
"architectures": "LlamaForCausalLM",
"weight_type": "Original",
"status": "FINISHED",
"submitted_time": "2024-02-11T13:34:22Z",
"model_type": "🟢 : pretrained",
"source": "script",
"job_id": 237,
"job_start_time": "2024-02-17T11-32-23.927613",
"eval_version": "1.1.0",
"result_metrics": {
"enem_challenge": 0.21203638908327502,
"bluex": 0.21974965229485396,
"oab_exams": 0.2765375854214123,
"assin2_rte": 0.35551526385531756,
"assin2_sts": 0.16402785934407932,
"faquad_nli": 0.4396551724137931,
"hatebr_offensive": 0.3349186726374015,
"portuguese_hate_speech": 0.4118866620594333,
"tweetsentbr": 0.368042700579295
},
"result_metrics_average": 0.3091522175209846,
"result_metrics_npm": -0.05674718342028821
}