{
"model": "Unbabel/TowerBase-7B-v0.1",
"base_model": "",
"revision": "main",
"private": false,
"precision": "bfloat16",
"params": 6.738,
"architectures": "LlamaForCausalLM",
"weight_type": "Original",
"status": "FINISHED",
"submitted_time": "2024-02-05T23:04:12Z",
"model_type": "🆎 : language adapted models (FP, FT, ...)",
"source": "script",
"job_id": 23,
"job_start_time": "2024-02-06T23-18-53.380050",
"eval_version": "1.0.0",
"result_metrics": {
"enem_challenge": 0.3610916724982505,
"bluex": 0.3129346314325452,
"oab_exams": 0.34943052391799545,
"assin2_rte": 0.424025213670537,
"assin2_sts": 0.09020044429155334,
"faquad_nli": 0.45217350015431734,
"sparrow_pt": 0.3127165512648607
},
"result_metrics_average": 0.32893893389000856,
"result_metrics_npm": 0.057678483940730205
}