{
"model": "meta-llama/Llama-2-7b-hf",
"base_model": "",
"revision": "main",
"private": false,
"precision": "float16",
"params": 6.738,
"architectures": "LlamaForCausalLM",
"weight_type": "Original",
"status": "FINISHED",
"submitted_time": "2024-02-05T22:59:42Z",
"model_type": "🟢 : pretrained",
"source": "script",
"job_id": 217,
"job_start_time": "2024-02-16T17-57-01.707450",
"eval_version": "1.1.0",
"result_metrics": {
"enem_challenge": 0.3191042687193842,
"bluex": 0.3129346314325452,
"oab_exams": 0.35444191343963555,
"assin2_rte": 0.6701769945334302,
"assin2_sts": 0.3110321340448155,
"faquad_nli": 0.5387420425880172,
"hatebr_offensive": 0.7515535753827598,
"portuguese_hate_speech": 0.552600996126176,
"tweetsentbr": 0.5906311367573921
},
"result_metrics_average": 0.48902418811379506,
"result_metrics_npm": 0.2489201920526417
}