{ "model": "huggyllama/llama-30b", "base_model": "", "revision": "main", "private": false, "precision": "float16", "params": 32.529, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2024-02-05T23:05:30Z", "model_type": "🟢 : pretrained", "source": "script", "job_id": 289, "job_start_time": "2024-03-04T06-00-26.904208", "eval_version": "1.1.0", "result_metrics": { "enem_challenge": 0.6130160951714486, "bluex": 0.5187760778859527, "oab_exams": 0.4191343963553531, "assin2_rte": 0.7293812533558872, "assin2_sts": 0.6049405695262198, "faquad_nli": 0.519508339133766, "hatebr_offensive": 0.6823108859287468, "portuguese_hate_speech": 0.6828759020466106, "tweetsentbr": 0.6009045227861195 }, "result_metrics_average": 0.5967608935766783, "result_metrics_npm": 0.3848156782676072 }