{ "model": "huggyllama/llama-30b", "base_model": "", "revision": "main", "private": false, "precision": "float16", "params": 32.529, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2024-02-05T23:05:30Z", "model_type": "🟢 : pretrained", "source": "script", "job_id": 176, "job_start_time": "2024-02-10T05-59-10.907847", "eval_version": "1.0.0", "result_metrics": { "enem_challenge": 0.6186144156752974, "bluex": 0.5034770514603616, "oab_exams": 0.4214123006833713, "assin2_rte": 0.6994823029869264, "assin2_sts": 0.521939545377829, "faquad_nli": 0.5100755946706865, "sparrow_pt": 0.32914824721309877 }, "result_metrics_average": 0.514878494009653, "result_metrics_npm": 0.3114125756635208 }