{ "model": "huggyllama/llama-65b", "base_model": "", "revision": "main", "private": false, "precision": "float16", "params": 65.286, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2024-02-05T23:05:56Z", "model_type": "🟢 : pretrained", "source": "script", "job_id": 288, "job_start_time": "2024-03-03T23-57-44.587602", "eval_version": "1.1.0", "result_metrics": { "enem_challenge": 0.6682995101469559, "bluex": 0.5535465924895688, "oab_exams": 0.46378132118451026, "assin2_rte": 0.7707707349316295, "assin2_sts": 0.6875101964208782, "faquad_nli": 0.4396551724137931, "hatebr_offensive": 0.6774236636123208, "portuguese_hate_speech": 0.6673631928525245, "tweetsentbr": 0.6729660354724708 }, "result_metrics_average": 0.6223684910582947, "result_metrics_npm": 0.413676060283789 }