{ "model": "NousResearch/Nous-Hermes-2-Mistral-7B-DPO", "base_model": "", "revision": "main", "private": false, "precision": "bfloat16", "params": 7.242, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "FINISHED", "submitted_time": "2024-02-27T00:37:25Z", "model_type": "💬 : chat models (RLHF, DPO, IFT, ...)", "source": "leaderboard", "job_id": 267, "job_start_time": "2024-02-27T02-51-13.508742", "eval_version": "1.1.0", "result_metrics": { "enem_challenge": 0.6326102169349195, "bluex": 0.541029207232267, "oab_exams": 0.43735763097949887, "assin2_rte": 0.601464720105945, "assin2_sts": 0.6915650379510005, "faquad_nli": 0.7138364779874213, "hatebr_offensive": 0.7767581619154933, "portuguese_hate_speech": 0.7090851811137625, "tweetsentbr": 0.4521585213804042 }, "result_metrics_average": 0.6173183506223013, "result_metrics_npm": 0.4163010463383218, "main_language": "English" }