{"model": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO", "base_model": "", "revision": "main", "private": false, "precision": "float16", "params": 46.703, "architectures": "MixtralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-02-21T13:34:22Z", "model_type": "\ud83d\udd36 : fine-tuned/fp on domain-specific datasets", "source": "leaderboard", "job_id": -1, "job_start_time": null}