{ "model": "lmsys/vicuna-7b-v1.5", "base_model": "", "revision": "main", "private": false, "precision": "float16", "params": 7.0, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "RERUN", "submitted_time": "2024-02-27T16:58:43Z", "model_type": "💬 : chat models (RLHF, DPO, IFT, ...)", "source": "leaderboard", "job_id": -1, "job_start_time": null }