{"model": "mistralai/Mixtral-8x7B-v0.1", "base_model": "", "revision": "main", "private": false, "precision": "bfloat16", "params": 46.703, "architectures": "MixtralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-02-05T23:05:20Z", "model_type": "\ud83d\udfe2 : pretrained", "source": "script", "job_id": -1, "job_start_time": null}