{"model": "mistralai/Mixtral-8x7B-v0.1", "base_model": null, "revision": "985aa055896a8f943d4a9f2572e6ea1341823841", "precision": "bfloat16", "params": 46.703, "architectures": "MixtralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-12T12:24:50Z", "model_type": "pretrained", "job_id": -1, "job_start_time": null, "use_chat_template": false}