{"model": "mistralai/Mistral-7B-v0.3", "base_model": null, "revision": "b67d6a03ca097c5122fa65904fce0413500bf8c8", "precision": "bfloat16", "params": 7.248, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-12T12:24:43Z", "model_type": "pretrained", "job_id": -1, "job_start_time": null, "use_chat_template": false}