{"model": "allknowingroger/Neuralgamma-2.5-7B-slerp", "base_model": "", "revision": "13f38b8e3ca17cee9a7546b103ffa0854a5a29b9", "precision": "bfloat16", "params": 7.242, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T16:33:17Z", "model_type": "\ud83e\udd1d : \ud83e\udd1d base merges and moerges", "job_id": -1, "job_start_time": null, "use_chat_template": false}