{"model": "allknowingroger/MultiCalm-7B-slerp", "base_model": "", "revision": "1c23540e907fab4dfe0ef66edd0003e764bfe568", "precision": "bfloat16", "params": 7.242, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T16:26:58Z", "model_type": "\ud83e\udd1d : \ud83e\udd1d base merges and moerges", "job_id": -1, "job_start_time": null, "use_chat_template": false}