{"model": "allknowingroger/Neuralcoven-7B-slerp", "base_model": "", "revision": "129b40a7fd816f679ef5d4ab29fc77345f33a7b1", "precision": "bfloat16", "params": 7.242, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T16:28:14Z", "model_type": "\ud83e\udd1d : \ud83e\udd1d base merges and moerges", "job_id": -1, "job_start_time": null, "use_chat_template": false}