{"model": "allknowingroger/ROGERphi-7B-slerp", "base_model": "", "revision": "a92f90ae5e4286daa2399df4951a3347aaf414e1", "precision": "bfloat16", "params": 7.242, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-26T16:32:40Z", "model_type": "\ud83e\udd1d : \ud83e\udd1d base merges and moerges", "job_id": -1, "job_start_time": null, "use_chat_template": false}