{"model": "teknium/CollectiveCognition-v1.1-Mistral-7B", "base_model": null, "revision": "5f57f70ec99450c70da2540e94dd7fd67be4b23c", "precision": "float16", "params": 7.0, "architectures": "MistralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-12T09:26:36Z", "model_type": "finetuned", "job_id": -1, "job_start_time": null, "use_chat_template": false} |