{"model": "LLM360/K2", "base_model": null, "revision": "f08338984d2762cd13e4855adf162c7f986600eb", "precision": "float16", "params": 65.286, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-12T12:28:13Z", "model_type": "pretrained", "job_id": -1, "job_start_time": null, "use_chat_template": false} |