{"model": "meta-llama/Llama-2-70b-hf", "base_model": null, "revision": "3aba440b59558f995867ba6e1f58f21d0336b5bb", "precision": "float16", "params": 68.977, "architectures": "LlamaForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-12T13:18:24Z", "model_type": "pretrained", "job_id": -1, "job_start_time": null, "use_chat_template": false}