{
"model": "NousResearch/Nous-Capybara-34B",
"base_model": "",
"revision": "main",
"private": false,
"precision": "bfloat16",
"params": 34.0,
"architectures": "LlamaForCausalLM",
"weight_type": "Original",
"main_language": "English",
"status": "PENDING",
"submitted_time": "2024-04-26T07:21:50Z",
"model_type": "🔶 : fine-tuned/fp on domain-specific datasets",
"source": "leaderboard",
"job_id": -1,
"job_start_time": null
}