{
  "architectures": [
    "MyLLaMa"
  ],
  "auto_map": {
    "AutoConfig": "configure_for_hf.MyLLaMaConfig",
    "AutoModelForCausalLM": "configure_for_hf.MyLLaMa"
  },
  "embed_dim": 1536,
  "model_type": "LLaMa",
  "n_chckpnt_segments": 24,
  "n_heads": 24,
  "n_layers": 24,
  "torch_dtype": "float32",
  "transformers_version": "4.47.0.dev0"
}
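A minimal loading sketch for a config like this: the auto_map entries point AutoConfig and AutoModelForCausalLM at the MyLLaMaConfig and MyLLaMa classes defined in configure_for_hf.py shipped alongside the config, so loading requires trust_remote_code=True. The repository id below is a placeholder, not the actual repo name.

# Minimal sketch of consuming this config (repository id is a placeholder).
# Because auto_map maps the Auto classes to code in configure_for_hf.py,
# transformers must be allowed to run that repo-local code.
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained("user/my-llama", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("user/my-llama", trust_remote_code=True)
print(type(model).__name__)  # expected: MyLLaMa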