Configuration file (381 Bytes, commit 767ac6a):
{
"architectures": [
"MambaForCausalLM"
],
"bias": false,
"conv_bias": true,
"d_conv": 4,
"d_inner": 1536,
"d_model": 768,
"d_state": 16,
"dt_rank": 48,
"expand": 2,
"initializer_range": 0.02,
"model_type": "mamba",
"n_layer": 24,
"pad_vocab_size_multiple": 8,
"torch_dtype": "float32",
"transformers_version": "4.35.2",
"vocab_size": 50280
}
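Several fields in this config are derived from the others under the usual Mamba conventions: the expanded inner dimension is d_inner = expand * d_model (2 * 768 = 1536), the delta-projection rank defaults to dt_rank = ceil(d_model / 16) (ceil(768 / 16) = 48), and the embedding table is padded up to a multiple of pad_vocab_size_multiple. The minimal sketch below checks these relationships; it assumes the JSON above has been saved locally as config.json (a hypothetical path).

import json
import math

# Hypothetical path: the config shown above, saved as a local file.
with open("config.json") as f:
    cfg = json.load(f)

# Inner dimension follows the Mamba convention d_inner = expand * d_model.
assert cfg["d_inner"] == cfg["expand"] * cfg["d_model"]      # 2 * 768 = 1536

# Default delta-projection rank is ceil(d_model / 16).
assert cfg["dt_rank"] == math.ceil(cfg["d_model"] / 16)      # ceil(768 / 16) = 48

# Vocabulary size is padded up to a multiple of pad_vocab_size_multiple.
mult = cfg["pad_vocab_size_multiple"]
padded_vocab = math.ceil(cfg["vocab_size"] / mult) * mult     # 50280 is already a multiple of 8
print(padded_vocab)                                           # -> 50280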