{
    "d_model": 280,
    "n_layer": 6,
    "vocab_size": 4096,
    "ssm_cfg": {},
    "rms_norm": true,
    "residual_in_fp32": true,
    "fused_add_norm": true,
    "pad_vocab_size_multiple": 8
}
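
These keys match the fields of the MambaConfig dataclass from the mamba_ssm package, so the config can be loaded directly into that class. The sketch below assumes the checkpoint targets mamba_ssm and that the file is named config.json; both are assumptions, not confirmed by the file itself, and a different library would need its own loader.

# Minimal sketch, assuming a mamba_ssm-style checkpoint and a local config.json.
import json

from mamba_ssm.models.config_mamba import MambaConfig
from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel

# Read the JSON shown above; every key maps onto a MambaConfig field.
with open("config.json") as f:
    cfg_dict = json.load(f)

config = MambaConfig(**cfg_dict)

# Build an untrained model with this architecture (d_model=280, 6 layers,
# 4096-token vocabulary padded to a multiple of 8). Trained weights would be
# loaded separately, e.g. via MambaLMHeadModel.from_pretrained.
model = MambaLMHeadModel(config)

Constructing the config this way keeps the architecture definition in one place: the same JSON that ships with the weights drives model instantiation, so the code never hard-codes dimensions that could drift out of sync with the checkpoint.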