{"d_model": 1792, "n_layer": 48, "vocab_size": 256, "ssm_cfg": {"expand": 2}, "rms_norm": false, "residual_in_fp32": true, "fused_add_norm": true, "pad_vocab_size_multiple": 8, "tie_embeddings": false}