{ "attn_cfg": {}, "attn_layer_idx": [], "d_intermediate": 0, "d_model": 768, "fused_add_norm": true, "n_layer": 24, "pad_vocab_size_multiple": 8, "residual_in_fp32": true, "rms_norm": true, "ssm_cfg": {}, "tie_embeddings": true, "vocab_size": 50277 }