{"d_model": 2048, "n_layer": 46, "vocab_size": 50277, "ssm_cfg": {}, "rms_norm": true, "residual_in_fp32": true, "fused_add_norm": true, "pad_vocab_size_multiple": 16, "reorder_and_upcast_attn": false, "scale_attn_by_inverse_layer_idx": true, "n_positions": 2048, "n_embd": 1024, "n_head": 16, "use_flash_attn": true, "fused_dropout_add_ln": true, "fused_mlp": true, "fused_bias_fc": true, "use_fast_path": true}