{ "_name_or_path": "mosaicml/mpt-30b-chat", "architectures": [ "ExtendedMptForCausalLM" ], "attn_config": { "model_type": "" }, "auto_map": { "AutoConfig": "mosaicml/mpt-30b-chat--configuration_mpt.MPTConfig", "AutoModelForCausalLM": "mosaicml/mpt-30b-chat--modeling_mpt.MPTForCausalLM" }, "d_model": 7168, "emb_pdrop": 0, "embedding_fraction": 1.0, "expansion_ratio": 4, "init_config": { "emb_init_std": null, "emb_init_uniform_lim": null, "fan_mode": "fan_in", "init_div_is_residual": true, "init_gain": 0.0, "init_nonlinearity": "relu", "init_std": null, "name": "kaiming_normal_", "verbose": 0 }, "init_device": "cpu", "initializer_range": 0.02, "layer_norm_epsilon": 1e-05, "learned_pos_emb": true, "logit_scale": null, "max_seq_len": 8192, "max_seq_len_train": 2048, "model_type": "extended-mpt", "n_heads": 64, "n_layers": 48, "no_bias": true, "norm_type": "low_precision_layernorm", "resid_pdrop": 0, "tokenizer_name": "sam-mosaic/gpt-neox-20b-chatml", "torch_dtype": "float32", "transformers_version": "4.33.0", "use_cache": false, "use_external_mind": true, "use_external_mind_by_layer": [ true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true ], "verbose": 0, "vocab_size": 50432 }