{ "architectures": [ "MptForCausalLM" ], "attn_config": { "model_type": "" }, "d_model": 256, "emb_pdrop": 0.0, "embedding_fraction": 1.0, "expansion_ratio": 4, "hidden_act": "gelu", "init_device": "cpu", "initializer_range": 0.02, "intermediate_size": 1024, "layer_norm_epsilon": 1e-05, "learned_pos_emb": true, "logit_scale": null, "max_seq_len": 2048, "model_type": "mpt", "n_heads": 8, "n_layers": 2, "no_bias": true, "norm_type": "low_precision_layernorm", "num_key_value_heads": 8, "resid_pdrop": 0.0, "torch_dtype": "float32", "transformers_version": "4.42.4", "use_cache": false, "verbose": 0, "vocab_size": 2000 }