{ "_name_or_path": "huseinzol05-dummy-mamba-1.4b/checkpoint-400", "architectures": [ "MambaLMHeadModel" ], "auto_map": { "AutoModel": "modeling.MambaLMHeadModel" }, "d_model": 2048, "fused_add_norm": true, "hidden_size": 2048, "model_type": "mamba", "n_layer": 48, "pad_vocab_size_multiple": 8, "residual_in_fp32": true, "rms_norm": true, "ssm_cfg": {}, "torch_dtype": "float32", "transformers_version": "4.36.0.dev0", "vocab_size": 32000 }