{ "_name_or_path": "mistralai/Mistral-7B-v0.1", "architectures": [ "SparseMistralforCausalLM" ], "attention_dropout": 0.0, "auto_map": { "AutoConfig": "sparsification_sftt.SparseMistralConfig", "AutoModelForCausalLM": "sparsification_sftt.SparseMistralforCausalLM" }, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 32768, "model_type": "sparse_mistral", "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "sliding_window": 4096, "thresholds": [ 0.06519558280706406, 0.08726178109645844, 0.10732196271419525, 0.13340020179748535, 0.16349045932292938, 0.18956869840621948, 0.20561684668064117, 0.22367100417613983, 0.23370109498500824, 0.23971915245056152, 0.24373118579387665, 0.24774321913719177, 0.25376129150390625, 0.2577733099460602, 0.26379138231277466, 0.26980942487716675, 0.27382147312164307, 0.27582746744155884, 0.27582746744155884, 0.27582746744155884, 0.27582746744155884, 0.27582746744155884, 0.27582746744155884, 0.27582746744155884, 0.27582746744155884, 0.27582746744155884, 0.27582746744155884, 0.27582746744155884, 0.27582746744155884, 0.277833491563797, 0.31594783067703247, 0.4704112410545349 ], "tie_word_embeddings": false, "torch_dtype": "bfloat16", "transformers_version": "4.36.2", "use_cache": false, "use_relu": false, "use_sparse_model": true, "use_sparse_predictor": false, "use_sparse_regularization": false, "vocab_size": 32000 }