{ "_name_or_path": "mistralai/Mistral-7B-v0.1", "architectures": [ "SparseMistralforCausalLM" ], "attention_dropout": 0.0, "auto_map": { "AutoConfig": "sparsification_sftt.SparseMistralConfig", "AutoModelForCausalLM": "sparsification_sftt.SparseMistralforCausalLM" }, "bos_token_id": 1, "cut_pre_attn": false, "cut_pre_mlp": false, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 32768, "model_type": "sparse_mistral", "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "sliding_window": 4096, "thresholds": [ 0.0247037410736084, 0.03390511870384216, 0.04450671002268791, 0.05650851130485535, 0.07611145079135895, 0.09411414712667465, 0.10531583428382874, 0.11991801857948303, 0.1259189248085022, 0.12691907584667206, 0.1307196468114853, 0.13372008502483368, 0.1375206559896469, 0.1405211091041565, 0.15412315726280212, 0.16792522370815277, 0.18552786111831665, 0.19032858312129974, 0.20693106949329376, 0.2187328338623047, 0.22653400897979736, 0.2371356040239334, 0.23433518409729004, 0.2371356040239334, 0.24213634431362152, 0.24213634431362152, 0.240136057138443, 0.24213634431362152, 0.24313649535179138, 0.240136057138443, 0.2371356040239334, 0.22753416001796722 ], "tie_word_embeddings": false, "torch_dtype": "bfloat16", "transformers_version": "4.36.2", "use_cache": false, "use_relu": false, "use_resilu": false, "use_sparse_model": true, "use_sparse_predictor": false, "use_sparse_regularization": false, "vocab_size": 32000 }