{
  "_name_or_path": "/scr/jay/ckpt/2024-03-11/Mistral_Sparse_refined_web_50p_no_adapter",
  "architectures": [
    "SparseMistralforCausalLM"
  ],
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "sparsification_sftt.SparseMistralConfig",
    "AutoModelForCausalLM": "sparsification_sftt.SparseMistralforCausalLM"
  },
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 14336,
  "max_position_embeddings": 32768,
  "model_type": "sparse_mistral",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 8,
  "rms_norm_eps": 1e-05,
  "rope_theta": 10000.0,
  "sliding_window": 4096,
  "thresholds": [
    0.017051145434379578,
    0.023069201037287712,
    0.049147434532642365,
    0.051153454929590225,
    0.061183542013168335,
    0.07121363282203674,
    0.0732196494936943,
    0.07923770695924759,
    0.08324974030256271,
    0.08124372363090515,
    0.089267797768116,
    0.09127381443977356,
    0.10130390524864197,
    0.0992978885769844,
    0.10732196271419525,
    0.12337010353803635,
    0.14343027770519257,
    0.16148445010185242,
    0.17953860759735107,
    0.1935807317495346,
    0.1995987892150879,
    0.2196589708328247,
    0.2196589708328247,
    0.23169508576393127,
    0.2357071191072464,
    0.23370109498500824,
    0.225677028298378,
    0.22968906164169312,
    0.225677028298378,
    0.22768303751945496,
    0.2457372099161148,
    0.2678034007549286
  ],
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.36.2",
  "use_cache": false,
  "use_relu": false,
  "use_sparse_model": true,
  "use_sparse_predictor": false,
  "use_sparse_regularization": false,
  "vocab_size": 32000
}