{
  "_name_or_path": "/nfs-gpu/xlstm/converted_model_checkpoints/dclm_mLSTMv1_7B_ctx8192_sep_finetune_2024-11-29T17:03:51_0_550000",
  "add_embedding_dropout": false,
  "add_forward_backend_padding": false,
  "add_out_norm": true,
  "add_post_blocks_norm": true,
  "add_post_norm": false,
  "add_qk_norm": false,
  "architectures": [
    "xLSTMForCausalLM"
  ],
  "bos_token_id": 0,
  "cell_norm_eps": 1e-06,
  "embedding_dim": 4096,
  "eos_token_id": 2,
  "ffn_proj_factor": 2.667,
  "ffn_round_up_to_multiple_of": 64,
  "force_bos_token_insert": true,
  "forward_backend_name": "chunkwise--triton_limit_chunk",
  "gate_soft_cap": 15.0,
  "head_dim": 512,
  "igate_bias_init_range": -10.0,
  "mlstm_round_up_to_multiple_of": 64,
  "model_type": "xlstm",
  "norm_eps": 1e-06,
  "norm_reduction_force_float32": true,
  "num_blocks": 32,
  "num_heads": 8,
  "output_logit_soft_cap": 30.0,
  "pad_token_id": 1,
  "qk_dim_factor": 0.5,
  "return_last_states": true,
  "step_backend_name": "triton_fused",
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.47.0.dev0",
  "use_bias": false,
  "use_cache": true,
  "v_dim_factor": 1.0,
  "vocab_size": 50304
}
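
For reference, a minimal sketch of loading a checkpoint whose directory contains this config.json via Hugging Face transformers. The local path "./xlstm-7b" is a placeholder, not the real repository name, and on transformers versions that do not ship the xlstm model type, trust_remote_code=True may be required:

# Minimal sketch, assuming a local directory "./xlstm-7b" (placeholder)
# that holds this config.json plus the model weights.
import torch
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained("./xlstm-7b", trust_remote_code=True)
print(config.model_type)     # "xlstm"
print(config.num_blocks)     # 32 blocks
print(config.embedding_dim)  # 4096

model = AutoModelForCausalLM.from_pretrained(
    "./xlstm-7b",
    torch_dtype=torch.float32,  # matches "torch_dtype" in this config
    trust_remote_code=True,
)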