{
  "model": "qwen",
  "hidden_act": "silu",
  "intermediate_size": 13696,
  "initializer_range": 0.02,
  "layer_norm_eps": 1e-06,
  "hidden_size": 5120,
  "num_attention_heads": 40,
  "num_hidden_layers": 40,
  "rotary_emb_base": 10000,
  "rotary_pct": 1.0,
  "scale_attn_weights": true,
  "seq_length": 2048,
  "tie_word_embeddings": false,
  "use_cache": true,
  "flash_attention": "flash_attn_2",
  "vocab_size": 152064,
  "use_dynamic_ntk": true,
  "use_logn_attn": true,
  "segment_vocab_size": 0,
  "skip_init": true,
  "rope_rank": "updown",
  "max_position_embeddings": 8192
}