{
    "attention_qkv_bias": true,
    "codebook_size": 1024,
    "dim": 2048,
    "dropout": 0.0,
    "fast_attention_qkv_bias": false,
    "fast_dim": 1024,
    "fast_head_dim": 64,
    "fast_intermediate_size": 4096,
    "fast_n_head": 16,
    "fast_n_local_heads": 2,
    "head_dim": 128,
    "initializer_range": 0.02,
    "intermediate_size": 11008,
    "is_reward_model": false,
    "max_seq_len": 8192,
    "model_type": "dual_ar",
    "n_fast_layer": 4,
    "n_head": 16,
    "n_layer": 36,
    "n_local_heads": 2,
    "norm_eps": 1e-06,
    "num_codebooks": 8,
    "rope_base": 1000000,
    "share_codebook_embeddings": true,
    "tie_word_embeddings": true,
    "use_gradient_checkpointing": true,
    "vocab_size": 151936
}
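
For reference, a minimal sketch of how a configuration like the one above might be loaded and sanity-checked in Python. The file name config.json, the DualARConfig dataclass, and the selection of fields are assumptions for illustration only; this is not the model's actual config class.

import json
from dataclasses import dataclass

# Illustrative container for a subset of the fields above (sketch only).
@dataclass
class DualARConfig:
    dim: int
    n_layer: int
    n_head: int
    n_local_heads: int
    head_dim: int
    intermediate_size: int
    fast_dim: int
    n_fast_layer: int
    fast_n_head: int
    fast_n_local_heads: int
    fast_head_dim: int
    fast_intermediate_size: int
    num_codebooks: int
    codebook_size: int
    vocab_size: int
    max_seq_len: int
    rope_base: int
    norm_eps: float

with open("config.json") as f:  # assumed file name
    raw = json.load(f)

# Keep only the keys the dataclass declares; the remaining flags are ignored here.
cfg = DualARConfig(**{k: raw[k] for k in DualARConfig.__dataclass_fields__})

# Relations implied by the numbers above: per-head width of the main ("slow")
# transformer, and grouped-query attention (16 query heads sharing 2 KV heads).
assert cfg.dim == cfg.n_head * cfg.head_dim        # 2048 = 16 * 128
assert cfg.n_head % cfg.n_local_heads == 0         # 16 % 2 == 0
print(f"slow: {cfg.n_layer} layers, fast: {cfg.n_fast_layer} layers, "
      f"{cfg.num_codebooks} codebooks x {cfg.codebook_size} entries")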