YikangS committed on
Commit
243d98f
·
1 Parent(s): a7ee27c

update model config

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -13,9 +13,9 @@
13
  "moe_num_experts": 8,
14
  "moe_top_k": 2,
15
  "n_embd": 2048,
16
- "n_head": 16,
17
  "n_layer": 24,
18
  "n_positions": 4096,
 
19
  "num_key_value_heads": 16,
20
  "num_layers": 24,
21
  "rms_norm_eps": 1e-05,
 
13
  "moe_num_experts": 8,
14
  "moe_top_k": 2,
15
  "n_embd": 2048,
 
16
  "n_layer": 24,
17
  "n_positions": 4096,
18
+ "num_attention_heads": 32,
19
  "num_key_value_heads": 16,
20
  "num_layers": 24,
21
  "rms_norm_eps": 1e-05,