nathanrchn commited on
Commit
fa1e762
1 Parent(s): bf1dba4

Update config.json

Files changed (1)
  1. config.json +1 -1
config.json CHANGED
@@ -14,7 +14,7 @@
   "max_window_layers": 21,
   "model_type": "qwen2",
   "num_attention_heads": 16,
-  "num_hidden_layers": 24,
+  "num_hidden_layers": 20,
   "num_key_value_heads": 16,
   "rms_norm_eps": 1e-06,
   "rope_theta": 1000000.0,