ArthurZ HF staff committed on
Commit
59e7e25
1 Parent(s): e871fbc

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +0 -2
config.json CHANGED
@@ -12,11 +12,9 @@
12
  "layer_norm_eps": 1e-05,
13
  "max_position_embeddings": 16384,
14
  "model_type": "persimmon",
15
- "norm_eps": 1e-05,
16
  "num_attention_heads": 64,
17
  "num_hidden_layers": 36,
18
  "num_key_value_heads": 64,
19
- "pretraining_tp": 1,
20
  "qk_layernorm": true,
21
  "rope_scaling": null,
22
  "rope_theta": 25000.0,
 
12
  "layer_norm_eps": 1e-05,
13
  "max_position_embeddings": 16384,
14
  "model_type": "persimmon",
 
15
  "num_attention_heads": 64,
16
  "num_hidden_layers": 36,
17
  "num_key_value_heads": 64,
 
18
  "qk_layernorm": true,
19
  "rope_scaling": null,
20
  "rope_theta": 25000.0,