ybelkada committed
Commit 270ba2d
1 Parent(s): 3807d7c

Update config.json

Files changed (1)
  1. config.json +0 -1
config.json CHANGED
@@ -16,7 +16,6 @@
   "n_layer": 24,
   "num_attention_heads": 16,
   "offset_alibi": 100,
-  "pretraining_pp": 2,
   "pretraining_tp": 2,
   "seq_length": 4096,
   "skip_bias_add": true,