Fix seq_length to align with the BLOOM paper

#46
Files changed (1)
  1. config.json (+1, −1)
config.json CHANGED
@@ -21,7 +21,7 @@
21
  "num_attention_heads": 16,
22
  "offset_alibi": 100,
23
  "pretraining_tp": 2,
24
- "seq_length": 4096,
25
  "skip_bias_add": true,
26
  "skip_bias_add_qkv": false,
27
  "transformers_version": "4.20.0",
 
21
  "num_attention_heads": 16,
22
  "offset_alibi": 100,
23
  "pretraining_tp": 2,
24
+ "seq_length": 2048,
25
  "skip_bias_add": true,
26
  "skip_bias_add_qkv": false,
27
  "transformers_version": "4.20.0",