bwarner committed on
Commit
f87846c
1 Parent(s): 52efb96

Bump `max_position_embeddings` to 8192

Browse files

also harmonize `layer_norm_eps` with `norm_eps`, although the former isn't used

Files changed (1) hide show
  1. config.json +2 -2
config.json CHANGED
@@ -23,10 +23,10 @@
23
  "initializer_cutoff_factor": 2.0,
24
  "initializer_range": 0.02,
25
  "intermediate_size": 2624,
26
- "layer_norm_eps": 1e-12,
27
  "local_attention": 128,
28
  "local_rope_theta": 10000.0,
29
- "max_position_embeddings": 512,
30
  "mlp_bias": false,
31
  "mlp_dropout": 0.0,
32
  "model_type": "modernbert",
 
23
  "initializer_cutoff_factor": 2.0,
24
  "initializer_range": 0.02,
25
  "intermediate_size": 2624,
26
+ "layer_norm_eps": 1e-5,
27
  "local_attention": 128,
28
  "local_rope_theta": 10000.0,
29
+ "max_position_embeddings": 8192,
30
  "mlp_bias": false,
31
  "mlp_dropout": 0.0,
32
  "model_type": "modernbert",