Mingyuyang-1 committed
Commit 574f332 · Parent: 3aaf878

Update mla_layer_config.json

Files changed (1)
  1. mla_layer_config.json +1 -4
mla_layer_config.json CHANGED
@@ -98,10 +98,7 @@
   "rms_norm_eps": 1e-06,
   "rope_scaling": {
     "factor": 32.0,
-    "beta_fast": 32.0,
-    "beta_slow": 1.0,
-    "mscale": 1.0,
-    "mscale_all_dim": 0.0,
+    "original_max_position_embeddings": 8192,
     "type": "yarn"
   },
   "rope_theta": 500000.0,