Mingyuyang-1 committed on
Commit 66bb400 · 1 Parent(s): 264bdbb

Update mla_layer_config.json

Files changed (1)
  1. mla_layer_config.json +1 -4
mla_layer_config.json CHANGED
@@ -99,10 +99,7 @@
   "rms_norm_eps": 1e-06,
   "rope_scaling": {
     "factor": 32.0,
-    "beta_fast": 32.0,
-    "beta_slow": 1.0,
-    "mscale": 1.0,
-    "mscale_all_dim": 0.0,
+    "original_max_position_embeddings": 8192,
     "type": "yarn"
   },
   "rope_theta": 500000.0,