Update configuration_llama.py — required for the model to load, since `rope_scaling` must be either None or a dictionary

#1
by TheBloke - opened
Files changed (1) hide show
  1. configuration_llama.py +1 -1
configuration_llama.py CHANGED
@@ -124,7 +124,7 @@ class LlamaConfig(PretrainedConfig):
124
  pretraining_tp=1,
125
  tie_word_embeddings=False,
126
  rope_theta=10000,
127
- rope_scaling="yarn",
128
  attention_bias=False,
129
  **kwargs,
130
  ):
 
124
  pretraining_tp=1,
125
  tie_word_embeddings=False,
126
  rope_theta=10000,
127
+ rope_scaling=None,
128
  attention_bias=False,
129
  **kwargs,
130
  ):