Weights mismatch?

#1 opened by wchen22

I'm just trying to load the model and I'm getting this error:

ValueError                                Traceback (most recent call last)
in <cell line: 3>()
      1 from mlx_lm import load, generate
      2 
----> 3 model, tokenizer = load("mlx-community/Llama-3-8B-Instruct-262k-4bit")
      4 response = generate(model, tokenizer, prompt="hello", verbose=True)

2 frames
/usr/local/lib/python3.10/dist-packages/mlx_lm/utils.py in load(path_or_hf_repo, tokenizer_config, adapter_file, lazy)
    354     model_path = get_model_path(path_or_hf_repo)
    355 
--> 356     model = load_model(model_path, lazy)
    357     if adapter_file is not None:
    358         model = apply_lora_layers(model, adapter_file)

/usr/local/lib/python3.10/dist-packages/mlx_lm/utils.py in load_model(model_path, lazy)
    318     )
    319 
--> 320     model.load_weights(list(weights.items()))
    321 
    322     if not lazy:

/usr/local/lib/python3.10/dist-packages/mlx/nn/layers/base.py in load_weights(self, file_or_weights, strict)
    162         if extras := (new_weights.keys() - curr_weights.keys()):
    163             extras = " ".join(extras)
--> 164             raise ValueError(f"Received parameters not in model: {extras}.")
    165         if missing := (curr_weights.keys() - new_weights.keys()):
    166             missing = " ".join(missing)

ValueError: Received parameters not in model: model.embed_tokens.scales model.embed_tokens.biases.
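
For completeness, the whole cell is just this, nothing custom:

from mlx_lm import load, generate

# Downloads the repo from the Hub and rebuilds the quantized model;
# this is the call that raises the ValueError above.
model, tokenizer = load("mlx-community/Llama-3-8B-Instruct-262k-4bit")
response = generate(model, tokenizer, prompt="hello", verbose=True)

The two extras in the error message look like quantization parameters for the embedding layer itself (model.embed_tokens.scales and model.embed_tokens.biases). In case it helps, the downloaded shards can be inspected directly to confirm which parameter names the checkpoint ships; the file name below is only a placeholder for whichever .safetensors shard ends up in the local Hugging Face cache:

import mlx.core as mx

# Placeholder path: point this at one of the downloaded *.safetensors shards.
weights = mx.load("model.safetensors")
print(sorted(k for k in weights if k.startswith("model.embed_tokens")))
# The extras from the error (scales and biases) should appear here if the
# embedding layer was quantized when the checkpoint was exported.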
