Can't load

#1
by edisonzf2020 - opened

Traceback (most recent call last):
File "", line 198, in _run_module_as_main
File "", line 88, in _run_code
File "/Users/fanmac/.miniconda3/envs/mlx/lib/python3.11/site-packages/mlx_lm/server.py", line 447, in
MODEL, TOKENIZER = load(
^^^^^
File "/Users/fanmac/.miniconda3/envs/mlx/lib/python3.11/site-packages/mlx_lm/utils.py", line 381, in load
model = load_model(model_path, lazy)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/fanmac/.miniconda3/envs/mlx/lib/python3.11/site-packages/mlx_lm/utils.py", line 345, in load_model
model.load_weights(list(weights.items()))
File "/Users/fanmac/.miniconda3/envs/mlx/lib/python3.11/site-packages/mlx/nn/layers/base.py", line 211, in load_weights
raise ValueError(f"Received parameters not in model: {extras}.")
ValueError: Received parameters not in model: model.layers.8.self_attn.q_norm.weight model.layers.12.self_attn.k_norm.weight model.layers.21.self_attn.k_norm.weight model.layers.4.self_attn.k_norm.weight model.layers.37.self_attn.q_norm.weight model.layers.3.self_attn.k_norm.weight model.layers.26.self_attn.q_norm.weight model.layers.59.self_attn.k_norm.weight model.layers.36.self_attn.k_norm.weight model.layers.41.self_attn.k_norm.weight model.layers.58.self_attn.k_norm.weight model.layers.60.self_attn.k_norm.weight model.layers.60.self_attn.q_norm.weight model.layers.42.self_attn.k_norm.weight model.layers.62.self_attn.k_norm.weight model.layers.39.self_attn.k_norm.weight model.layers.28.self_attn.q_norm.weight model.layers.42.self_attn.q_norm.weight model.layers.49.self_attn.k_norm.weight model.layers.0.self_attn.q_norm.weight model.layers.27.self_attn.k_norm.weight model.layers.29.self_attn.k_norm.weight model.layers.33.self_attn.k_norm.weight model.layers.10.self_attn.k_norm.weight model.layers.56.self_attn.q_norm.weight model.layers.25.self_attn.k_norm.weight model.layers.54.self_attn.q_norm.weight model.layers.15.self_attn.k_norm.weight model.layers.3.self_attn.q_norm.weight model.layers.58.self_attn.q_norm.weight model.layers.19.self_attn.q_norm.weight model.layers.39.self_attn.q_norm.weight model.layers.5.self_attn.k_norm.weight model.layers.30.self_attn.k_norm.weight model.layers.51.self_attn.k_norm.weight model.layers.55.self_attn.q_norm.weight model.layers.17.self_attn.k_norm.weight model.layers.20.self_attn.k_norm.weight model.layers.24.self_attn.q_norm.weight model.layers.1.self_attn.k_norm.weight model.layers.61.self_attn.q_norm.weight model.layers.21.self_attn.q_norm.weight model.layers.24.self_attn.k_norm.weight model.layers.59.self_attn.q_norm.weight model.layers.13.self_attn.q_norm.weight model.layers.53.self_attn.k_norm.weight model.layers.23.self_attn.k_norm.weight model.layers.20.self_attn.q_norm.weight model.layers.52.self_attn.k_norm.weight 
model.layers.11.self_attn.k_norm.weight model.layers.15.self_attn.q_norm.weight model.layers.41.self_attn.q_norm.weight model.layers.4.self_attn.q_norm.weight model.layers.8.self_attn.k_norm.weight model.layers.53.self_attn.q_norm.weight model.layers.38.self_attn.k_norm.weight model.layers.43.self_attn.k_norm.weight model.layers.30.self_attn.q_norm.weight model.layers.63.self_attn.q_norm.weight model.layers.55.self_attn.k_norm.weight model.layers.46.self_attn.q_norm.weight model.layers.2.self_attn.q_norm.weight model.layers.26.self_attn.k_norm.weight model.layers.6.self_attn.q_norm.weight model.layers.49.self_attn.q_norm.weight model.layers.28.self_attn.k_norm.weight model.layers.19.self_attn.k_norm.weight model.layers.44.self_attn.q_norm.weight model.layers.50.self_attn.k_norm.weight model.layers.31.self_attn.k_norm.weight model.layers.10.self_attn.q_norm.weight model.layers.13.self_attn.k_norm.weight model.layers.34.self_attn.q_norm.weight model.layers.9.self_attn.k_norm.weight model.layers.14.self_attn.k_norm.weight model.layers.33.self_attn.q_norm.weight model.layers.45.self_attn.k_norm.weight model.layers.37.self_attn.k_norm.weight model.layers.0.self_attn.k_norm.weight model.layers.34.self_attn.k_norm.weight model.layers.25.self_attn.q_norm.weight model.layers.17.self_attn.q_norm.weight model.layers.16.self_attn.k_norm.weight model.layers.22.self_attn.q_norm.weight model.layers.18.self_attn.k_norm.weight model.layers.32.self_attn.k_norm.weight model.layers.43.self_attn.q_norm.weight model.layers.23.self_attn.q_norm.weight model.layers.5.self_attn.q_norm.weight model.layers.63.self_attn.k_norm.weight model.layers.56.self_attn.k_norm.weight model.layers.61.self_attn.k_norm.weight model.layers.18.self_attn.q_norm.weight model.layers.45.self_attn.q_norm.weight model.layers.62.self_attn.q_norm.weight model.layers.57.self_attn.q_norm.weight model.layers.50.self_attn.q_norm.weight model.layers.46.self_attn.k_norm.weight model.layers.32.self_attn.q_norm.weight 
model.layers.36.self_attn.q_norm.weight model.layers.47.self_attn.q_norm.weight model.layers.1.self_attn.q_norm.weight model.layers.48.self_attn.k_norm.weight model.layers.12.self_attn.q_norm.weight model.layers.31.self_attn.q_norm.weight model.layers.54.self_attn.k_norm.weight model.layers.38.self_attn.q_norm.weight model.layers.44.self_attn.k_norm.weight model.layers.9.self_attn.q_norm.weight model.layers.7.self_attn.q_norm.weight model.layers.16.self_attn.q_norm.weight model.layers.35.self_attn.q_norm.weight model.layers.7.self_attn.k_norm.weight model.layers.40.self_attn.k_norm.weight model.layers.40.self_attn.q_norm.weight model.layers.35.self_attn.k_norm.weight model.layers.27.self_attn.q_norm.weight model.layers.11.self_attn.q_norm.weight model.layers.6.self_attn.k_norm.weight model.layers.2.self_attn.k_norm.weight model.layers.51.self_attn.q_norm.weight model.layers.22.self_attn.k_norm.weight model.layers.29.self_attn.q_norm.weight model.layers.57.self_attn.k_norm.weight model.layers.14.self_attn.q_norm.weight model.layers.52.self_attn.q_norm.weight model.layers.47.self_attn.k_norm.weight model.layers.48.self_attn.q_norm.weight.

MLX Community org

That's because the PR is not merged yet.

Try installing this branch if you want to test it out: https://github.com/ml-explore/mlx-examples/tree/cohere_plus

prince-canuma changed discussion status to closed
prince-canuma changed discussion status to open

Sign up or log in to comment