Finetuned Phi-2 Model throwing `OSError` for `configuration_phi.py`
Hello!
I'm loading my sqlcoder model from pavankumarbalijepalli/phi2-sqlcoder
using the code below.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
model_name = "pavankumarbalijepalli/phi2-sqlcoder"
model = AutoModelForCausalLM.from_pretrained(
model_name,
trust_remote_code=True,
device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
tokenizer.pad_token = tokenizer.eos_token
The error below is raised when I try to load my model. I see that 30 days ago the file configuration_phi.py was removed from the microsoft/phi-2 repository, but I do not understand how to load my model now that it is requesting that file. I also have a GGUF version of my model, but loading the model from the Hub is much easier than downloading the GGUF.
OSError: microsoft/phi-2 does not appear to have a file named configuration_phi.py. Checkout 'https://huggingface.co/microsoft/phi-2/tree/main' for available files.
Complete error log is in the comment below. Kindly help. Thank you. 🙏🙏
Whole log
{
"name": "OSError",
"message": "microsoft/phi-2 does not appear to have a file named configuration_phi.py. Checkout 'https://huggingface.co/microsoft/phi-2/tree/main' for available files.",
"stack": "---------------------------------------------------------------------------
HTTPError Traceback (most recent call last)
File ~/.python/current/lib/python3.10/site-packages/huggingface_hub/utils/_errors.py:304, in hf_raise_for_status(response, endpoint_name)
303 try:
--> 304 response.raise_for_status()
305 except HTTPError as e:
File ~/.local/lib/python3.10/site-packages/requests/models.py:1021, in Response.raise_for_status(self)
1020 if http_error_msg:
-> 1021 raise HTTPError(http_error_msg, response=self)
HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/microsoft/phi-2/resolve/main/configuration_phi.py
The above exception was the direct cause of the following exception:
EntryNotFoundError Traceback (most recent call last)
File ~/.python/current/lib/python3.10/site-packages/transformers/utils/hub.py:399, in cached_file(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, subfolder, repo_type, user_agent, _raise_exceptions_for_gated_repo, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash, **deprecated_kwargs)
397 try:
398 # Load from URL or cache if already cached
--> 399 resolved_file = hf_hub_download(
400 path_or_repo_id,
401 filename,
402 subfolder=None if len(subfolder) == 0 else subfolder,
403 repo_type=repo_type,
404 revision=revision,
405 cache_dir=cache_dir,
406 user_agent=user_agent,
407 force_download=force_download,
408 proxies=proxies,
409 resume_download=resume_download,
410 token=token,
411 local_files_only=local_files_only,
412 )
413 except GatedRepoError as e:
File ~/.python/current/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.__name__, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)
File ~/.python/current/lib/python3.10/site-packages/huggingface_hub/file_download.py:1221, in hf_hub_download(repo_id, filename, subfolder, repo_type, revision, library_name, library_version, cache_dir, local_dir, user_agent, force_download, proxies, etag_timeout, token, local_files_only, headers, endpoint, legacy_cache_layout, resume_download, force_filename, local_dir_use_symlinks)
1220 else:
-> 1221 return _hf_hub_download_to_cache_dir(
1222 # Destination
1223 cache_dir=cache_dir,
1224 # File info
1225 repo_id=repo_id,
1226 filename=filename,
1227 repo_type=repo_type,
1228 revision=revision,
1229 # HTTP info
1230 headers=headers,
1231 proxies=proxies,
1232 etag_timeout=etag_timeout,
1233 endpoint=endpoint,
1234 # Additional options
1235 local_files_only=local_files_only,
1236 force_download=force_download,
1237 )
File ~/.python/current/lib/python3.10/site-packages/huggingface_hub/file_download.py:1282, in _hf_hub_download_to_cache_dir(cache_dir, repo_id, filename, repo_type, revision, headers, proxies, etag_timeout, endpoint, local_files_only, force_download)
1280 # Try to get metadata (etag, commit_hash, url, size) from the server.
1281 # If we can't, a HEAD request error is returned.
-> 1282 (url_to_download, etag, commit_hash, expected_size, head_call_error) = _get_metadata_or_catch_error(
1283 repo_id=repo_id,
1284 filename=filename,
1285 repo_type=repo_type,
1286 revision=revision,
1287 endpoint=endpoint,
1288 proxies=proxies,
1289 etag_timeout=etag_timeout,
1290 headers=headers,
1291 local_files_only=local_files_only,
1292 storage_folder=storage_folder,
1293 relative_filename=relative_filename,
1294 )
1296 # etag can be None for several reasons:
1297 # 1. we passed local_files_only.
1298 # 2. we don't have a connection
(...)
1304 # If the specified revision is a commit hash, look inside \"snapshots\".
1305 # If the specified revision is a branch or tag, look inside \"refs\".
File ~/.python/current/lib/python3.10/site-packages/huggingface_hub/file_download.py:1722, in _get_metadata_or_catch_error(repo_id, filename, repo_type, revision, endpoint, proxies, etag_timeout, headers, local_files_only, relative_filename, storage_folder)
1721 try:
-> 1722 metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers)
1723 except EntryNotFoundError as http_error:
File ~/.python/current/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.__name__, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)
File ~/.python/current/lib/python3.10/site-packages/huggingface_hub/file_download.py:1645, in get_hf_file_metadata(url, token, proxies, timeout, library_name, library_version, user_agent, headers)
1644 # Retrieve metadata
-> 1645 r = _request_wrapper(
1646 method=\"HEAD\",
1647 url=url,
1648 headers=headers,
1649 allow_redirects=False,
1650 follow_relative_redirects=True,
1651 proxies=proxies,
1652 timeout=timeout,
1653 )
1654 hf_raise_for_status(r)
File ~/.python/current/lib/python3.10/site-packages/huggingface_hub/file_download.py:372, in _request_wrapper(method, url, follow_relative_redirects, **params)
371 if follow_relative_redirects:
--> 372 response = _request_wrapper(
373 method=method,
374 url=url,
375 follow_relative_redirects=False,
376 **params,
377 )
379 # If redirection, we redirect only relative paths.
380 # This is useful in case of a renamed repository.
File ~/.python/current/lib/python3.10/site-packages/huggingface_hub/file_download.py:396, in _request_wrapper(method, url, follow_relative_redirects, **params)
395 response = get_session().request(method=method, url=url, **params)
--> 396 hf_raise_for_status(response)
397 return response
File ~/.python/current/lib/python3.10/site-packages/huggingface_hub/utils/_errors.py:315, in hf_raise_for_status(response, endpoint_name)
314 message = f\"{response.status_code} Client Error.\" + \"\
\
\" + f\"Entry Not Found for url: {response.url}.\"
--> 315 raise EntryNotFoundError(message, response) from e
317 elif error_code == \"GatedRepo\":
EntryNotFoundError: 404 Client Error. (Request ID: Root=1-6656c650-45a6c51629c07b9316351f1a;c37986d1-da4c-43ae-9b71-0f10575a9645)
Entry Not Found for url: https://huggingface.co/microsoft/phi-2/resolve/main/configuration_phi.py.
The above exception was the direct cause of the following exception:
OSError Traceback (most recent call last)
/workspaces/talking-tables/src/talking-tables/explore.ipynb Cell 2 line 6
<a href='vscode-notebook-cell://codespaces%2Bubiquitous-space-acorn-q4v9x56jg4q399v5/workspaces/talking-tables/src/talking-tables/explore.ipynb#W5sdnNjb2RlLXJlbW90ZQ%3D%3D?line=1'>2</a> from transformers import AutoModelForCausalLM, AutoTokenizer
<a href='vscode-notebook-cell://codespaces%2Bubiquitous-space-acorn-q4v9x56jg4q399v5/workspaces/talking-tables/src/talking-tables/explore.ipynb#W5sdnNjb2RlLXJlbW90ZQ%3D%3D?line=3'>4</a> model_name = \"pavankumarbalijepalli/phi2-sqlcoder\"
----> <a href='vscode-notebook-cell://codespaces%2Bubiquitous-space-acorn-q4v9x56jg4q399v5/workspaces/talking-tables/src/talking-tables/explore.ipynb#W5sdnNjb2RlLXJlbW90ZQ%3D%3D?line=5'>6</a> model = AutoModelForCausalLM.from_pretrained(
<a href='vscode-notebook-cell://codespaces%2Bubiquitous-space-acorn-q4v9x56jg4q399v5/workspaces/talking-tables/src/talking-tables/explore.ipynb#W5sdnNjb2RlLXJlbW90ZQ%3D%3D?line=6'>7</a> model_name,
<a href='vscode-notebook-cell://codespaces%2Bubiquitous-space-acorn-q4v9x56jg4q399v5/workspaces/talking-tables/src/talking-tables/explore.ipynb#W5sdnNjb2RlLXJlbW90ZQ%3D%3D?line=7'>8</a> trust_remote_code=True,
<a href='vscode-notebook-cell://codespaces%2Bubiquitous-space-acorn-q4v9x56jg4q399v5/workspaces/talking-tables/src/talking-tables/explore.ipynb#W5sdnNjb2RlLXJlbW90ZQ%3D%3D?line=8'>9</a> device_map=\"auto\"
<a href='vscode-notebook-cell://codespaces%2Bubiquitous-space-acorn-q4v9x56jg4q399v5/workspaces/talking-tables/src/talking-tables/explore.ipynb#W5sdnNjb2RlLXJlbW90ZQ%3D%3D?line=9'>10</a> )
<a href='vscode-notebook-cell://codespaces%2Bubiquitous-space-acorn-q4v9x56jg4q399v5/workspaces/talking-tables/src/talking-tables/explore.ipynb#W5sdnNjb2RlLXJlbW90ZQ%3D%3D?line=10'>11</a> tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
<a href='vscode-notebook-cell://codespaces%2Bubiquitous-space-acorn-q4v9x56jg4q399v5/workspaces/talking-tables/src/talking-tables/explore.ipynb#W5sdnNjb2RlLXJlbW90ZQ%3D%3D?line=11'>12</a> tokenizer.pad_token = tokenizer.eos_token
File ~/.python/current/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:523, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
520 if kwargs.get(\"quantization_config\", None) is not None:
521 _ = kwargs.pop(\"quantization_config\")
--> 523 config, kwargs = AutoConfig.from_pretrained(
524 pretrained_model_name_or_path,
525 return_unused_kwargs=True,
526 trust_remote_code=trust_remote_code,
527 code_revision=code_revision,
528 _commit_hash=commit_hash,
529 **hub_kwargs,
530 **kwargs,
531 )
533 # if torch_dtype=auto was passed here, ensure to pass it on
534 if kwargs_orig.get(\"torch_dtype\", None) == \"auto\":
File ~/.python/current/lib/python3.10/site-packages/transformers/models/auto/configuration_auto.py:943, in AutoConfig.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
941 if has_remote_code and trust_remote_code:
942 class_ref = config_dict[\"auto_map\"][\"AutoConfig\"]
--> 943 config_class = get_class_from_dynamic_module(
944 class_ref, pretrained_model_name_or_path, code_revision=code_revision, **kwargs
945 )
946 if os.path.isdir(pretrained_model_name_or_path):
947 config_class.register_for_auto_class()
File ~/.python/current/lib/python3.10/site-packages/transformers/dynamic_module_utils.py:498, in get_class_from_dynamic_module(class_reference, pretrained_model_name_or_path, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, repo_type, code_revision, **kwargs)
496 code_revision = revision
497 # And lastly we get the class inside our newly created module
--> 498 final_module = get_cached_module_file(
499 repo_id,
500 module_file + \".py\",
501 cache_dir=cache_dir,
502 force_download=force_download,
503 resume_download=resume_download,
504 proxies=proxies,
505 token=token,
506 revision=code_revision,
507 local_files_only=local_files_only,
508 repo_type=repo_type,
509 )
510 return get_class_in_module(class_name, final_module)
File ~/.python/current/lib/python3.10/site-packages/transformers/dynamic_module_utils.py:302, in get_cached_module_file(pretrained_model_name_or_path, module_file, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, repo_type, _commit_hash, **deprecated_kwargs)
299 new_files = []
300 try:
301 # Load from URL or cache if already cached
--> 302 resolved_module_file = cached_file(
303 pretrained_model_name_or_path,
304 module_file,
305 cache_dir=cache_dir,
306 force_download=force_download,
307 proxies=proxies,
308 resume_download=resume_download,
309 local_files_only=local_files_only,
310 token=token,
311 revision=revision,
312 repo_type=repo_type,
313 _commit_hash=_commit_hash,
314 )
315 if not is_local and cached_module != resolved_module_file:
316 new_files.append(module_file)
File ~/.python/current/lib/python3.10/site-packages/transformers/utils/hub.py:453, in cached_file(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, subfolder, repo_type, user_agent, _raise_exceptions_for_gated_repo, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash, **deprecated_kwargs)
451 if revision is None:
452 revision = \"main\"
--> 453 raise EnvironmentError(
454 f\"{path_or_repo_id} does not appear to have a file named {full_filename}. Checkout \"
455 f\"'https://huggingface.co/{path_or_repo_id}/tree/{revision}' for available files.\"
456 ) from e
457 except HTTPError as err:
458 resolved_file = _get_cache_file_to_return(path_or_repo_id, full_filename, cache_dir, revision)
OSError: microsoft/phi-2 does not appear to have a file named configuration_phi.py. Checkout 'https://huggingface.co/microsoft/phi-2/tree/main' for available files."
}
same problem
Same problem
Herewith, the same problem
They have removed the file. Merge your adapter weights into the base model (using the new commit of microsoft/phi-2) and push the merged model; then you will not face this issue.
I am still having the issue.
If I set trust_remote_code=False (or remove the argument), I get:
raise ValueError(
ValueError: The repository for ank087/results contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/ank087/results.
Please pass the argument trust_remote_code=True
to allow custom code to be run.
If I set trust_remote_code=True, I get:
OSError: microsoft/phi-2 does not appear to have a file named configuration_phi.py. Checkout 'https://huggingface.co/microsoft/phi-2/tree/main' for available files.
My current transformers version: 4.43.3