|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import importlib |
|
import os |
|
import sys |
|
from typing import TYPE_CHECKING |
|
|
|
|
|
# Package version string (PEP 440); exposed as `<package>.__version__`.
__version__ = "0.20.2"
|
|
|
|
|
|
|
|
|
# Single source of truth for the package's lazily-loaded public API:
# maps a submodule name (dotted, relative to this package) to the list of
# attributes that submodule defines. `_attach` (below) consumes this mapping
# to build the package-level `__getattr__` / `__dir__` / `__all__`, so that
# importing the package stays cheap and each submodule is only imported on
# first attribute access (PEP 562).
# NOTE(review): this mapping must be kept in sync with the static imports in
# the `if TYPE_CHECKING:` block at the bottom of this file.
_SUBMOD_ATTRS = {
    "_commit_scheduler": [
        "CommitScheduler",
    ],
    "_inference_endpoints": [
        "InferenceEndpoint",
        "InferenceEndpointError",
        "InferenceEndpointStatus",
        "InferenceEndpointTimeoutError",
        "InferenceEndpointType",
    ],
    "_login": [
        "interpreter_login",
        "login",
        "logout",
        "notebook_login",
    ],
    "_multi_commits": [
        "MultiCommitException",
        "plan_multi_commits",
    ],
    "_snapshot_download": [
        "snapshot_download",
    ],
    "_space_api": [
        "SpaceHardware",
        "SpaceRuntime",
        "SpaceStage",
        "SpaceStorage",
        "SpaceVariable",
    ],
    "_tensorboard_logger": [
        "HFSummaryWriter",
    ],
    "_webhooks_payload": [
        "WebhookPayload",
        "WebhookPayloadComment",
        "WebhookPayloadDiscussion",
        "WebhookPayloadDiscussionChanges",
        "WebhookPayloadEvent",
        "WebhookPayloadMovedTo",
        "WebhookPayloadRepo",
        "WebhookPayloadUrl",
        "WebhookPayloadWebhook",
    ],
    "_webhooks_server": [
        "WebhooksServer",
        "webhook_endpoint",
    ],
    "community": [
        "Discussion",
        "DiscussionComment",
        "DiscussionCommit",
        "DiscussionEvent",
        "DiscussionStatusChange",
        "DiscussionTitleChange",
        "DiscussionWithDetails",
    ],
    "constants": [
        "CONFIG_NAME",
        "FLAX_WEIGHTS_NAME",
        "HUGGINGFACE_CO_URL_HOME",
        "HUGGINGFACE_CO_URL_TEMPLATE",
        "PYTORCH_WEIGHTS_NAME",
        "REPO_TYPE_DATASET",
        "REPO_TYPE_MODEL",
        "REPO_TYPE_SPACE",
        "TF2_WEIGHTS_NAME",
        "TF_WEIGHTS_NAME",
    ],
    "fastai_utils": [
        "_save_pretrained_fastai",
        "from_pretrained_fastai",
        "push_to_hub_fastai",
    ],
    "file_download": [
        "HfFileMetadata",
        "_CACHED_NO_EXIST",
        "cached_download",
        "get_hf_file_metadata",
        "hf_hub_download",
        "hf_hub_url",
        "try_to_load_from_cache",
    ],
    "hf_api": [
        "Collection",
        "CollectionItem",
        "CommitInfo",
        "CommitOperation",
        "CommitOperationAdd",
        "CommitOperationCopy",
        "CommitOperationDelete",
        "GitCommitInfo",
        "GitRefInfo",
        "GitRefs",
        "HfApi",
        "RepoUrl",
        "User",
        "UserLikes",
        "accept_access_request",
        "add_collection_item",
        "add_space_secret",
        "add_space_variable",
        "cancel_access_request",
        "change_discussion_status",
        "comment_discussion",
        "create_branch",
        "create_collection",
        "create_commit",
        "create_commits_on_pr",
        "create_discussion",
        "create_inference_endpoint",
        "create_pull_request",
        "create_repo",
        "create_tag",
        "dataset_info",
        "delete_branch",
        "delete_collection",
        "delete_collection_item",
        "delete_file",
        "delete_folder",
        "delete_inference_endpoint",
        "delete_repo",
        "delete_space_secret",
        "delete_space_storage",
        "delete_space_variable",
        "delete_tag",
        "duplicate_space",
        "edit_discussion_comment",
        "file_exists",
        "get_collection",
        "get_dataset_tags",
        "get_discussion_details",
        "get_full_repo_name",
        "get_inference_endpoint",
        "get_model_tags",
        "get_paths_info",
        "get_repo_discussions",
        "get_safetensors_metadata",
        "get_space_runtime",
        "get_space_variables",
        "get_token_permission",
        "grant_access",
        "like",
        "list_accepted_access_requests",
        "list_collections",
        "list_datasets",
        "list_files_info",
        "list_inference_endpoints",
        "list_liked_repos",
        "list_metrics",
        "list_models",
        "list_pending_access_requests",
        "list_rejected_access_requests",
        "list_repo_commits",
        "list_repo_files",
        "list_repo_likers",
        "list_repo_refs",
        "list_repo_tree",
        "list_spaces",
        "merge_pull_request",
        "model_info",
        "move_repo",
        "parse_safetensors_file_metadata",
        "pause_inference_endpoint",
        "pause_space",
        "preupload_lfs_files",
        "reject_access_request",
        "rename_discussion",
        "repo_exists",
        "repo_info",
        "repo_type_and_id_from_hf_id",
        "request_space_hardware",
        "request_space_storage",
        "restart_space",
        "resume_inference_endpoint",
        "run_as_future",
        "scale_to_zero_inference_endpoint",
        "set_space_sleep_time",
        "space_info",
        "super_squash_history",
        "unlike",
        "update_collection_item",
        "update_collection_metadata",
        "update_inference_endpoint",
        "update_repo_visibility",
        "upload_file",
        "upload_folder",
        "whoami",
    ],
    "hf_file_system": [
        "HfFileSystem",
        "HfFileSystemFile",
        "HfFileSystemResolvedPath",
    ],
    "hub_mixin": [
        "ModelHubMixin",
        "PyTorchModelHubMixin",
    ],
    "inference._client": [
        "InferenceClient",
        "InferenceTimeoutError",
    ],
    "inference._generated._async_client": [
        "AsyncInferenceClient",
    ],
    "inference_api": [
        "InferenceApi",
    ],
    "keras_mixin": [
        "KerasModelHubMixin",
        "from_pretrained_keras",
        "push_to_hub_keras",
        "save_pretrained_keras",
    ],
    "repocard": [
        "DatasetCard",
        "ModelCard",
        "RepoCard",
        "SpaceCard",
        "metadata_eval_result",
        "metadata_load",
        "metadata_save",
        "metadata_update",
    ],
    "repocard_data": [
        "CardData",
        "DatasetCardData",
        "EvalResult",
        "ModelCardData",
        "SpaceCardData",
    ],
    "repository": [
        "Repository",
    ],
    "utils": [
        "CacheNotFound",
        "CachedFileInfo",
        "CachedRepoInfo",
        "CachedRevisionInfo",
        "CorruptedCacheException",
        "DeleteCacheStrategy",
        "HFCacheInfo",
        "HfFolder",
        "cached_assets_path",
        "configure_http_backend",
        "dump_environment_info",
        "get_session",
        "get_token",
        "logging",
        "scan_cache_dir",
    ],
    "utils.endpoint_helpers": [
        "DatasetFilter",
        "ModelFilter",
    ],
}
|
|
|
|
|
def _attach(package_name, submodules=None, submod_attrs=None):
    """Attach lazily loaded submodules, functions, or other attributes.

    Typically, modules import submodules and attributes as follows:

    ```py
    import mysubmodule
    import anothersubmodule

    from .foo import someattr
    ```

    The idea is to replace a package's `__getattr__`, `__dir__`, and
    `__all__`, such that all imports work exactly the way they would
    with normal imports, except that the import occurs upon first use.

    The typical way to call this function, replacing the above imports, is:

    ```python
    __getattr__, __dir__, __all__ = _attach(
        __name__,
        ['mysubmodule', 'anothersubmodule'],
        {'foo': ['someattr']}
    )
    ```
    This functionality requires Python 3.7 or higher (module-level
    `__getattr__` / `__dir__`, PEP 562).

    Args:
        package_name (`str`):
            Typically use `__name__`.
        submodules (`set`):
            List of submodules to attach.
        submod_attrs (`dict`):
            Dictionary of submodule -> list of attributes / functions.
            These attributes are imported as they are used.

    Returns:
        __getattr__, __dir__, __all__

    """
    if submod_attrs is None:
        submod_attrs = {}

    if submodules is None:
        submodules = set()
    else:
        submodules = set(submodules)

    # Reverse index: attribute name -> name of the submodule that defines it.
    attr_to_modules = {attr: mod for mod, attrs in submod_attrs.items() for attr in attrs}

    # Sorted for determinism: a plain `list(set | keys())` has arbitrary,
    # hash-randomized order, making `__all__` (and anything derived from it,
    # e.g. generated docs) unstable across runs.
    __all__ = sorted(submodules | attr_to_modules.keys())

    def __getattr__(name):
        # Resolve `name` on first access: either import the submodule itself,
        # or import the submodule that defines the requested attribute.
        if name in submodules:
            return importlib.import_module(f"{package_name}.{name}")
        elif name in attr_to_modules:
            submod_path = f"{package_name}.{attr_to_modules[name]}"
            submod = importlib.import_module(submod_path)
            attr = getattr(submod, name)

            # If the attribute shares its submodule's name, the import above
            # has just bound the *module* object onto the package under that
            # name, which would permanently shadow this `__getattr__` (it is
            # only consulted when normal lookup fails). Rebind the package
            # attribute so callers get the attribute, not the module.
            if name == attr_to_modules[name]:
                pkg = sys.modules[package_name]
                pkg.__dict__[name] = attr

            return attr
        else:
            raise AttributeError(f"No {package_name} attribute {name}")

    def __dir__():
        return __all__

    # Escape hatch (mainly for testing/debugging): import everything eagerly
    # instead of lazily when EAGER_IMPORT is set to a non-empty value.
    if os.environ.get("EAGER_IMPORT", ""):
        for attr in set(attr_to_modules.keys()) | submodules:
            __getattr__(attr)

    # Return a copy of __all__ so the package-level list and the one closed
    # over by __dir__ cannot be mutated through each other.
    return __getattr__, __dir__, list(__all__)
|
|
|
|
|
# Install the lazy-loading hooks on this package (PEP 562): attribute access
# triggers the real import according to the `_SUBMOD_ATTRS` mapping above.
__getattr__, __dir__, __all__ = _attach(__name__, submodules=[], submod_attrs=_SUBMOD_ATTRS)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Static mirror of `_SUBMOD_ATTRS` for type checkers and IDEs: these imports
# never execute at runtime (TYPE_CHECKING is False), but they let tooling see
# the lazily-exposed names with real types. Keep in sync with `_SUBMOD_ATTRS`.
if TYPE_CHECKING:
    from ._commit_scheduler import CommitScheduler
    from ._inference_endpoints import (
        InferenceEndpoint,
        InferenceEndpointError,
        InferenceEndpointStatus,
        InferenceEndpointTimeoutError,
        InferenceEndpointType,
    )
    from ._login import (
        interpreter_login,
        login,
        logout,
        notebook_login,
    )
    from ._multi_commits import (
        MultiCommitException,
        plan_multi_commits,
    )
    from ._snapshot_download import snapshot_download
    from ._space_api import (
        SpaceHardware,
        SpaceRuntime,
        SpaceStage,
        SpaceStorage,
        SpaceVariable,
    )
    from ._tensorboard_logger import HFSummaryWriter
    from ._webhooks_payload import (
        WebhookPayload,
        WebhookPayloadComment,
        WebhookPayloadDiscussion,
        WebhookPayloadDiscussionChanges,
        WebhookPayloadEvent,
        WebhookPayloadMovedTo,
        WebhookPayloadRepo,
        WebhookPayloadUrl,
        WebhookPayloadWebhook,
    )
    from ._webhooks_server import (
        WebhooksServer,
        webhook_endpoint,
    )
    from .community import (
        Discussion,
        DiscussionComment,
        DiscussionCommit,
        DiscussionEvent,
        DiscussionStatusChange,
        DiscussionTitleChange,
        DiscussionWithDetails,
    )
    from .constants import (
        CONFIG_NAME,
        FLAX_WEIGHTS_NAME,
        HUGGINGFACE_CO_URL_HOME,
        HUGGINGFACE_CO_URL_TEMPLATE,
        PYTORCH_WEIGHTS_NAME,
        REPO_TYPE_DATASET,
        REPO_TYPE_MODEL,
        REPO_TYPE_SPACE,
        TF2_WEIGHTS_NAME,
        TF_WEIGHTS_NAME,
    )
    from .fastai_utils import (
        _save_pretrained_fastai,
        from_pretrained_fastai,
        push_to_hub_fastai,
    )
    from .file_download import (
        _CACHED_NO_EXIST,
        HfFileMetadata,
        cached_download,
        get_hf_file_metadata,
        hf_hub_download,
        hf_hub_url,
        try_to_load_from_cache,
    )
    from .hf_api import (
        Collection,
        CollectionItem,
        CommitInfo,
        CommitOperation,
        CommitOperationAdd,
        CommitOperationCopy,
        CommitOperationDelete,
        GitCommitInfo,
        GitRefInfo,
        GitRefs,
        HfApi,
        RepoUrl,
        User,
        UserLikes,
        accept_access_request,
        add_collection_item,
        add_space_secret,
        add_space_variable,
        cancel_access_request,
        change_discussion_status,
        comment_discussion,
        create_branch,
        create_collection,
        create_commit,
        create_commits_on_pr,
        create_discussion,
        create_inference_endpoint,
        create_pull_request,
        create_repo,
        create_tag,
        dataset_info,
        delete_branch,
        delete_collection,
        delete_collection_item,
        delete_file,
        delete_folder,
        delete_inference_endpoint,
        delete_repo,
        delete_space_secret,
        delete_space_storage,
        delete_space_variable,
        delete_tag,
        duplicate_space,
        edit_discussion_comment,
        file_exists,
        get_collection,
        get_dataset_tags,
        get_discussion_details,
        get_full_repo_name,
        get_inference_endpoint,
        get_model_tags,
        get_paths_info,
        get_repo_discussions,
        get_safetensors_metadata,
        get_space_runtime,
        get_space_variables,
        get_token_permission,
        grant_access,
        like,
        list_accepted_access_requests,
        list_collections,
        list_datasets,
        list_files_info,
        list_inference_endpoints,
        list_liked_repos,
        list_metrics,
        list_models,
        list_pending_access_requests,
        list_rejected_access_requests,
        list_repo_commits,
        list_repo_files,
        list_repo_likers,
        list_repo_refs,
        list_repo_tree,
        list_spaces,
        merge_pull_request,
        model_info,
        move_repo,
        parse_safetensors_file_metadata,
        pause_inference_endpoint,
        pause_space,
        preupload_lfs_files,
        reject_access_request,
        rename_discussion,
        repo_exists,
        repo_info,
        repo_type_and_id_from_hf_id,
        request_space_hardware,
        request_space_storage,
        restart_space,
        resume_inference_endpoint,
        run_as_future,
        scale_to_zero_inference_endpoint,
        set_space_sleep_time,
        space_info,
        super_squash_history,
        unlike,
        update_collection_item,
        update_collection_metadata,
        update_inference_endpoint,
        update_repo_visibility,
        upload_file,
        upload_folder,
        whoami,
    )
    from .hf_file_system import (
        HfFileSystem,
        HfFileSystemFile,
        HfFileSystemResolvedPath,
    )
    from .hub_mixin import (
        ModelHubMixin,
        PyTorchModelHubMixin,
    )
    from .inference._client import (
        InferenceClient,
        InferenceTimeoutError,
    )
    from .inference._generated._async_client import AsyncInferenceClient
    from .inference_api import InferenceApi
    from .keras_mixin import (
        KerasModelHubMixin,
        from_pretrained_keras,
        push_to_hub_keras,
        save_pretrained_keras,
    )
    from .repocard import (
        DatasetCard,
        ModelCard,
        RepoCard,
        SpaceCard,
        metadata_eval_result,
        metadata_load,
        metadata_save,
        metadata_update,
    )
    from .repocard_data import (
        CardData,
        DatasetCardData,
        EvalResult,
        ModelCardData,
        SpaceCardData,
    )
    from .repository import Repository
    from .utils import (
        CachedFileInfo,
        CachedRepoInfo,
        CachedRevisionInfo,
        CacheNotFound,
        CorruptedCacheException,
        DeleteCacheStrategy,
        HFCacheInfo,
        HfFolder,
        cached_assets_path,
        configure_http_backend,
        dump_environment_info,
        get_session,
        get_token,
        logging,
        scan_cache_dir,
    )
    from .utils.endpoint_helpers import (
        DatasetFilter,
        ModelFilter,
    )
|
|