# from huggingface_hub import list_repo_files, hf_hub_download
# import os
# # Optional: choose your loader
# USE_SAFETENSORS = True
# if USE_SAFETENSORS:
#     from safetensors.torch import load_file as model_loader
# else:
#     import torch
#     model_loader = torch.load  # fallback loader for .pt/.pth checkpoints
# # Step 1: List all files in the dataset repo
# repo_id = "MatchLab/PointMapVerse"
# all_files = list_repo_files(repo_id=repo_id, repo_type="dataset")
# # Step 2: Automatically detect all subfolders (first-level only)
# subfolders = set(f.split('/')[0] for f in all_files if '/' in f)
# print(f"Detected subfolders: {subfolders}")
# # Step 3: Collect target files (only .safetensors inside the chosen subfolder)
# target_files = [f for f in all_files if f.split('/')[0] in ['light_arkitscenes'] and f.endswith('.safetensors')]
# print(f"Found {len(target_files)} .safetensors files in subfolders.")
# for file_path in target_files:
#     print(f"Caching: {file_path}")
#     cached_file = hf_hub_download(
#         repo_id=repo_id,
#         filename=file_path,
#         repo_type="dataset",
#         local_files_only=False,
#         resume_download=True,
#     )
#     # Optional: Load into memory
#     data = model_loader(cached_file)
#     print(data['point_map'].shape)
#     print(f"Loaded: {file_path}, keys: {list(data.keys())}")
# # import os
# # import glob
# # from safetensors.torch import load_file
# # repo_id = "MatchLab/PointMapVerse"
# # # Step 1: Download & cache the dataset snapshot
# # from huggingface_hub import snapshot_download
# # local_dir = snapshot_download(
# #     repo_id=repo_id,
# #     repo_type="dataset",
# #     allow_patterns=["light_scannet/*", "light_3rscan/*", "light_arkitscenes/*"],  # include just these subfolders
# # )
# # print(f"Local dataset directory: {local_dir}")
# # # Step 2: Find all .safetensors files inside the target subfolders
# # file_paths = glob.glob(os.path.join(local_dir, "light_*", "*.safetensors"))
# # print(f"Found {len(file_paths)} .safetensors files")
# import os
# from huggingface_hub import hf_hub_download, list_repo_files
# from safetensors.torch import load_file
# repo_id = "MatchLab/PointMapVerse"
# subfolders = ["light_scannet", "light_3rscan", "light_arkitscenes"]
# # Step 1: Download every file under the target subfolders
# repo_files = list_repo_files(repo_id=repo_id, repo_type="dataset")
# all_files = []
# for sub in subfolders:
#     filenames = [f.split("/", 1)[1] for f in repo_files if f.startswith(f"{sub}/")]
#     for fname in filenames:
#         try:
#             cached_path = hf_hub_download(
#                 repo_id=repo_id,
#                 repo_type="dataset",
#                 filename=f"{sub}/{fname}",
#                 local_files_only=False,  # set True if you already downloaded and cached
#                 resume_download=True,
#             )
#             all_files.append(cached_path)
#         except Exception as e:
#             print(f"⚠️ Could not download {sub}/{fname}: {e}")
# print(f"Downloaded {len(all_files)} files")
# # Step 2: Load the files
# for path in all_files:
#     data = load_file(path)  # dict-like object
#     print(
#         f"Loaded {os.path.basename(path)}: "
#         f"keys={list(data.keys())}, "
#         f"point_map shape={data['point_map'].shape}"
#     )
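# # Optional sketch: inspect shard contents without materializing full tensors, using
# # safetensors' lazy safe_open API (assumes the same 'point_map' key as above).
# # from safetensors import safe_open
# # for path in all_files:
# #     with safe_open(path, framework="pt", device="cpu") as f:
# #         print(os.path.basename(path), list(f.keys()), f.get_slice("point_map").get_shape())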