Sou-Cheng committed on
Commit
167a6bd
·
verified ·
1 Parent(s): ce45ac4

Upload upload.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. upload.py +297 -0
upload.py ADDED
@@ -0,0 +1,297 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ """
3
+ upload.py
4
+
5
+ Upload the QMCSoftware/LDData repository (low discrepancy generating vectors
6
+ and matrices) to the Hugging Face Datasets Hub as a dataset repo.
7
+
8
+ This script:
9
+
10
+ 1. Creates (or reuses) a dataset repo on the Hub.
11
+ 2. Uploads all files from a local LDData checkout using `upload_folder`.
12
+ 3. Leaves README.md in place so it becomes the dataset card.
13
+ After upload you can edit the card in the web UI to:
14
+ - Link to the paper (arXiv:2502.14256).
15
+ - Add "Citation" and "Uses" sections, similar to
16
+ - facebook/omnilingual-asr-corpus
17
+ - nvidia/PhysicalAI-Autonomous-Vehicles
18
+ - moondream/refcoco-m
19
+
20
+ Requirements:
21
+ pip install "huggingface_hub>=0.32.0"
22
+
23
+ Authentication:
24
+ - Either set HF_TOKEN in your environment:
25
+ export HF_TOKEN=hf_xxx...
26
+ OR pass --token on the command line.
27
+
28
+ Example usage:
29
+ python upload.py \
30
+ --repo-id QMCSoftware/LDData \
31
+ --local-path /path/to/local/LDData
32
+
33
+ After upload you’ll be able to do, e.g.:
34
+
35
+ from datasets import load_dataset
36
+ ds = load_dataset("QMCSoftware/LDData")
37
+
38
+ and link the dataset to your paper page on Hugging Face.
39
+ """
40
+
41
+ import argparse
42
+ import os
43
+ import sys
44
+ from pathlib import Path
45
+ import httpx
46
+ import fnmatch
47
+
48
+ from huggingface_hub import HfApi, create_repo # type: ignore
49
+
50
+
51
def parse_args() -> argparse.Namespace:
    """Parse and return the command-line options for the upload script.

    Options: --repo-id, --local-path, --token, --private, --dry-run.
    """
    cli = argparse.ArgumentParser(
        description="Upload LDData to the Hugging Face Datasets Hub."
    )
    cli.add_argument(
        "--repo-id",
        default="QMCSoftware/LDData",
        type=str,
        help="Target dataset repo id on the Hub (e.g. 'QMCSoftware/LDData').",
    )
    cli.add_argument(
        "--local-path",
        default=".",
        type=str,
        help="Path to local LDData checkout (default: current directory).",
    )
    cli.add_argument(
        "--token",
        default=None,
        type=str,
        help="Hugging Face access token. If omitted, HF_TOKEN env var is used.",
    )
    cli.add_argument(
        "--private",
        action="store_true",
        help="Create the dataset repo as private (default: public).",
    )
    cli.add_argument(
        "--dry-run",
        action="store_true",
        help="Do not upload, just print what would be done.",
    )
    return cli.parse_args()
84
+
85
+
86
+ def get_token(cmd_token: str | None) -> str:
87
+ token = cmd_token or os.environ.get("HF_TOKEN")
88
+ if not token:
89
+ raise SystemExit(
90
+ "No token provided. Please either:\n"
91
+ " - set HF_TOKEN in your environment, or\n"
92
+ " - pass --token hf_xxx... on the command line."
93
+ )
94
+ return token
95
+
96
+
97
def _is_ignored(rel_path: str, ignore_patterns: list[str]) -> bool:
    """Return True if the posix-style relative path matches any ignore pattern."""
    for patt in ignore_patterns:
        if fnmatch.fnmatch(rel_path, patt):
            return True
        # Also honor patterns written with a leading slash.
        if fnmatch.fnmatch("/" + rel_path, patt):
            return True
    return False


def _collect_files(
    local_path: Path, ignore_patterns: list[str]
) -> tuple[list[tuple[Path, str]], int]:
    """Walk *local_path* and gather uploadable files.

    Returns ([(absolute_path, repo_relative_posix_path), ...], total_bytes),
    skipping anything matched by *ignore_patterns*. Files whose size cannot
    be read count as 0 bytes but are still uploaded.
    """
    files_to_upload: list[tuple[Path, str]] = []
    total_size = 0
    for root, _, files in os.walk(local_path):
        for fname in files:
            full = Path(root) / fname
            try:
                rel = full.relative_to(local_path).as_posix()
            except ValueError:
                # Defensive: os.walk should stay under local_path, but fall
                # back to the absolute posix path rather than crashing.
                rel = full.as_posix()
            if _is_ignored(rel, ignore_patterns):
                continue
            try:
                size = full.stat().st_size
            except OSError:
                size = 0
            files_to_upload.append((full, rel))
            total_size += size
    return files_to_upload, total_size


def _bulk_upload(
    api,
    local_path: Path,
    repo_id: str,
    token: str,
    ignore_patterns: list[str],
    total_size: int,
) -> bool:
    """Attempt a single bulk upload of the whole folder.

    Uses upload_large_folder for big trees (> 50 MiB) when available,
    otherwise upload_folder. Returns True on success, False when the caller
    should fall back to per-file upload. Exits the process on read timeout.
    """
    large_threshold = 50 * 1024 * 1024  # prefer upload_large_folder above 50 MiB
    try:
        if total_size > large_threshold and hasattr(api, "upload_large_folder"):
            try:
                # Try calling with ignore_patterns if supported.
                api.upload_large_folder(
                    folder_path=str(local_path),
                    repo_id=repo_id,
                    repo_type="dataset",
                    ignore_patterns=ignore_patterns,
                    token=token,
                )
            except TypeError:
                # Older signatures may not accept ignore_patterns.
                api.upload_large_folder(
                    folder_path=str(local_path),
                    repo_id=repo_id,
                    repo_type="dataset",
                    token=token,
                )
        else:
            # For small-ish folders or when upload_large_folder is not
            # available, use upload_folder.
            api.upload_folder(
                folder_path=str(local_path),
                repo_id=repo_id,
                repo_type="dataset",
                ignore_patterns=ignore_patterns,
            )
        return True
    except httpx.ReadTimeout:
        print()
        print("ERROR: Upload read timed out.")
        print("Possible actions:")
        print(" - Check your network connection and try again.")
        print(" - Try uploading in smaller batches (split large files or directories).")
        print(" - Use `HfApi().upload_large_folder(...)` or the CLI `hf upload-large-folder` if available.")
        print(" - Upgrade huggingface_hub to the latest version in case it adds improved timeout handling.")
        print(" - If you have very large files, consider using git-lfs or the web UI.")
        sys.exit(1)
    except Exception as exc:
        # Any other failure (including signature mismatches): report and let
        # the caller fall back to per-file upload.
        print()
        print("ERROR: Bulk upload failed:")
        print(f" {exc!r}")
        print("Falling back to per-file upload...")
        return False


def _per_file_upload(
    api, files_to_upload: list[tuple[Path, str]], repo_id: str, token: str
) -> None:
    """Upload each file individually (slower but robust for flaky networks).

    Exits the process on read timeout; other per-file failures are reported
    and skipped. NOTE: this fallback does not preserve folder-level commit
    atomicity; it's a pragmatic fallback.
    """
    print("Falling back to per-file upload (this is slower but more robust for flaky networks)...")
    for file_path, rel in files_to_upload:
        try:
            api.upload_file(
                path_or_fileobj=str(file_path),
                path_in_repo=rel,
                repo_id=repo_id,
                repo_type="dataset",
                token=token,
            )
        except httpx.ReadTimeout:
            print()
            print("ERROR: Per-file upload read timed out on:", rel)
            print("You can retry this script or use `hf upload-large-folder` / `HfApi.upload_large_folder`.")
            sys.exit(1)
        except Exception as exc:
            print(f"WARNING: Failed to upload {rel!s}: {exc!r}")
            print(f"Failed to upload: {rel}")


def main() -> None:
    """Entry point: create the dataset repo (if needed) and upload LDData.

    Steps:
      1. Parse CLI args, resolve the local checkout path, resolve the token.
      2. With --dry-run, print intent and stop before any network call.
      3. Create (or reuse) the dataset repo on the Hub.
      4. Attempt one bulk upload; fall back to per-file upload ONLY if the
         bulk upload did not succeed (the previous version re-uploaded every
         file even after a successful bulk upload).
    """
    args = parse_args()

    local_path = Path(args.local_path).expanduser().resolve()
    if not local_path.exists():
        raise SystemExit(f"Local path does not exist: {local_path}")

    # Sanity check: are we in LDData?
    readme = local_path / "README.md"
    if not readme.exists():
        print(
            f"WARNING: {readme} does not exist. "
            "Are you sure this is the LDData repo root?",
            file=sys.stderr,
        )

    token = get_token(args.token)
    repo_id = args.repo_id

    print(f"Using repo_id: {repo_id}")
    print(f"Local path : {local_path}")
    print(f"Private : {args.private}")
    if args.dry_run:
        print("Dry run enabled: NOT creating or uploading, just showing intent.")
        return

    # Initialize API client.
    api = HfApi(token=token)

    # 1. Create (or reuse) the dataset repo on the Hub.
    print(f"Creating (or reusing) dataset repo '{repo_id}' on the Hub...")
    create_repo(
        repo_id=repo_id,
        repo_type="dataset",
        private=args.private,
        exist_ok=True,
        token=token,
    )

    # 2. Upload folder contents.
    # We ignore some typical non-data files to keep the repo clean.
    # Adjust this list if you want to exclude more or fewer things.
    ignore_patterns = [
        ".git/*",
        ".gitignore",
        ".DS_Store",
        "__pycache__/*",
        "*.pyc",
        "*.pyo",
        "*~",
        "*.ipynb_checkpoints*",
        # If you do NOT want to upload the demo notebook or env file, keep these:
        # "LDData Demo.ipynb",
        # "env.yml",
    ]

    print("Uploading local folder to the Hub (this may take a while)...")

    files_to_upload, total_size = _collect_files(local_path, ignore_patterns)

    # Fix for previous behavior: only run the per-file fallback when the
    # bulk upload actually failed.
    uploaded = _bulk_upload(
        api, local_path, repo_id, token, ignore_patterns, total_size
    )
    if not uploaded and files_to_upload:
        _per_file_upload(api, files_to_upload, repo_id, token)

    dataset_url = f"https://huggingface.co/datasets/{repo_id}"
    print()
    print("✅ Upload complete.")
    print(f"Dataset is now available at: {dataset_url}")
    print()
    print("Next steps (recommended):")
    print(" 1. Open the dataset page above in your browser.")
    print(" 2. Edit the Dataset Card (README.md) to:")
    print(" - Add paper links (e.g., your QMCSoftware/LDData arXiv paper).")
    print(" - Add a 'Citations' section.")
    print(" - Add 'Uses' and 'Limitations' sections, similar to:")
    print(" - facebook/omnilingual-asr-corpus")
    print(" - nvidia/PhysicalAI-Autonomous-Vehicles")
    print(" - moondream/refcoco-m")
    print(" 3. Use 'Paper' / 'Dataset' linking in the Hugging Face UI to")
    print(" attach the dataset to your paper so it shows up on the")
    print(" paper page and in discovery views.")
294
+
295
+
296
# Run the uploader only when executed as a script (not when imported).
if __name__ == "__main__":
    main()