gvecchio committed on
Commit
37115c1
1 Parent(s): bedaaa3

Update scripts/download_dataset.py

Browse files
Files changed (1) hide show
  1. scripts/download_dataset.py +43 -31
scripts/download_dataset.py CHANGED
@@ -1,41 +1,53 @@
 
1
  import argparse
2
- import requests
3
  from pathlib import Path
 
4
 
5
# Material categories available in the dataset; used as the default when the
# caller does not restrict the download to specific classes.
classlist = ["Blends", "Ceramic", "Concrete", "Fabric", "Leather", "Marble", "Metal", "Misc", "Plaster", "Plastic", "Stone", "Terracotta", "Wood"]
6
 
7
def download_stream(url, dest_file):
    """Stream the resource at *url* to *dest_file* in 8 KiB chunks.

    Streaming avoids holding the whole (potentially multi-GB) archive in
    memory. Raises ``requests.HTTPError`` on a non-2xx response.
    """
    response = requests.get(url, stream=True)
    with response:
        # Fail fast on HTTP errors before creating the destination file.
        response.raise_for_status()
        with open(dest_file, "wb") as sink:
            for block in response.iter_content(chunk_size=8192):
                sink.write(block)
13
 
14
def download_dataset(base_dir, class_names=None):
    """Download MatSynth category archives into ``base_dir/train`` and ``base_dir/test``.

    Parameters
    ----------
    base_dir : pathlib.Path
        Root directory; ``train/`` and ``test/`` subdirectories are created under it.
    class_names : str, optional
        Comma-separated category names. When omitted or empty, every category
        in the module-level ``classlist`` is downloaded.
    """
    dset_url = "https://huggingface.co/datasets/gvecchio/MatSynth/resolve/main/maps"

    classes = class_names.split(",") if class_names else classlist
    if classes:
        for split in ["train", "test"]:
            dest_dir = base_dir/split
            dest_dir.mkdir(parents=True, exist_ok=True)

            for class_name in classes:
                req = f"{dset_url}/{split}/{class_name}.zip"
                download_stream(req, dest_dir/(class_name + ".zip"))

                # NOTE(review): presumably these two categories are published as
                # multi-part zip archives (.zip + .z01) on the train split only —
                # confirm against the remote layout. "Ground" is not in
                # ``classlist``, so that branch is unreachable with defaults.
                if class_name in ["Ground", "Wood"] and split == "train":
                    req = f"{dset_url}/{split}/{class_name}.z01"
                    download_stream(req, dest_dir/(class_name + ".z01"))
31
if __name__ == "__main__":
    # Create argument parser
    parser = argparse.ArgumentParser(description="Download dataset categories.")
    parser.add_argument("--base_dir", required=True, help="Base directory to save the downloaded files.")
    parser.add_argument("--class_names", help="Specify the class name to download a specific category.")
    args = parser.parse_args()

    # Call the download_dataset function with command-line arguments
    download_dataset(Path(args.base_dir), args.class_names)

    # Example: python download_dataset.py --base_dir /path/to/save --class_names Leather
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datasets import load_dataset
2
  import argparse
 
3
  from pathlib import Path
4
+ import json
5
 
 
6
 
7
def process_batch(sample):
    """Pass-through hook for ``Dataset.map``; returns *sample* untouched.

    Kept as an extension point so per-sample processing can be plugged in
    without touching the download loop.
    """
    return sample
 
 
 
 
9
 
 
 
10
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
if __name__ == "__main__":
    # Create argument parser
    parser = argparse.ArgumentParser(description="Download dataset.")
    parser.add_argument("--base_dir", required=True, help="Directory to save the downloaded files.")
    args = parser.parse_args()

    base_dir = Path(args.base_dir)
    base_dir.mkdir(exist_ok=True, parents=True)

    # Load dataset in streaming mode so samples are fetched lazily instead of
    # downloading the full archive up front.
    ds = load_dataset(
        "gvecchio/MatSynth",
        streaming=True,
    )

    # Hook for per-sample processing; currently a no-op.
    # (batch_size is ignored when batched=False, so it is not passed.)
    ds = ds.map(process_batch, batched=False)

    # PBR maps stored with each sample; each is written out as one PNG.
    # blend_mask is last so the on-disk write order matches the previous script.
    map_names = (
        "basecolor", "diffuse", "displacement", "specular", "height",
        "metallic", "normal", "opacity", "roughness", "blend_mask",
    )

    for split in ds:
        for item in ds[split]:
            name = item["name"]
            dest_dir = base_dir / split / item["metadata"]["category"] / name
            dest_dir.mkdir(exist_ok=True, parents=True)

            # Save metadata. physical_size is stringified first because it is
            # not JSON-serializable as stored.
            metadata = item["metadata"]
            metadata["physical_size"] = str(metadata["physical_size"])
            with open(dest_dir / "metadata.json", "w") as f:
                json.dump(metadata, f, indent=4)

            # Save images. A map may be absent/None (e.g. blend_mask on
            # non-blend materials), so skip missing entries instead of crashing.
            for map_name in map_names:
                image = item.get(map_name)
                if image is not None:
                    image.save(dest_dir / f"{map_name}.png")