aclegg3 committed
Commit: e1a7176
Parent(s): c6b323f

add a script to copy asset files for specific scenes from fphab to another directory

Files changed: get_scene_object_assets.py (+162, -0)
get_scene_object_assets.py
ADDED
@@ -0,0 +1,162 @@
# Copyright (c) Meta Platforms, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import argparse
import json
import os
import shutil
from typing import Callable, List

def file_endswith(filepath: str, end_str: str) -> bool:
    """
    Return whether or not the filepath ends with the given string.
    """
    return filepath.endswith(end_str)

def find_files(root_dir: str, discriminator: Callable[[str, str], bool], disc_str: str) -> List[str]:
    """
    Recursively find all filepaths under a root directory satisfying a particular constraint as defined by a discriminator function.

    :param root_dir: The root directory for the recursive search.
    :param discriminator: The discriminator function which takes a filepath and discriminator string and returns a bool.
    :param disc_str: The discriminator string passed to the discriminator function.

    :return: The list of all absolute filepaths found satisfying the discriminator.
    """
    filepaths: List[str] = []

    if not os.path.exists(root_dir):
        print(" Directory does not exist: " + str(root_dir))
        return filepaths

    for entry in os.listdir(root_dir):
        entry_path = os.path.join(root_dir, entry)
        if os.path.isdir(entry_path):
            sub_dir_filepaths = find_files(entry_path, discriminator, disc_str)
            filepaths.extend(sub_dir_filepaths)
        # apply a user-provided discriminator function to cull filepaths
        elif discriminator(entry_path, disc_str):
            filepaths.append(entry_path)
    return filepaths

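# Illustrative sketch (not taken from the dataset): the function below reads only the
# "object_instances" array and each instance's "template_name"; a minimal scene
# instance file could therefore look roughly like this (hypothetical values):
# {
#   "object_instances": [
#     { "template_name": "0a1b2c3d" },
#     { "template_name": "9f8e7d6c" }
#   ]
# }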
def get_model_ids_from_scene_instance_json(filepath: str) -> List[str]:
    """
    Scrape a list of all unique model ids from the scene instance file.
    """
    assert filepath.endswith(".scene_instance.json"), "Must be a scene instance JSON."

    model_ids = []

    with open(filepath, "r") as f:
        scene_conf = json.load(f)
        if "object_instances" in scene_conf:
            for obj_inst in scene_conf["object_instances"]:
                model_ids.append(obj_inst["template_name"])
        else:
            print("No 'object_instances' field detected; are you sure this is a scene instance file?")

    print(f" {filepath} has {len(model_ids)} object instances.")
    model_ids = list(set(model_ids))
    print(f" {filepath} has {len(model_ids)} unique objects.")

    return model_ids

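# Illustrative sketch (hypothetical paths): with root_dir="/data/fphab",
# source_file="/data/fphab/objects/abc.glb", and destination_dir="/out",
# the function below copies the asset to "/out/objects/abc.glb", recreating
# the source's directory structure relative to root_dir.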
def copy_file_to(root_dir: str, source_file: str, destination_dir: str) -> None:
    """
    Copy a file, preserving its folder structure relative to root_dir, into a target directory.
    """
    abs_dest = os.path.abspath(destination_dir)
    rel_source = os.path.relpath(source_file, root_dir)
    dest_rel_path = os.path.join(abs_dest, rel_source)
    print(f"source_file = {source_file}")
    print(f"dest_file = {dest_rel_path}")
    os.makedirs(os.path.dirname(dest_rel_path), exist_ok=True)
    shutil.copyfile(source_file, dest_rel_path)


# ------------------------------------------------------
# Run this script to copy asset files for a scene into another directory (e.g. benchmark files)
# e.g. python get_scene_object_assets.py --dataset-root-dir <path-to>/fphab/ --scenes 102816009 --file-ends .glb .object_config.json .ply --dest-dir <path-to>/hab3_bench_assets/hab3-hssd/
# ------------------------------------------------------
def main():
    parser = argparse.ArgumentParser(
        description="Get all specified asset files associated with the models in a given scene."
    )
    parser.add_argument(
        "--dataset-root-dir",
        type=str,
        help="path to HSSD SceneDataset root directory containing 'fphab-uncluttered.scene_dataset_config.json'.",
    )
    parser.add_argument(
        "--scenes",
        nargs="+",
        type=str,
        help="one or more scene ids",
    )
    parser.add_argument(
        "--dest-dir",
        type=str,
        default=None,
        help="Path to destination directory if copy is desired.",
    )
    parser.add_argument(
        "--render-only",
        action="store_true",
        help="If selected, only render glbs will be selected (i.e., no collision or receptacle assets).",
    )
    parser.add_argument(
        "--file-ends",
        type=str,
        nargs="+",
        help="One or more file ending strings to look for.",
    )

    args = parser.parse_args()
    scene_ids = list(dict.fromkeys(args.scenes))

    fp_root_dir = args.dataset_root_dir
    config_root_dir = os.path.join(fp_root_dir, "scenes-uncluttered")
    configs = find_files(config_root_dir, file_endswith, ".scene_instance.json")
    my_files = [f for end_str in args.file_ends for f in find_files(fp_root_dir, file_endswith, end_str)]

    # for render only
    if args.render_only:
        render_glbs = [f for f in my_files if (".collider" not in f and ".filteredSupportSurface" not in f and f.endswith(".glb"))]
        my_files = render_glbs

    scene_asset_filepaths = {}
    for filepath in configs:
        # these should be removed, but screen them for now
        if "orig" in filepath:
            print(f"Skipping alleged 'original' instance file {filepath}")
            continue
        for scene_id in scene_ids:
            # NOTE: add the extension back here to avoid partial matches
            if scene_id + ".scene_instance.json" in filepath:
                print(f"filepath '{filepath}' matches scene_id '{scene_id}'")
                assert scene_id not in scene_asset_filepaths, f"Duplicate scene instance file {filepath} found for scene_id {scene_id}"
                scene_asset_filepaths[scene_id] = []
                model_ids = get_model_ids_from_scene_instance_json(filepath)
                for model_id in model_ids:
                    for f in my_files:
                        model_id_split = f.split(model_id)
                        # we only want files which exactly match the model id. Some short model ids are concatenated with another via '_'; these must be culled.
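                        # Illustrative (hypothetical ids/paths): for model_id "abc123",
                        #   ".../abc123.glb"     -> kept   (character after the id is '.')
                        #   ".../abc1234.glb"    -> culled (character after the id is '4')
                        #   ".../xyz_abc123.glb" -> culled (character before the id is '_')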
                        if len(model_id_split) > 1 and model_id_split[-1][0] == "." and model_id_split[-2][-1] != "_":
                            if "part" in f and "part" not in model_id:
                                continue
                            if f not in scene_asset_filepaths[scene_id]:
                                scene_asset_filepaths[scene_id].append(f)


    for scene_id in scene_asset_filepaths.keys():
        print(f"Scene {scene_id}")
        for asset_path in scene_asset_filepaths[scene_id]:
            print(f" {asset_path}")
            if args.dest_dir is not None:
                copy_file_to(fp_root_dir, asset_path, args.dest_dir)
    for scene, models in scene_asset_filepaths.items():
        print(f" Scene {scene} contains {len(models)} requested assets.")


if __name__ == "__main__":
    main()