# QuasiSim Gradio demo (HuggingFace Spaces)
import numpy as np
import gradio as gr
import os
import tempfile
import shutil

# from gradio_inter.predict_from_file import predict_from_file
# from gradio_inter.create_bash_file import create_bash_file
def create_bash_file(single_path_seq):
    """Generate a runnable bash script targeting *single_path_seq*.

    Reads the stage-1 template script, substitutes its placeholder data
    path with the given sequence path, and writes the result to a NEW
    file in the system temp directory.

    Args:
        single_path_seq: Path to the input ``.npy`` trajectory file.

    Returns:
        Path to the generated bash script.
    """
    ori_bash_file = "./scripts_demo/train_grab_pointset_points_dyn_s1.sh"
    with open(ori_bash_file) as rf:
        bash_string = rf.read()
    bash_string = bash_string.replace(
        "./data/102_grab_all_data.npy", single_path_seq
    )
    # BUG FIX: the original wrote the substituted script back over the
    # template itself, destroying the placeholder after the first call and
    # silently breaking every later invocation. Write to a temp file instead
    # so the template stays intact and the function is reusable.
    dst_bash_file = os.path.join(
        tempfile.gettempdir(), "train_grab_pointset_points_dyn_s1.sh"
    )
    with open(dst_bash_file, "w") as wf:
        wf.write(bash_string)
    return dst_bash_file
# from sample.reconstruct_data_taco import reconstruct_from_file
def create_temp_file(path: str) -> str:
    """Stage *path* into a scratch ``denoising`` folder and return the copy.

    Args:
        path: Path of the file to stage.

    Returns:
        Path of the staged copy inside ``<tempdir>/denoising``.
    """
    temp_dir = tempfile.gettempdir()
    temp_folder = os.path.join(temp_dir, "denoising")
    os.makedirs(temp_folder, exist_ok=True)
    # os.path.basename is portable; splitting on "/" breaks on Windows paths.
    temp_path = os.path.join(temp_folder, os.path.basename(path))
    # copy2 preserves file metadata (timestamps) along with the contents.
    shutil.copy2(path, temp_path)
    return temp_path
# from gradio_inter.predict import predict_from_data
# from gradio_inter.predi
def transpose(matrix):
    """Return the transpose of *matrix* via its ``.T`` property."""
    transposed = matrix.T
    return transposed
def predict(file_path: str):
    """Run the stage-1 optimization on *file_path* and return the result path.

    The uploaded trajectory is staged into a temp folder, a bash driver
    script is generated for it and executed synchronously, and the path
    where the script saves ``ts_to_hand_obj_verts.npy`` is returned.

    Args:
        file_path: Path to the uploaded ``.npy`` demonstration file.

    Returns:
        Path to the expected output ``.npy`` file.
    """
    staged_input = create_temp_file(file_path)
    driver_script = create_bash_file(staged_input)
    # Blocking call: the optimization runs to completion before we return.
    os.system(f"bash {driver_script}")

    # Build <tempdir>/quasi_sim/exp, creating each level as we go.
    exp_dir = os.path.join(tempfile.gettempdir(), "quasi_sim")
    os.makedirs(exp_dir, exist_ok=True)
    exp_dir = os.path.join(exp_dir, "exp")
    os.makedirs(exp_dir, exist_ok=True)

    exp_dir = f"{exp_dir}/wmask"
    print(f"self.base_exp_dir:", exp_dir)
    exp_dir = exp_dir + f"_reverse_value_totviews_"
    os.makedirs(exp_dir, exist_ok=True)

    # The bash script is expected to have written its result here.
    # NOTE(review): nothing checks that the file actually exists — the bash
    # run is assumed to have succeeded; verify against the script's output.
    return os.path.join(exp_dir, f"ts_to_hand_obj_verts.npy")
# res_file_path = "/tmp/denoising/save/predicted_infos_seed_0_tag_20231104_017_jts_spatial_t_100__st_0.npy"
# saved_path = reconstruct_from_file(temp_file_path)
# return saved_path
def create_demo():
    """Build and return the Gradio Blocks demo for QuasiSim stage 1.

    Lays out the usage markdown, a file-in/file-out interface wired to
    :func:`predict`, and a clickable example input.

    Returns:
        The assembled ``gr.Blocks`` demo (not yet launched).
    """
    USAGE = """# QuasiSim: Parameterized Quasi-Physical Simulators for Dexterous Manipulations Transfer

**[Project](https://meowuu7.github.io/QuasiSim/) | [Github](https://github.com/Meowuu7/QuasiSim)**

This demo transforms the input human manipulation demonstration to the trajectory of `point set` (a relaxed representation of articulated rigid object introduced in QuasiSim). It is the first step of the first stage of our optimization process. Please checkout our [github repo](https://github.com/Meowuu7/QuasiSim) for more details and instructions of running locally.

## Input data format

Currently, the demo accepts a `.npy` file containing a human manipulation trajectory organized as the following format:
```python
{
    "sv_dict": {
        "rhand_global_orient_gt": numpy.ndarray(seq_length, 3), # MANO global orientation coefficient
        "rhand_transl": numpy.ndarray(seq_length, 3), # MANO global translation coefficient
        "rhand_verts": numpy.ndarray(seq_length, 778, 3), # MANO hand vertices
        "object_global_orient": numpy.ndarray(seq_length, 3), # Object global orientation (represented as rotation vectors, check below for details w.r.t. how to convert it to the rotation matrix)
        "object_transl": numpy.ndarray(seq_length, 3), # Object global translations
        "obj_faces": numpy.ndarray(nn_faces, 3), # Object mesh faces
        "obj_verts": numpy.ndarray(nn_vertices, 3), # Object mesh vertices
        "obj_vertex_normals": numpy.ndarray(nn_vertices, 3), # Object mesh vertex normals
    },
    "obj_sdf": numpy.ndarray(sdf_res, sdf_res, sdf_res), # Pre-processed object SDF values (see below for SDF processing details)
}
```

**How to transform the object global orientation to the rotation matrix**: The object global orientation is represented as the rotation vector. To convert it to the rotation matrix, you can use `scipy.spatial.transform.Rotation` as follows:
```python
from scipy.spatial.transform import Rotation
r = Rotation.from_rotvec(object_global_orient)
object_global_orient_rotmat = r.as_matrix()
```

To transform the canonical object mesh vertices using the orientation vector (`object_global_orient`) and the global translation (`object_global_trans`), you can use the following code:
```python
from scipy.spatial.transform import Rotation
r = Rotation.from_rotvec(object_global_orient)
object_global_orient_rotmat = r.as_matrix()
cur_transformed_verts = np.matmul(
    obj_verts, object_global_orient_rotmat
) + object_global_trans[None, :]
```

We use [mesh-to-sdf](https://github.com/wang-ps/mesh2sdf) to pre-process the object mesh to the SDF values. The SDF values are stored in a 3D numpy array with the shape of `(sdf_res, sdf_res, sdf_res)`, where `sdf_res` is set to `128` in our experiments. Please check out [compute sdf](https://github.com/Meowuu7/QuasiSim/blob/main/utils/grab_preprocessing.py#L151) for our pre-processing function.

Currently, the demo only accepts input trajectories with 60 frames.

We provide an example [here](https://1drv.ms/u/s!AgSPtac7QUbHgVncbYUdKI1f5TvE?e=sRhirK).

## To run the demo,

1. Upload a `numpy` file to the left box by dragging your file or clicking the box to open the file explorer.
2. Click the `Submit` button to run the demo.
3. The optimized trajectory of the point set will be output as a `.npy` file and can be downloaded from the right box.

Since the model runs on CPU currently, the speed is quite slow. For instance, it takes around 32h (yeah, hours...) to process the [example](https://1drv.ms/u/s!AgSPtac7QUbHgVoY8jPkPZfrDkJw?e=JYFi5a) mentioned above which contains 60 frames. However, it takes only several minutes to complete when running on a GPU! Therefore, we highly recommend checking out our [github repo](https://github.com/Meowuu7/QuasiSim), setting up an environment with GPU support, and running it locally.

## Output data format

The output is a `.npy` file containing the optimized trajectory of the point set sequence organized as a `dict` in the following format:
```python
{
    "ts_to_hand_obj_verts": {
        ts: (hand points (numpy.ndarray(number of points contained in the point set rep, 3)), object points (numpy.ndarray(nn_vertices, 3))) for ts in range(0, seq_length)
    }
}
```

The corresponding output file of the [example](https://1drv.ms/u/s!AgSPtac7QUbHgVoY8jPkPZfrDkJw?e=JYFi5a) mentioned above can be downloaded [here](https://1drv.ms/u/s!AgSPtac7QUbHgVoY8jPkPZfrDkJw?e=JYFi5a).
"""
    with gr.Blocks() as demo:
        gr.Markdown(USAGE)
        input_file = gr.File(type="filepath")
        output_file = gr.File(type="filepath")
        # Interface wires the upload box to predict() and the result to the
        # download box; no example caching since a run takes hours on CPU.
        gr.Interface(
            predict,
            input_file,
            output_file,
            cache_examples=False,
        )
        gr.Examples(
            examples=[os.path.join(os.path.dirname(__file__), "./data/102_grab_all_data.npy")],
            inputs=input_file,
            fn=predict,
            outputs=output_file,
        )
    return demo
if __name__ == "__main__":
    # Build the Blocks app and start the Gradio server when run as a script.
    create_demo().launch()