Spaces: Running on Zero
daidedou committed · Commit d408533 · Parent(s): 0fcda69
Try to fix the GPU abort problem: process on downsampled meshes during optimization
Files changed:
- app.py +27 -12
- config/matching/sds.yaml +2 -2
- notebook_helpers.py +26 -12
- utils/fmap.py +23 -2
- utils/geometry.py +3 -3
- utils/mesh.py +74 -12
app.py
CHANGED
@@ -28,6 +28,7 @@ import yaml
 from utils.surfaces import Surface
 import notebook_helpers as helper
 from utils.meshplot import visu_pts
+from utils.fmap import FM_to_p2p
 from utils.torch_fmap import extract_p2p_torch_fmap, torch_zoomout
 import torch
 import argparse
@@ -94,7 +95,7 @@ DEFAULT_SETTINGS = {
     "deepfeat_conf.fmap.lambda_": 1,
     "sds_conf.zoomout": 32,
     "diffusion.time": 1.0,
-    "opt.n_loop":
+    "opt.n_loop": 200,
     "loss.sds": 1.0,
     "loss.proper": 1.0,
 }
@@ -144,13 +145,17 @@ class Datadicts:
     def __init__(self, shape_path, target_path):
         self.shape_path = shape_path
         basename_1 = os.path.basename(shape_path)
-        self.shape_dict,
+        self.shape_dict, self.shape_dict_down = helper.load_data(shape_path, "tmp/" + os.path.splitext(basename_1)[0]+".npz", "source", make_cache=True)
         self.shape_surf = Surface(filename=shape_path)
+        self.shape_surf_down = Surface(filename=self.shape_dict_down["file"])
         self.target_path = target_path
         basename_2 = os.path.basename(target_path)
-        self.target_dict,
+        self.target_dict, self.target_dict_down = helper.load_data(target_path, "tmp/" + os.path.splitext(basename_2)[0]+".npz", "target", make_cache=True)
         self.target_surf = Surface(filename=target_path)
+        self.target_surf_down = Surface(filename=self.target_dict_down["file"])
+
         self.cmap1 = visu_pts(self.shape_surf)
+        self.cmap1_down = visu_pts(self.shape_surf_down)

 # -----------------------------
 # Gradio UI
@@ -189,16 +194,21 @@ def init_clicked(mesh1_path, mesh2_path,
         raise gr.Error("Please upload both meshes.")
     global datadicts
     datadicts = Datadicts(mesh1_path, mesh2_path)
-    shape_dict, target_dict = convert_dict(datadicts.
+    shape_dict, target_dict = convert_dict(datadicts.shape_dict_down, 'cuda'), convert_dict(datadicts.target_dict_down, 'cuda')
     fmap_model_cuda = matcher.fmap_model.cuda()
     diff_model_cuda = matcher.diffusion_model
     diff_model_cuda.net.cuda()
     C12_pred_init, C21_pred_init, feat1, feat2, evecs_trans1, evecs_trans2 = fmap_model_cuda({"shape1": shape_dict, "shape2": target_dict}, diff_model=diff_model_cuda, scale=matcher.fmap_cfg.diffusion.time)
     C12_pred, C12_obj, mask_12 = C12_pred_init
-
+    evecs1, evecs2 = torch.from_numpy(datadicts.shape_dict["evecs"]).cuda(), torch.from_numpy(datadicts.target_dict["evecs"]).cuda()
+    C_up, C_down = torch.from_numpy(datadicts.target_dict["Cup"]).cuda(), torch.from_numpy(datadicts.shape_dict_down["Cdown"]).cuda()
+    n_fmap = C12_obj.shape[-1]
+    with torch.no_grad():
+        C12_all = C_up.squeeze()[:n_fmap, :n_fmap] @ C12_obj.clone().squeeze() @ C_down.squeeze()[:n_fmap, :n_fmap]
+        p2p_init = FM_to_p2p(C12_all.cpu().numpy(), datadicts.shape_dict["evecs"], datadicts.target_dict["evecs"])
     return build_outputs(datadicts.shape_surf, datadicts.target_surf, datadicts.cmap1, p2p_init, tag="init")

-@spaces.GPU(duration=
+@spaces.GPU(duration=60)
 def run_clicked(mesh1_path, mesh2_path, yaml_path, lambda_val, zoomout_val, time_val, nloop_val, sds_val, proper_val, progress=gr.Progress(track_tqdm=True)):
     if not mesh1_path or not mesh2_path:
         raise gr.Error("Please upload both meshes.")
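In this hunk, C12_obj is estimated in the spectral bases of the decimated meshes; the cached Cup/Cdown matrices (built in notebook_helpers.load_data further down) re-express it in the full-resolution bases before a point-to-point map is decoded. Below is a self-contained toy sketch of that composition. spectral_from_p2p and p2p_from_spectral are simplified stand-ins written here in the usual pyFM-style convention, not the repo's actual p2p_to_FM / FM_to_p2p, and all sizes are made up:

import numpy as np
from scipy.spatial import cKDTree

def spectral_from_p2p(p2p_21, evecs1, evecs2):
    # Least-squares encoding of a vertex map as a functional map:
    # find C12 such that evecs2 @ C12 is close to evecs1[p2p_21].
    return np.linalg.lstsq(evecs2, evecs1[p2p_21], rcond=None)[0]

def p2p_from_spectral(C12, evecs1, evecs2):
    # Decode a functional map back to a vertex map by nearest-neighbour
    # search between spectral embeddings.
    return cKDTree(evecs1 @ C12.T).query(evecs2, k=1)[1]

rng = np.random.default_rng(0)
k = 20
evecs_full_1 = rng.standard_normal((5000, k))   # toy "full resolution" bases
evecs_full_2 = rng.standard_normal((5000, k))
evecs_down_1 = evecs_full_1[::4]                # toy "decimated" bases (every 4th vertex)
evecs_down_2 = evecs_full_2[::4]

# Cdown re-expresses source functions in the decimated basis, Cup lifts target
# functions back to the full basis; both come from nearest-neighbour vertex maps.
C_down = spectral_from_p2p(np.arange(0, 5000, 4), evecs_full_1, evecs_down_1)
C_up = spectral_from_p2p(np.arange(5000) // 4, evecs_down_2, evecs_full_2)

C12_coarse = np.eye(k)                 # stand-in for the map optimized on the decimated pair
C12_full = C_up @ C12_coarse @ C_down  # same composition as C12_all in the hunk above
p2p_init = p2p_from_spectral(C12_full, evecs_full_1, evecs_full_2)
print(p2p_init.shape)                  # one source vertex index per target vertex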
@@ -220,15 +230,19 @@ def run_clicked(mesh1_path, mesh2_path, yaml_path, lambda_val, zoomout_val, time
     if not (datadicts.shape_path == mesh1_path and datadicts.target_path == mesh2_path):
         datadicts = Datadicts(mesh1_path, mesh2_path)

-    shape_dict, target_dict = convert_dict(datadicts.
-    target_normals = torch.from_numpy(datadicts.
+    shape_dict, target_dict = convert_dict(datadicts.shape_dict_down, 'cuda'), convert_dict(datadicts.target_dict_down, 'cuda')
+    target_normals = torch.from_numpy(datadicts.target_surf_down.surfel/np.linalg.norm(datadicts.target_surf_down.surfel, axis=-1, keepdims=True)).float().to("cuda")

     C12_new, p2p, p2p_init, _, loss_save = matcher.optimize(shape_dict, target_dict, target_normals)
-
-
+
+    C_up, C_down = torch.from_numpy(datadicts.target_dict["Cup"]).cuda(), torch.from_numpy(datadicts.shape_dict_down["Cdown"]).cuda()
+    evecs1, evecs2 = torch.from_numpy(datadicts.shape_dict["evecs"]).cuda(), torch.from_numpy(datadicts.target_dict["evecs"]).cuda()
+    evecs_2trans = evecs2.t() @ torch.diag(torch.from_numpy(datadicts.target_dict["mass"]).cuda())
     with torch.no_grad():
-
-
+        n_fmap = C12_new.shape[-1]
+        C12_all = C_up.squeeze()[:n_fmap, :n_fmap] @ C12_new.clone().squeeze() @ C_down.squeeze()[:n_fmap, :n_fmap]
+        C12_end_zo = torch_zoomout(evecs1, evecs2, evecs_2trans, C12_all, matcher.cfg.sds_conf.zoomout)
+        p2p_zo, _ = extract_p2p_torch_fmap(C12_end_zo, evecs1, evecs2)
     return build_outputs(datadicts.shape_surf, datadicts.target_surf, datadicts.cmap1, p2p_zo, tag="run")

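After optimization, the lifted map is refined with torch_zoomout up to the size set by sds_conf.zoomout (32 by default) and only then converted to a point-to-point map. A minimal NumPy sketch of a ZoomOut-style refinement loop, assuming the common convention evecs2_trans = evecs2.T @ diag(mass); this illustrates the idea and is not the repo's torch_zoomout:

import numpy as np
from scipy.spatial import cKDTree

def zoomout_refine(C12, evecs1, evecs2, evecs2_trans, k_final):
    # Alternate between decoding the k x k spectral map to a vertex map and
    # re-encoding that vertex map in a (k+1)-dimensional basis (ZoomOut, Melzi et al. 2019).
    for k in range(C12.shape[0], k_final):
        p2p_21 = cKDTree(evecs1[:, :k] @ C12.T).query(evecs2[:, :k], k=1)[1]
        C12 = evecs2_trans[:k + 1] @ evecs1[p2p_21, :k + 1]
    return C12

rng = np.random.default_rng(1)
n1, n2, k0, k_final = 2000, 2200, 8, 32
evecs1 = rng.standard_normal((n1, k_final))
evecs2 = rng.standard_normal((n2, k_final))
mass2 = np.full(n2, 1.0 / n2)            # toy target vertex areas
evecs2_trans = evecs2.T * mass2          # same as evecs2.T @ diag(mass2)

C12_refined = zoomout_refine(np.eye(k0), evecs1, evecs2, evecs2_trans, k_final)
print(C12_refined.shape)                 # (32, 32)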
@@ -327,5 +341,6 @@ if __name__ == "__main__":
     print("Matcher ready")
     #shutil.rmtree("tmp")
     os.makedirs("tmp", exist_ok=True)
+    os.makedirs("tmp/plys", exist_ok=True)
     datadicts = None
     demo.launch(share=args.share)
|
|
config/matching/sds.yaml
CHANGED
@@ -23,8 +23,8 @@ deepfeat_conf:
   abs: True
   normalize: False
   time: 1
-  batch_sds:
-  batch_mask:
+  batch_sds: 1
+  batch_mask: 16

 opt:
   n_loop: 300
notebook_helpers.py
CHANGED
@@ -1,27 +1,41 @@
 from utils.mesh import load_mesh
 from utils.geometry import get_operators, load_operators
 import os
-from utils.
+from utils.fmap import p2p_to_FM, KNNSearch
 from utils.surfaces import Surface
 import numpy as np

-
-
-
-
-
-
-
+
+def get_down_p2ps(verts, verts_down):
+    tree = KNNSearch(verts)
+    ind_up = tree.query(verts_down, k=1).flatten()
+
+    tree_down = KNNSearch(verts_down)
+    ind_down = tree_down.query(verts, k=1).flatten()
+    return ind_up, ind_down
+
+def load_data(file, cache_path, name, num_evecs=128, make_cache=False):
+    surf, surf_down, file_down = load_mesh(file)
+    cache_down = cache_path[:-4] + "_down" + ".npz"
     # print("Cache is: ", cache_path)
     if not os.path.exists(cache_path) or make_cache:
         print("Computing operators ...")
-
+        vnormals = surf.computeVertexNormals()
+        get_operators(surf.vertices, surf.faces, num_evecs, cache_path, vnormals)
+        vnormals_down = surf_down.computeVertexNormals()
+        get_operators(surf_down.vertices, surf_down.faces, num_evecs, cache_down, vnormals_down)
     data_dict = load_operators(cache_path)
     data_dict['name'] = name
-    data_dict['
-
+    data_dict['vertices'] = surf.vertices
+    data_dict_down = load_operators(cache_down)
+    data_dict_down['name'] = name
+    data_dict_down['file'] = file_down
+    data_dict_down['vertices'] = surf_down.vertices
+    p2p_up, p2p_down = get_down_p2ps(surf.vertices, surf_down.vertices)
+    data_dict_down["Cdown"] = p2p_to_FM(p2p_up, data_dict['evecs'], data_dict_down['evecs'])
+    data_dict["Cup"] = p2p_to_FM(p2p_down, data_dict_down['evecs'], data_dict['evecs'])
     #batchify_dict(data_dict_torch)
-    return data_dict,
+    return data_dict, data_dict_down

 def get_map_info(file_1, file_2, dict_1, dict_2, dataset):
     shape_dict, target_dict = dict_1, dict_2
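As I read get_down_p2ps, the two arrays point in opposite directions: ind_up holds, for every decimated vertex, its nearest full-resolution vertex, and ind_down the reverse. A toy check with cKDTree directly (the same search that KNNSearch wraps), using made-up data:

import numpy as np
from scipy.spatial import cKDTree

verts = np.random.rand(1000, 3)      # full-resolution vertices
verts_down = verts[::10].copy()      # stand-in for the decimated vertices

ind_up = cKDTree(verts).query(verts_down, k=1)[1]        # one full-res index per decimated vertex
ind_down = cKDTree(verts_down).query(verts, k=1)[1]      # one decimated index per full-res vertex

assert ind_up.shape == (verts_down.shape[0],)
assert ind_down.shape == (verts.shape[0],)
assert np.array_equal(ind_up, np.arange(0, 1000, 10))    # down vertex i sits exactly on full vertex 10*i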
|
|
|
utils/fmap.py
CHANGED
@@ -4,13 +4,12 @@ import sys
 import numpy as np
 import scipy.linalg
 from tqdm import tqdm
+from scipy.spatial import cKDTree

 ROOT_DIR = osp.join(osp.abspath(osp.dirname(__file__)), '../')
 if ROOT_DIR not in sys.path:
     sys.path.append(ROOT_DIR)

-from utils_fmaps.misc import KNNSearch
-
 try:
     import pynndescent
     index = pynndescent.NNDescent(np.random.random((100, 3)), n_jobs=2)
@@ -19,6 +18,28 @@ try:
 except ImportError:
     ANN = False

+
+class KNNSearch(object):
+    DTYPE = np.float32
+    NJOBS = 4
+
+    def __init__(self, data):
+        self.data = np.asarray(data, dtype=self.DTYPE)
+        self.kdtree = cKDTree(self.data)
+
+    def query(self, kpts, k, return_dists=False):
+        kpts = np.asarray(kpts, dtype=self.DTYPE)
+        nndists, nnindices = self.kdtree.query(kpts, k=k, workers=self.NJOBS)
+        if return_dists:
+            return nnindices, nndists
+        else:
+            return nnindices
+
+    def query_ball(self, kpt, radius):
+        kpt = np.asarray(kpt, dtype=self.DTYPE)
+        assert kpt.ndim == 1
+        nnindices = self.kdtree.query_ball_point(kpt, radius, n_jobs=self.NJOBS)
+        return nnindices
 # https://github.com/RobinMagnet/pyFM

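The new KNNSearch replaces the utils_fmaps.misc import with a thin wrapper around scipy.spatial.cKDTree. One detail that may be worth checking: query passes workers= while query_ball passes n_jobs=, and depending on the installed SciPy version only one of those keyword names is accepted (the parallelism argument was renamed from n_jobs to workers around SciPy 1.6). A short usage sketch against the SciPy API directly, on toy data:

import numpy as np
from scipy.spatial import cKDTree

data = np.random.rand(500, 3).astype(np.float32)
tree = cKDTree(data)

queries = np.random.rand(10, 3).astype(np.float32)
nndists, nnindices = tree.query(queries, k=1, workers=4)      # nearest-neighbour indices
ball = tree.query_ball_point(data[0], r=0.05, workers=4)      # all indices within a radius
print(nnindices.shape, len(ball))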
|
|
|
|
utils/geometry.py
CHANGED
@@ -477,7 +477,7 @@ def get_operators(verts, faces, k_eig=128, cache_path=None, normals=None, overwr
         if (not np.array_equal(verts, cache_verts)) or (not np.array_equal(faces, cache_faces)):
             i_cache_search += 1
             print("hash collision! searching next.")
-
+            overwrite_cache = True

         # print(" cache hit!")

@@ -755,8 +755,8 @@ def get_operators_small(verts, faces, k_eig=128, cache_path=None, overwrite_cach
         # If the cache doesn't match, keep looking
         if (not np.array_equal(verts, cache_verts)) or (not np.array_equal(faces, cache_faces)):
             i_cache_search += 1
-            print("hash collision!
-
+            print("hash collision! overwriting.")
+            overwrite_cache = True

         # print(" cache hit!")

|
utils/mesh.py
CHANGED
@@ -10,7 +10,8 @@ import sys
 from utils.surfaces import Surface, centroid
 sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 from shape_data import get_data_dirs
-
+import vtk
+from vtk.util import numpy_support
 # List of file extensions to consider as "mesh" files.
 # Kudos to chatgpt!
 # Add or remove extensions here as needed.

@@ -104,24 +105,85 @@ def save_ply(file_name, V, F, Rho=None, color=None):



-def load_mesh(filepath, scale=True
+def load_mesh(filepath, scale=True):
     V, F = pp3d.read_mesh(filepath)
+    down_V, down_F = decimation(V, F)
+    down_surf = Surface(FV=[down_F,down_V])
+    down_file = os.path.join("tmp/plys", os.path.basename(filepath)[:-4])
+    save_ply(down_file, down_surf.vertices, down_surf.faces)
     surf = Surface(FV=[F,V])
     center, area = centroid(surf)
     if scale:
         new_vertices = (surf.vertices - center)/area
+        new_vertices_down = (down_surf.vertices - center) / area
     else:
         new_vertices = (surf.vertices - center)
-
-
-
-
-
-
-
-
-
-
+        new_vertices_down = (down_surf.vertices - center) / area
+    surf.updateVertices(np.array(new_vertices, dtype=np.float32))
+    down_surf.updateVertices(np.array(new_vertices_down, dtype=np.float32))
+    return surf, down_surf, down_file+".ply"
+
+
+
+def add_vectors(vertices: np.ndarray, faces: np.ndarray, vtk_vertices, vtk_faces):
+    assert len(vertices.shape) == 2
+    assert len(faces.shape) == 2
+    assert vertices.shape[1] == 3
+    assert faces.shape[1] == 3
+
+    # Add points
+    [num_vertices, _] = vertices.shape
+    for vertex_idx in range(num_vertices):
+        vtk_vertices.InsertNextPoint(vertices[vertex_idx, 0], vertices[vertex_idx, 1], vertices[vertex_idx, 2])
+    [num_faces, _] = faces.shape
+    for face_idx in range(num_faces):
+        vtk_faces.InsertNextCell(3)
+        for corner_idx in range(3):
+            vtk_faces.InsertCellPoint(faces[face_idx, corner_idx])
+    # Allocate additional memory
+    vtk_vertices.Resize(num_vertices)
+    # self._faces.Resize(self.num_faces + num_faces*3)
+
+    vtk_vertices.Modified()
+    vtk_faces.Modified()
+
+def decimation(verts, faces, target_n=1500):
+    vtk_vertices = vtk.vtkPoints()
+
+    # VTK polygone(surface) representation
+    vtk_faces = vtk.vtkCellArray()
+    surface_data = vtk.vtkPolyData()
+    surface_data.SetPoints(vtk_vertices)
+    surface_data.SetPolys(vtk_faces)
+    add_vectors(verts, faces, vtk_vertices, vtk_faces)
+
+
+    ratio = target_n /verts.shape[0]
+    print(ratio)
+    print("Before decimation\n"
+          "-----------------\n"
+          "There are " + str(surface_data.GetNumberOfPoints()) + "points.\n"
+          "There are " + str(surface_data.GetNumberOfPolys()) + "polygons.\n")
+
+    decimate = vtk.vtkQuadricDecimation()
+    decimate.SetInputData(surface_data)
+    decimate.SetTargetReduction(1-ratio)
+    decimate.Update()
+
+    decimatedPoly = vtk.vtkPolyData()
+    decimatedPoly.ShallowCopy(decimate.GetOutput())
+
+    print("After decimation \n"
+          "-----------------\n"
+          "There are " + str(decimatedPoly.GetNumberOfPoints()) + "points.\n"
+          "There are " + str(decimatedPoly.GetNumberOfPolys()) + "polygons.\n")
+    points = decimatedPoly.GetPoints().GetData()
+    vertices = numpy_support.vtk_to_numpy(points)  # shape: (n_points, 3)
+
+    # --- Faces (connectivity) ---
+    faces = decimatedPoly.GetPolys().GetData()
+    faces_np = numpy_support.vtk_to_numpy(faces)
+    faces_np = faces_np.reshape(-1, 4)[:, 1:]
+    return vertices, faces_np


 def mesh_geod_matrix(vertices, faces, do_tqdm=False, verbose=False):
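decimation converts the NumPy arrays to vtkPolyData and runs vtkQuadricDecimation with TargetReduction = 1 - target_n / n_verts; if a mesh already has fewer than target_n vertices that value goes negative, so a clamp may be worth adding. A minimal self-contained run of the same filter on VTK's built-in sphere source:

import vtk

# Build a dense sphere as a stand-in input mesh.
sphere = vtk.vtkSphereSource()
sphere.SetThetaResolution(100)
sphere.SetPhiResolution(100)
sphere.Update()
poly = sphere.GetOutput()

target_n = 1500
ratio = min(1.0, target_n / poly.GetNumberOfPoints())   # clamp for already-small meshes

decimate = vtk.vtkQuadricDecimation()
decimate.SetInputData(poly)
decimate.SetTargetReduction(1.0 - ratio)                # fraction of triangles to remove
decimate.Update()

print(poly.GetNumberOfPoints(), "->", decimate.GetOutput().GetNumberOfPoints())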
|
|
|