title update
- LICENSE +1 -1
- README.md +2 -2
- environment.yaml +1 -1
- lib/common/render.py +4 -10
- lib/dataset/mesh_util.py +1 -1
LICENSE
CHANGED
@@ -40,7 +40,7 @@ You acknowledge that the Data & Software is a valuable scientific resource and a
 Citation:
 
 @inproceedings{xiu2023econ,
-  title = {{ECON: Explicit Clothed humans
+  title = {{ECON: Explicit Clothed humans Optimized via Normal integration}},
   author = {Xiu, Yuliang and Yang, Jinlong and Cao, Xu and Tzionas, Dimitrios and Black, Michael J.},
   booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)},
   month = {June},
README.md
CHANGED
@@ -2,7 +2,7 @@
 
 <p align="center">
 
-<h1 align="center">ECON: Explicit Clothed humans
+<h1 align="center">ECON: Explicit Clothed humans Optimized via Normal integration</h1>
 <p align="center">
 <a href="http://xiuyuliang.cn/"><strong>Yuliang Xiu</strong></a>
 ·
@@ -152,7 +152,7 @@ python -m apps.avatarizer -n <filename>
 
 ```bibtex
 @inproceedings{xiu2023econ,
-  title = {{ECON: Explicit Clothed humans
+  title = {{ECON: Explicit Clothed humans Optimized via Normal integration}},
   author = {Xiu, Yuliang and Yang, Jinlong and Cao, Xu and Tzionas, Dimitrios and Black, Michael J.},
   booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)},
   month = {June},
environment.yaml
CHANGED
@@ -9,7 +9,7 @@ channels:
   - defaults
 dependencies:
   - python=3.8
-  - pytorch-cuda=11.
+  - pytorch-cuda=11.6
   - pytorch=1.13.0
   - nvidiacub
   - torchvision
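The CUDA pin matters because `pytorch-cuda=11.6` has to match the build of the pinned `pytorch=1.13.0` package. A quick sanity check after creating the environment, assuming it was built from this environment.yaml (the expected version strings below are assumptions based on the pins above, not output from the repo):

```python
# Hedged sanity check: confirm the conda environment matches the pins in environment.yaml.
import torch

print(torch.__version__)          # expected to start with "1.13.0" given the pin above
print(torch.version.cuda)         # expected "11.6" after this change
print(torch.cuda.is_available())  # True only with a compatible NVIDIA driver installed
```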
lib/common/render.py
CHANGED
@@ -315,7 +315,7 @@ class Render:
             save_path,
             fourcc,
             self.fps,
-            (width, int(height
+            (width*3, int(height)),
         )
 
         pbar = tqdm(range(len(self.meshes)))
@@ -358,15 +358,9 @@
             img_cloth = blend_rgb_norm(
                 (torch.stack(mesh_renders)[num_obj:, cam_id] - 0.5) * 2.0, data
             )
-
-
-
-                dim=-1).squeeze(0).permute(1, 2, 0).numpy().astype(np.uint8),
-                (width, height // 2)
-            )
-            final_img = np.concatenate(
-                [top_img, img_cloth.squeeze(0).permute(1, 2, 0).numpy().astype(np.uint8)], axis=0
-            )
+            final_img = torch.cat(
+                [img_raw, img_smpl, img_cloth], dim=-1).squeeze(0).permute(1, 2, 0).numpy().astype(np.uint8)
+
             video.write(final_img[:, :, ::-1])
 
         video.release()
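The two hunks are linked: the old code built a stacked layout with `np.concatenate` along the vertical axis (hence the `height // 2` resize), while the new code places the raw, SMPL, and cloth frames side by side, so the `VideoWriter` frame size above becomes `(width*3, int(height))`. A minimal sketch of the new layout, using random tensors in place of the real renders (the names `img_raw`/`img_smpl`/`img_cloth` and the `(1, 3, H, W)` shapes are assumptions matching the diff):

```python
import numpy as np
import torch

# Stand-ins for the real per-frame renders; shape (1, 3, H, W), values in [0, 255].
height, width = 512, 512
img_raw = torch.rand(1, 3, height, width) * 255
img_smpl = torch.rand(1, 3, height, width) * 255
img_cloth = torch.rand(1, 3, height, width) * 255

# Concatenating along the last (width) axis triples the frame width,
# which is why the VideoWriter size changes to (width*3, int(height)).
final_img = torch.cat([img_raw, img_smpl, img_cloth],
                      dim=-1).squeeze(0).permute(1, 2, 0).numpy().astype(np.uint8)
assert final_img.shape == (height, width * 3, 3)

# OpenCV expects BGR frames, hence the channel flip before video.write(...).
frame_bgr = final_img[:, :, ::-1]
```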
lib/dataset/mesh_util.py
CHANGED
@@ -388,7 +388,7 @@ def poisson(mesh, obj_path, depth=10, decimation=True):
     pcl = o3d.io.read_point_cloud(pcd_path)
     with o3d.utility.VerbosityContextManager(o3d.utility.VerbosityLevel.Error) as cm:
         mesh, densities = o3d.geometry.TriangleMesh.create_from_point_cloud_poisson(
-            pcl, depth=depth, n_threads
+            pcl, depth=depth, n_threads=-1
        )
 
     # only keep the largest component
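The old line left the `n_threads` keyword dangling; passing `n_threads=-1` asks Open3D to use all available CPU cores for the Poisson reconstruction. A self-contained sketch of the same call on a toy point cloud (the random points and the `depth=10` value here are placeholders, not the repo's actual inputs):

```python
import numpy as np
import open3d as o3d

# Toy point cloud standing in for the pcd file loaded by poisson() above.
points = np.random.rand(2000, 3)
pcl = o3d.geometry.PointCloud(o3d.utility.Vector3dVector(points))
pcl.estimate_normals()  # Poisson reconstruction requires per-point normals

with o3d.utility.VerbosityContextManager(o3d.utility.VerbosityLevel.Error) as cm:
    # n_threads=-1 uses all available cores (the keyword needs a reasonably recent Open3D)
    mesh, densities = o3d.geometry.TriangleMesh.create_from_point_cloud_poisson(
        pcl, depth=10, n_threads=-1
    )
```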