# install

import glob
import gradio as gr
import os
import random
import subprocess

if os.getenv('SYSTEM') == 'spaces':
    subprocess.run('pip install pyembree'.split())
    subprocess.run('pip install git+https://github.com/danielgatis/rembg.git@v2.0.13'.split())
    subprocess.run(
        'pip install torch==1.11.0+cu113 torchvision==0.12.0+cu113 -f https://download.pytorch.org/whl/cu113/torch_stable.html'.split())
    subprocess.run(
        'pip install git+https://github.com/YuliangXiu/kaolin.git'.split())
    subprocess.run('pip install --no-index --no-cache-dir pytorch3d -f https://dl.fbaipublicfiles.com/pytorch3d/packaging/wheels/py38_cu113_pyt1110/download.html'.split())
    subprocess.run(
        'pip install git+https://github.com/Project-Splinter/human_det.git'.split())
    subprocess.run(
        'pip install git+https://github.com/YuliangXiu/neural_voxelization_layer.git'.split())
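
# The pip installs above run only when SYSTEM == 'spaces' (i.e. inside the
# Hugging Face Spaces runtime) and pin CUDA 11.3 / Python 3.8 wheels to match
# that environment. The import below is deferred until after those installs
# because apps.infer presumably depends on them (torch, kaolin, pytorch3d,
# rembg, human_det, neural_voxelization_layer).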
from apps.infer import generate_model

# running
description = '''
# ICON Clothed Human Digitization
### ICON: Implicit Clothed humans Obtained from Normals (CVPR 2022)

<table>
<th>
<ul>
<li><strong>Homepage</strong> <a href="http://icon.is.tue.mpg.de">icon.is.tue.mpg.de</a></li>
<li><strong>Code</strong> <a href="https://github.com/YuliangXiu/ICON">YuliangXiu/ICON</a></li>
<li><strong>Paper</strong> <a href="https://arxiv.org/abs/2112.09127">arXiv</a>, <a href="https://readpaper.com/paper/4569785684533977089">ReadPaper</a></li>
<li><strong>Chatroom</strong> <a href="https://discord.gg/Vqa7KBGRyk">Discord</a></li>
</ul>
<a href="https://twitter.com/yuliangxiu"><img alt="Twitter Follow" src="https://img.shields.io/twitter/follow/yuliangxiu?style=social"></a>
<iframe src="https://ghbtns.com/github-btn.html?user=yuliangxiu&repo=ICON&type=star&count=true&v=2&size=small" frameborder="0" scrolling="0" width="100" height="20"></iframe>
<a href="https://youtu.be/hZd6AYin2DE"><img alt="YouTube Video Views" src="https://img.shields.io/youtube/views/hZd6AYin2DE?style=social"></a>
</th>
<th>
<iframe width="560" height="315" src="https://www.youtube.com/embed/hZd6AYin2DE" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
</th>
</table>

<h4>Reconstruction + refinement + video rendering takes about 80~200 seconds per image. <span style="color:red">If an error occurs, try "Submit Image" again.</span></h4>

<details>
<summary>More</summary>

#### Citation
```
@inproceedings{xiu2022icon,
  title     = {{ICON}: {I}mplicit {C}lothed humans {O}btained from {N}ormals},
  author    = {Xiu, Yuliang and Yang, Jinlong and Tzionas, Dimitrios and Black, Michael J.},
  booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)},
  month     = {June},
  year      = {2022},
  pages     = {13296-13306}
}
```

#### Acknowledgments

- [StyleGAN-Human, ECCV 2022](https://stylegan-human.github.io/)
- [nagolinc/styleGanHuman_and_PIFu](https://huggingface.co/spaces/nagolinc/styleGanHuman_and_PIFu)
- [radames/PIFu-Clothed-Human-Digitization](https://huggingface.co/spaces/radames/PIFu-Clothed-Human-Digitization)

#### Image Credits

* [Pinterest](https://www.pinterest.com/search/pins/?q=parkour&rs=sitelinks_searchbox)

#### Related works

* [ICON @ MPI](https://icon.is.tue.mpg.de/)
* [MonoPort @ USC](https://xiuyuliang.cn/monoport)
* [Phorhum @ Google](https://phorhum.github.io/)
* [PIFuHD @ Meta](https://shunsukesaito.github.io/PIFuHD/)
* [PaMIR @ Tsinghua](http://www.liuyebin.com/pamir/pamir.html)

</details>
'''
def generate_image(seed, psi):
    iface = gr.Interface.load("spaces/hysts/StyleGAN-Human")
    img = iface(seed, psi)
    return img

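# Note: generate_image proxies each "Sample Image" click to the public
# "hysts/StyleGAN-Human" Space via gr.Interface.load, so it needs network access
# and depends on that Space being online. A (hypothetical) standalone call would
# look like:
#
#   sample = generate_image(seed=0, psi=0.7)  # output is accepted by the gr.Image input below
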
random.seed(2022)
model_types = ['icon-filter', 'pifu', 'pamir']
examples = [[item, random.choice(model_types)] for item in glob.glob('examples/*.png')]
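# Each example pairs a PNG from examples/ with a randomly chosen reconstruction
# method; seeding the RNG with 2022 above keeps that pairing (and therefore any
# cached example outputs) identical across restarts.
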
with gr.Blocks() as demo:
    gr.Markdown(description)

    out_lst = []

    with gr.Row():
        with gr.Column():
            with gr.Row():
                with gr.Column():
                    seed = gr.Slider(
                        0, 100, step=1, value=0, label='Seed (For Image Generation)')
                    psi = gr.Slider(
                        0, 2, step=0.05, value=0.7, label='Truncation psi (For Image Generation)')
                    radio_choice = gr.Radio(
                        model_types, label='Method (For Reconstruction)', value='icon-filter')
                inp = gr.Image(type="filepath", label="Input Image")
            with gr.Row():
                btn_sample = gr.Button("Sample Image")
                btn_submit = gr.Button("Submit Image")

            gr.Examples(examples=examples,
                        inputs=[inp, radio_choice],
                        cache_examples=True,
                        fn=generate_model,
                        outputs=out_lst)
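            # With cache_examples=True, Gradio caches outputs by running fn on
            # every example (typically when this Examples block is built) and
            # serves the cached results on click. Caveat: at this point out_lst
            # is still the empty list bound above (it is rebound further down),
            # and gr.Examples keeps a reference to that empty list, so the cached
            # outputs may not be wired to the output components as intended.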
            out_vid = gr.Video(
                label="Image + Normal + Recon + Refined Recon")
            out_vid_download = gr.File(
                label="Download video (feel free to share it on Twitter with #ICON)")

        with gr.Column():
            overlap_inp = gr.Image(
                type="filepath", label="Image Normal Overlap")
            out_smpl = gr.Model3D(
                clear_color=[0.0, 0.0, 0.0, 0.0], label="SMPL")
            out_smpl_download = gr.File(label="Download SMPL mesh")
            out_smpl_npy_download = gr.File(label="Download SMPL params")
            out_recon = gr.Model3D(
                clear_color=[0.0, 0.0, 0.0, 0.0], label="Recon")
            out_recon_download = gr.File(label="Download clothed human mesh")
            out_final = gr.Model3D(
                clear_color=[0.0, 0.0, 0.0, 0.0], label="Refined Recon")
            out_final_download = gr.File(
                label="Download refined clothed human mesh")

    out_lst = [out_smpl, out_smpl_download, out_smpl_npy_download, out_recon, out_recon_download,
               out_final, out_final_download, out_vid, out_vid_download, overlap_inp]
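
    # The click handler below maps the values returned by apps.infer.generate_model
    # onto out_lst positionally, so this ordering must match generate_model's
    # return order.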
    btn_submit.click(fn=generate_model, inputs=[
                     inp, radio_choice], outputs=out_lst)
    btn_sample.click(fn=generate_image, inputs=[seed, psi], outputs=inp)


if __name__ == "__main__":
    # demo.launch(debug=False, enable_queue=False,
    #             auth=(os.environ['USER'], os.environ['PASSWORD']),
    #             auth_message="Register at icon.is.tue.mpg.de to get HuggingFace username and password.")
    demo.launch(debug=True, enable_queue=True)