#!/usr/bin/env python
from __future__ import annotations
import argparse
import os
import pickle
import sys
import gradio as gr
import numpy as np
import torch
import torch.nn as nn
from huggingface_hub import hf_hub_download
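
# Make the bundled StyleGAN-Human code importable so that modules the
# pickled generator depends on can be resolved when it is loaded.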
sys.path.insert(0, 'StyleGAN-Human')
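# Access token read from the environment; used by hf_hub_download below to
# fetch the model weights.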
TOKEN = os.environ['TOKEN']
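

# Example local run (flags are defined in parse_args below):
#   python app.py --device cpu --port 7860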
def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser()
    parser.add_argument('--device', type=str, default='cpu')
    parser.add_argument('--theme', type=str)
    parser.add_argument('--share', action='store_true')
    parser.add_argument('--port', type=int)
    parser.add_argument('--disable-queue',
                        dest='enable_queue',
                        action='store_false')
    return parser.parse_args()


class App:

    def __init__(self, device: torch.device):
        self.device = device
        self.model = self.load_model('stylegan_human_v2_1024.pkl')

    def load_model(self, file_name: str) -> nn.Module:
        # Download the pickled generator and keep only the EMA weights ('G_ema').
        path = hf_hub_download('hysts/StyleGAN-Human',
                               f'models/{file_name}',
                               use_auth_token=TOKEN)
        with open(path, 'rb') as f:
            model = pickle.load(f)['G_ema']
        model.eval()
        model.to(self.device)
        # Warm-up forward pass so any lazy initialization happens at startup
        # rather than on the first user request.
        with torch.inference_mode():
            z = torch.zeros((1, model.z_dim)).to(self.device)
            label = torch.zeros([1, model.c_dim], device=self.device)
            model(z, label, force_fp32=True)
        return model

    def generate_z(self, z_dim: int, seed: int) -> torch.Tensor:
        # Deterministic latent code for a given seed.
        return torch.from_numpy(np.random.RandomState(seed).randn(
            1, z_dim)).to(self.device).float()

    @torch.inference_mode()
    def generate_single_image(self, seed: int,
                              truncation_psi: float) -> np.ndarray:
        seed = int(np.clip(seed, 0, np.iinfo(np.uint32).max))

        z = self.generate_z(self.model.z_dim, seed)
        label = torch.zeros([1, self.model.c_dim], device=self.device)

        out = self.model(z,
                         label,
                         truncation_psi=truncation_psi,
                         force_fp32=True)
        # Convert the generator output from [-1, 1] floats to a uint8 HWC image.
        out = (out.permute(0, 2, 3, 1) * 127.5 + 128).clamp(0, 255).to(
            torch.uint8)
        return out[0].cpu().numpy()

    @torch.inference_mode()
    def generate_interpolated_images(
            self, seed0: int, psi0: float, seed1: int, psi1: float,
            num_intermediate: int) -> list[np.ndarray]:
        seed0 = int(np.clip(seed0, 0, np.iinfo(np.uint32).max))
        seed1 = int(np.clip(seed1, 0, np.iinfo(np.uint32).max))

        # Linearly interpolate both the latent codes and the truncation psi
        # values, keeping the two endpoints (num_intermediate + 2 frames total).
        z0 = self.generate_z(self.model.z_dim, seed0)
        z1 = self.generate_z(self.model.z_dim, seed1)
        vec = z1 - z0
        dvec = vec / (num_intermediate + 1)
        zs = [z0 + dvec * i for i in range(num_intermediate + 2)]
        dpsi = (psi1 - psi0) / (num_intermediate + 1)
        psis = [psi0 + dpsi * i for i in range(num_intermediate + 2)]

        label = torch.zeros([1, self.model.c_dim], device=self.device)

        res = []
        for z, psi in zip(zs, psis):
            out = self.model(z, label, truncation_psi=psi, force_fp32=True)
            out = (out.permute(0, 2, 3, 1) * 127.5 + 128).clamp(0, 255).to(
                torch.uint8)
            out = out[0].cpu().numpy()
            res.append(out)
        return res


def main():
    args = parse_args()
    app = App(device=torch.device(args.device))

    with gr.Blocks(theme=args.theme) as demo:
        gr.Markdown('''<center><h1>StyleGAN-Human</h1></center>
This is a Blocks version of [this app](https://huggingface.co/spaces/hysts/StyleGAN-Human) and [this app](https://huggingface.co/spaces/hysts/StyleGAN-Human-Interpolation).
''')

        with gr.Row():
            with gr.Column():
                with gr.Row():
                    seed1 = gr.Number(value=6876, label='Seed 1')
                    psi1 = gr.Slider(0,
                                     2,
                                     value=0.7,
                                     step=0.05,
                                     label='Truncation psi 1')
                with gr.Row():
                    generate_button1 = gr.Button('Generate')
                with gr.Row():
                    generated_image1 = gr.Image(type='numpy',
                                                label='Generated Image 1')

            with gr.Column():
                with gr.Row():
                    seed2 = gr.Number(value=6886, label='Seed 2')
                    psi2 = gr.Slider(0,
                                     2,
                                     value=0.7,
                                     step=0.05,
                                     label='Truncation psi 2')
                with gr.Row():
                    generate_button2 = gr.Button('Generate')
                with gr.Row():
                    generated_image2 = gr.Image(type='numpy',
                                                label='Generated Image 2')

        with gr.Row():
            with gr.Column():
                with gr.Row():
                    num_frames = gr.Slider(
                        0,
                        41,
                        value=7,
                        step=1,
                        label='Number of Intermediate Frames')
                with gr.Row():
                    interpolate_button = gr.Button('Interpolate')
                with gr.Row():
                    interpolated_images = gr.Gallery(label='Output Images')

        gr.Markdown(
            '<center><img src="https://visitor-badge.glitch.me/badge?page_id=gradio-blocks.stylegan-human" alt="visitor badge"/></center>'
        )

        generate_button1.click(app.generate_single_image,
                               inputs=[seed1, psi1],
                               outputs=generated_image1)
        generate_button2.click(app.generate_single_image,
                               inputs=[seed2, psi2],
                               outputs=generated_image2)
        interpolate_button.click(app.generate_interpolated_images,
                                 inputs=[seed1, psi1, seed2, psi2, num_frames],
                                 outputs=interpolated_images)

    demo.launch(
        enable_queue=args.enable_queue,
        server_port=args.port,
        share=args.share,
    )


if __name__ == '__main__':
    main()