#!/usr/bin/env python
from __future__ import annotations
import functools
import os
import pickle
import sys
import gradio as gr
import numpy as np
import torch
import torch.nn as nn
from huggingface_hub import hf_hub_download
sys.path.insert(0, 'StyleGAN-Human')
TITLE = 'StyleGAN-Human'
DESCRIPTION = '''This is an unofficial demo for https://github.com/stylegan-human/StyleGAN-Human.
Related App: [StyleGAN-Human (Interpolation)](https://huggingface.co/spaces/hysts/StyleGAN-Human-Interpolation)
'''
HF_TOKEN = os.getenv('HF_TOKEN')
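
# Map a random seed to a (1, z_dim) latent vector on the target device.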
def generate_z(z_dim: int, seed: int, device: torch.device) -> torch.Tensor:
    return torch.from_numpy(np.random.RandomState(seed).randn(
        1, z_dim)).to(device).float()
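
# Synthesize one image for the given seed and truncation psi, returned as an HxWx3 uint8 array.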
@torch.inference_mode()
def generate_image(seed: int, truncation_psi: float, model: nn.Module,
                   device: torch.device) -> np.ndarray:
    seed = int(np.clip(seed, 0, np.iinfo(np.uint32).max))
    z = generate_z(model.z_dim, seed, device)
    label = torch.zeros([1, model.c_dim], device=device)
    out = model(z, label, truncation_psi=truncation_psi, force_fp32=True)
    # Map the generator's [-1, 1] float output to a uint8 image in HWC layout.
    out = (out.permute(0, 2, 3, 1) * 127.5 + 128).clamp(0, 255).to(torch.uint8)
    return out[0].cpu().numpy()
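
# Download the pickled generator from the Hugging Face Hub, load its EMA weights,
# move it to the device, and run a dummy forward pass before serving requests.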
def load_model(file_name: str, device: torch.device) -> nn.Module:
    path = hf_hub_download('hysts/StyleGAN-Human',
                           f'models/{file_name}',
                           use_auth_token=HF_TOKEN)
    with open(path, 'rb') as f:
        model = pickle.load(f)['G_ema']
    model.eval()
    model.to(device)
    # Warm-up forward pass so lazily-built buffers and kernels are initialized
    # before the first user request.
    with torch.inference_mode():
        z = torch.zeros((1, model.z_dim)).to(device)
        label = torch.zeros([1, model.c_dim], device=device)
        model(z, label, force_fp32=True)
    return model
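
# Load the generator once at startup and bind it, together with the device, to the Gradio callback.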
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
model = load_model('stylegan_human_v2_1024.pkl', device)
func = functools.partial(generate_image, model=model, device=device)
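
# Build the Gradio UI: a seed slider and a truncation-psi slider mapped to the generated image.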
gr.Interface(
    fn=func,
    inputs=[
        gr.Slider(label='Seed', minimum=0, maximum=100000, step=1, value=0),
        gr.Slider(label='Truncation psi',
                  minimum=0,
                  maximum=2,
                  step=0.05,
                  value=0.7),
    ],
    outputs=gr.Image(label='Output', type='numpy'),
    title=TITLE,
    description=DESCRIPTION,
).launch(show_api=False)