Spaces: Runtime error
import numpy as np
import PIL.Image
import torch
import gradio as gr
from diffusers import LatentDiffusionUncondPipeline

# Load the unconditional latent diffusion model trained on CelebA-HQ 256x256.
pipeline = LatentDiffusionUncondPipeline.from_pretrained("CompVis/latent-diffusion-celeba-256")

def predict(seed):
    # Seed a dedicated generator so the same slider value reproduces the same image.
    generator = torch.Generator().manual_seed(int(seed))
    image = pipeline(generator=generator, num_inference_steps=1)["sample"]
    # Convert the NCHW tensor in [-1, 1] to an HWC uint8 array in [0, 255].
    image_processed = image.cpu().permute(0, 2, 3, 1)
    image_processed = (image_processed + 1.0) * 127.5
    image_processed = image_processed.clamp(0, 255).numpy().astype(np.uint8)
    return PIL.Image.fromarray(image_processed[0])

gr.Interface(
    predict,
    inputs=[
        gr.inputs.Slider(0, 1000, label='Seed', default=42),
    ],
    outputs="image",
).launch()