# Hugging Face Space: Keras DreamBooth Stable Diffusion fine-tuned on Hokusai-style paintings.
from huggingface_hub import from_pretrained_keras
from keras_cv import models
import gradio as gr
import tensorflow as tf
# Run compute in float16 for faster GPU inference while keeping fp32 variables.
tf.keras.mixed_precision.set_global_policy("mixed_float16")

# Build the base Stable Diffusion pipeline at a fixed 512x512 resolution,
# then swap its diffusion U-Net for the DreamBooth-fine-tuned weights
# pulled from the Hugging Face Hub.
resolution = 512
dreambooth_model = models.StableDiffusion(
    img_width=resolution,
    img_height=resolution,
    jit_compile=True,
)
dreambooth_model._diffusion_model = from_pretrained_keras(
    "keras-dreambooth/dreambooth_diffusion_hokusai"
)
# generate images
def inference(prompt, negative_prompt, num_imgs_to_gen, num_steps, guidance_scale):
    """Generate a batch of images from the DreamBooth-fine-tuned model.

    Args:
        prompt: Positive text prompt; should include the ``hks## style`` token.
        negative_prompt: Attributes to steer the sampler away from.
        num_imgs_to_gen: How many images to generate in one batch.
        num_steps: Number of diffusion denoising steps.
        guidance_scale: Classifier-free guidance weight.

    Returns:
        The generated image batch as returned by ``text_to_image``.
    """
    generated_images = dreambooth_model.text_to_image(
        prompt,
        negative_prompt=negative_prompt,
        # Gradio Slider/Number components can deliver floats; the Keras
        # sampler expects integer batch size and step count.
        batch_size=int(num_imgs_to_gen),
        num_steps=int(num_steps),
        unconditional_guidance_scale=guidance_scale,
    )
    return generated_images
# pass function, input type for prompt, the output for multiple images
# Wire the Gradio UI: prompt/sampling controls in, a gallery of images out.
# Examples are cached so the Space shows pre-rendered results instantly.
gr.Interface(
    inference,
    [
        gr.Textbox(label="Positive Prompt", value="a painting image in hks## style"),
        gr.Textbox(label="Negative Prompt", value="bad anatomy, soft blurry"),
        gr.Slider(label="Number of gen image", minimum=1, maximum=4, value=2, step=1),
        gr.Slider(label="Inference Steps", value=100),
        gr.Number(label="Guidance scale", value=7.5),
    ],
    [
        gr.Gallery(show_label=False),
    ],
    title="Keras Dreambooth - Hokusai artist π",
    description="This model has been fine-tuned to learn the concept of Hokusai artist. To use this demo, you should have {hks## style} in the input",
    examples=[
        ["a painting image of a fishing village under a cherry blossom forest at sunset in hks## style, ultra realistic, 4k, 8k", "bad anatomy, soft blurry", 4, 100, 15],
        ["a beautiful and highly detailed oil painting of a lost valley in the mountains in hks## style, running river, intricate details, 8k, sharp focus, hyper realism", "(bad anatomy), (blurry), grain", 4, 100, 15],
    ],
    cache_examples=True,
).queue().launch(debug=True, share=True)