# hadenjax-dreams / app.py
import torch
from diffusers import StableDiffusionPipeline, DDIMScheduler
import gradio as gr

# Gradio UI for generating images.
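# Model and scheduler setup: load the fine-tuned weights from the Hub and sample with DDIM.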
model_path = "Randolph/hadenjax-dreams"  # To use a previously trained model saved in Google Drive, replace this with its full path.
scheduler = DDIMScheduler(beta_start=0.00085, beta_end=0.012, beta_schedule="scaled_linear", clip_sample=False, set_alpha_to_one=False)
pipe = StableDiffusionPipeline.from_pretrained(model_path, scheduler=scheduler, safety_checker=None, torch_dtype=torch.float16).to("cuda")
# Fix the random seed for reproducibility.
seed = 52362
g_cuda = torch.Generator(device='cuda')
g_cuda.manual_seed(seed)
def inference(prompt, negative_prompt, num_samples, height=800, width=800, num_inference_steps=42, guidance_scale=10):
    """Generate `num_samples` images for the prompt and return them as a list of PIL images."""
    with torch.autocast("cuda"), torch.inference_mode():
        return pipe(
            prompt, height=int(height), width=int(width),
            negative_prompt=negative_prompt,
            num_images_per_prompt=int(num_samples),
            num_inference_steps=int(num_inference_steps), guidance_scale=guidance_scale,
            generator=g_cuda
        ).images
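# Quick sanity check outside the UI (a sketch; assumes the pipeline above loaded onto
# a CUDA device and that an 800x800 generation fits in GPU memory):
#   images = inference("page of graphic novel parts-unknown by hadenjax", "", 1)
#   images[0].save("sample.png")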
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            prompt = gr.Textbox(label="Prompt", value="page of graphic novel parts-unknown by hadenjax")
            negative_prompt = gr.Textbox(label="Negative Prompt", value="")
            run = gr.Button(value="Generate")
            with gr.Row():
                num_samples = gr.Number(label="Number of Samples", value=1)
                guidance_scale = gr.Number(label="Guidance Scale", value=10)
            with gr.Row():
                height = gr.Number(label="Height", value=800)
                width = gr.Number(label="Width", value=800)
            num_inference_steps = gr.Slider(label="Steps", value=42)
        with gr.Column():
            gallery = gr.Gallery()

    run.click(inference, inputs=[prompt, negative_prompt, num_samples, height, width, num_inference_steps, guidance_scale], outputs=gallery)
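# debug=True keeps the process in the foreground and surfaces errors; share=True exposes a public Gradio link.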
demo.launch(debug=True, share=True)