clementchadebec committed
Commit d0b932a
1 Parent(s): cfed92e

Update app.py

Files changed (1)
  1. app.py +23 -38
app.py CHANGED
@@ -1,20 +1,36 @@
 import gradio as gr
 import numpy as np
 import random
-from diffusers import DiffusionPipeline
+from diffusers import StableDiffusionPipeline, LCMScheduler
 import torch
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
+adapter_id = "jasperai/flash-sd"
 
 if torch.cuda.is_available():
     torch.cuda.max_memory_allocated(device=device)
-    pipe = DiffusionPipeline.from_pretrained("stabilityai/sdxl-turbo", torch_dtype=torch.float16, variant="fp16", use_safetensors=True)
+    pipe = StableDiffusionPipeline.from_pretrained(
+        "runwayml/stable-diffusion-v1-5",
+        use_safetensors=True,
+    )
     pipe.enable_xformers_memory_efficient_attention()
     pipe = pipe.to(device)
 else:
-    pipe = DiffusionPipeline.from_pretrained("stabilityai/sdxl-turbo", use_safetensors=True)
+    pipe = StableDiffusionPipeline.from_pretrained(
+        "runwayml/stable-diffusion-v1-5",
+        use_safetensors=True,
+    )
     pipe = pipe.to(device)
 
+pipe.scheduler = LCMScheduler.from_pretrained(
+    "runwayml/stable-diffusion-v1-5",
+    subfolder="scheduler",
+    timestep_spacing="trailing",
+)
+
+pipe.load_lora_weights(adapter_id)
+pipe.fuse_lora()
+
 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 1024
 
@@ -38,9 +54,9 @@ def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance
     return image
 
 examples = [
-    "Astronaut in a jungle, cold color palette, muted colors, detailed, 8k",
-    "An astronaut riding a green horse",
-    "A delicious ceviche cheesecake slice",
+    "The image showcases a freshly baked bread, possibly focaccia, with rosemary sprigs and red pepper flakes sprinkled on top. It's sliced and placed on a wire cooling rack, with a bowl of mixed peppercorns beside it.",
+    "A raccoon reading a book in a lush forest.",
+    "A serene landscape showcases a winding road alongside a vast, turquoise lake, flanked by majestic snow-capped mountains under a partly cloudy sky.",
 ]
 
 css="""
@@ -79,13 +95,6 @@ with gr.Blocks(css=css) as demo:
 
         with gr.Accordion("Advanced Settings", open=False):
 
-            negative_prompt = gr.Text(
-                label="Negative prompt",
-                max_lines=1,
-                placeholder="Enter a negative prompt",
-                visible=False,
-            )
-
             seed = gr.Slider(
                 label="Seed",
                 minimum=0,
@@ -96,34 +105,10 @@ with gr.Blocks(css=css) as demo:
 
             randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
 
-            with gr.Row():
-
-                width = gr.Slider(
-                    label="Width",
-                    minimum=256,
-                    maximum=MAX_IMAGE_SIZE,
-                    step=32,
-                    value=512,
-                )
-
-                height = gr.Slider(
-                    label="Height",
-                    minimum=256,
-                    maximum=MAX_IMAGE_SIZE,
-                    step=32,
-                    value=512,
-                )
+
 
             with gr.Row():
 
-                guidance_scale = gr.Slider(
-                    label="Guidance scale",
-                    minimum=0.0,
-                    maximum=10.0,
-                    step=0.1,
-                    value=0.0,
-                )
-
                 num_inference_steps = gr.Slider(
                     label="Number of inference steps",
                     minimum=1,
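
For reference, the updated initialization can be exercised outside the Gradio UI with a minimal sketch like the one below. The model, scheduler, and LoRA identifiers are taken from the diff above; the prompt, the 4-step count, the zero guidance scale, and the output filename are illustrative assumptions (the Space exposes the step count as a slider).

import torch
from diffusers import StableDiffusionPipeline, LCMScheduler

# Same setup as the updated app.py: SD 1.5 base + LCM scheduler + flash-sd LoRA.
adapter_id = "jasperai/flash-sd"
pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5",
    use_safetensors=True,
)
pipe.scheduler = LCMScheduler.from_pretrained(
    "runwayml/stable-diffusion-v1-5",
    subfolder="scheduler",
    timestep_spacing="trailing",
)
pipe.load_lora_weights(adapter_id)
pipe.fuse_lora()
pipe = pipe.to("cuda" if torch.cuda.is_available() else "cpu")

# Assumed sampling settings for illustration; adjust the step count as in the app's slider.
image = pipe(
    "A raccoon reading a book in a lush forest.",
    num_inference_steps=4,
    guidance_scale=0.0,
).images[0]
image.save("raccoon.png")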