Send model to GPU

#3
by multimodalart - opened
Files changed (1)
  1. app.py +5 -1
app.py CHANGED
@@ -5,6 +5,7 @@ from diffusers import StableDiffusionPipeline
 
 model_id = "hakurei/waifu-diffusion"
 pipe = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=torch.float16, revision='fp16')
+pipe = pipe.to("cuda")
 num_samples = 2
 
 def infer(prompt):
@@ -128,6 +129,9 @@ css = """
 #prompt-container{
     gap: 0;
 }
+#generated_id{
+    min-height: 700px
+}
 """
 block = gr.Blocks(css=css)
 with block as demo:
@@ -169,7 +173,7 @@ with block as demo:
     rounded=(False, True, True, False),
 )
 
-gallery = gr.Gallery(label="Generated images", show_label=False).style(
+gallery = gr.Gallery(label="Generated images", show_label=False, elem_id="generated_id").style(
     grid=[2], height="auto"
 )
 text.submit(infer, inputs=[text], outputs=gallery)
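
For context, a minimal, self-contained sketch of what the relevant parts of app.py look like with this change applied. It assumes a CUDA-capable runtime, the fp16 revision of hakurei/waifu-diffusion, and the Gradio 3.x `.style()` API the Space already uses; the prompt textbox and the rest of the CSS are simplified here, so treat this as an illustration rather than the full file.

import torch
import gradio as gr
from diffusers import StableDiffusionPipeline

model_id = "hakurei/waifu-diffusion"
pipe = StableDiffusionPipeline.from_pretrained(
    model_id, torch_dtype=torch.float16, revision="fp16"
)
# The change in this PR: move the fp16 weights to the GPU so generation
# runs on CUDA instead of the CPU.
pipe = pipe.to("cuda")

num_samples = 2

def infer(prompt):
    # Passing the prompt num_samples times returns num_samples images.
    return pipe([prompt] * num_samples).images

# elem_id="generated_id" on the gallery below ties it to this CSS rule,
# reserving vertical space so the layout does not jump while images generate.
css = """
#generated_id{
    min-height: 700px
}
"""

block = gr.Blocks(css=css)
with block as demo:
    text = gr.Textbox(label="Enter your prompt")  # simplified stand-in for the styled textbox
    gallery = gr.Gallery(
        label="Generated images", show_label=False, elem_id="generated_id"
    ).style(grid=[2], height="auto")
    text.submit(infer, inputs=[text], outputs=gallery)

demo.launch()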