pengdaqian committed on
Commit
3ec570b
1 Parent(s): 91920f4
Files changed (1)
  1. app.py +7 -4
app.py CHANGED
@@ -55,10 +55,10 @@ def infer(prompt: str, negative: str, width: int, height: int, sampler: str, ste
     seed = int(seed)
 
     images = []
-    device = "cpu"
     if torch.cuda.is_available():
-        device = "cuda"
-    generator = torch.Generator(device=device).manual_seed(seed)
+        generator = torch.Generator(device="cuda").manual_seed(seed)
+    else:
+        generator = torch.manual_seed(seed)
     if sampler == "EulerDiscrete":
         pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config)
     elif sampler == "EulerAncestralDiscrete":
@@ -132,6 +132,9 @@ css = """
     margin: auto;
     padding-top: 1.5rem;
 }
+#prompt-column {
+    min-height: 450px
+}
 #gallery {
     min-height: 22rem;
     margin-bottom: 15px;
@@ -365,7 +368,7 @@ with block:
     with gr.Group():
         with gr.Box():
             with gr.Row(elem_id="prompt-container").style(mobile_collapse=False, equal_height=True):
-                with gr.Column():
+                with gr.Column(elem_id="prompt-column"):
                     text = gr.Textbox(
                         label="Enter your prompt",
                         show_label=False,
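The functional change in infer() drops the intermediate device variable and branches directly on GPU availability when building the seeded generator. A minimal sketch of that pattern, lifted out of the diff into a self-contained helper (the make_generator name and the commented pipe call are illustrative, not part of the commit):

import torch

def make_generator(seed: int) -> torch.Generator:
    """Return a seeded generator on CUDA when available, else the default CPU one."""
    if torch.cuda.is_available():
        # Pin the generator to the GPU so it lives on the same device as the pipeline.
        return torch.Generator(device="cuda").manual_seed(seed)
    # torch.manual_seed() seeds and returns the default CPU generator.
    return torch.manual_seed(seed)

# Hypothetical usage, mirroring how `pipe` is called elsewhere in app.py:
# images = pipe(prompt, generator=make_generator(1234)).images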
 
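The two remaining hunks are paired: gr.Column(elem_id="prompt-column") assigns the column an HTML id, and the new #prompt-column rule in the css string gives that column a minimum height. A stand-alone sketch of that wiring (a stripped-down Blocks app written against the Gradio 3.x API that the .style() call implies; the placeholder textbox is illustrative):

import gradio as gr

# Same selector/elem_id pairing as the commit, in a minimal app.
css = """
#prompt-column {
    min-height: 450px
}
"""

with gr.Blocks(css=css) as demo:
    with gr.Row():
        # elem_id becomes the element's HTML id, so the #prompt-column rule above applies to it.
        with gr.Column(elem_id="prompt-column"):
            gr.Textbox(label="Enter your prompt", show_label=False)

demo.launch()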