wjs0725 committed
Commit a7b8414
Parent: f2b6361

Update app.py

Files changed (1): app.py (+5, -6)
app.py CHANGED
@@ -126,8 +126,7 @@ class FluxEditor:
             os.mkdir(self.feature_path)
 
         with torch.no_grad():
-            self.t5, self.clip = self.t5.cuda(), self.clip.cuda()
-            inp = prepare(self.t5.cuda(), self.clip, init_image, prompt=opts.source_prompt)
+            inp = prepare(self.t5, self.clip, init_image, prompt=opts.source_prompt)
             inp_target = prepare(self.t5, self.clip, init_image, prompt=opts.target_prompt)
             timesteps = get_schedule(opts.num_steps, inp["img"].shape[1], shift=(self.name != "flux-schnell"))
 
@@ -139,14 +138,14 @@ class FluxEditor:
 
         # inversion initial noise
         with torch.no_grad():
-            z, info = denoise(self.model.cuda(), **inp, timesteps=timesteps, guidance=1, inverse=True, info=info)
+            z, info = denoise(self.model, **inp, timesteps=timesteps, guidance=1, inverse=True, info=info)
 
         inp_target["img"] = z
 
         timesteps = get_schedule(opts.num_steps, inp_target["img"].shape[1], shift=(self.name != "flux-schnell"))
 
         # denoise initial noise
-        x, _ = denoise(self.model.cuda(), **inp_target, timesteps=timesteps, guidance=guidance, inverse=False, info=info)
+        x, _ = denoise(self.model, **inp_target, timesteps=timesteps, guidance=guidance, inverse=False, info=info)
 
         # offload model, load autoencoder to gpu
         if self.offload:
@@ -198,7 +197,7 @@ class FluxEditor:
 
 
 
-def create_demo(model_name: str, device: str = "cuda" if torch.cuda.is_available() else "cpu", offload: bool = False):
+def create_demo(model_name: str, device: str = "cuda:0" if torch.cuda.is_available() else "cpu", offload: bool = False):
     editor = FluxEditor(args)
     is_schnell = model_name == "flux-schnell"
 
@@ -238,7 +237,7 @@ if __name__ == "__main__":
     import argparse
     parser = argparse.ArgumentParser(description="Flux")
    parser.add_argument("--name", type=str, default="flux-dev", choices=list(configs.keys()), help="Model name")
-    parser.add_argument("--device", type=str, default="cuda" if torch.cuda.is_available() else "cpu", help="Device to use")
+    parser.add_argument("--device", type=str, default="cuda:0" if torch.cuda.is_available() else "cpu", help="Device to use")
     parser.add_argument("--offload", action="store_true", help="Offload model to CPU when not in use")
     parser.add_argument("--share", action="store_true", help="Create a public link to your demo")
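
In short, the commit drops the ad-hoc `.cuda()` calls inside the editing path and switches the default device string to `"cuda:0"`, so device placement is presumably decided once up front (via `--device` / `create_demo`) rather than forced onto the GPU at every call site. Below is a minimal sketch of that pattern; `pick_device` and `EditorSketch` are hypothetical names standing in for the real `FluxEditor` internals, not the actual implementation.

```python
import argparse

import torch
import torch.nn as nn


def pick_device(requested: str | None = None) -> torch.device:
    # Mirror the diff's new default: prefer "cuda:0" when CUDA is available,
    # otherwise fall back to CPU.
    if requested is not None:
        return torch.device(requested)
    return torch.device("cuda:0" if torch.cuda.is_available() else "cpu")


class EditorSketch:
    """Hypothetical stand-in for FluxEditor's device handling."""

    def __init__(self, t5: nn.Module, clip: nn.Module, model: nn.Module, device: torch.device):
        # Move each module to the chosen device once, at construction time,
        # instead of calling .cuda() at every call site as the old code did.
        self.device = device
        self.t5 = t5.to(device)
        self.clip = clip.to(device)
        self.model = model.to(device)

    @torch.no_grad()
    def edit(self, image: torch.Tensor) -> torch.Tensor:
        # Inputs follow the modules onto self.device; the modules themselves
        # are never re-moved here.
        image = image.to(self.device)
        return self.model(image)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Flux")
    parser.add_argument(
        "--device",
        type=str,
        default="cuda:0" if torch.cuda.is_available() else "cpu",
        help="Device to use",
    )
    args = parser.parse_args()

    device = pick_device(args.device)
    # Tiny placeholder modules so the sketch runs end to end.
    editor = EditorSketch(nn.Identity(), nn.Identity(), nn.Linear(4, 4), device)
    print(editor.edit(torch.randn(1, 4)).shape)
```

With the modules pinned once, the `"cuda:0" if torch.cuda.is_available() else "cpu"` default degrades gracefully on CPU-only machines instead of failing inside the edit loop.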