Joseph Catrambone committed on
Commit
b5ecd5f
1 Parent(s): b7e9473

Use CPU devices if CUDA is not available.

Files changed (1)
  1. app.py +4 -3
app.py CHANGED
@@ -13,11 +13,12 @@ from cldm.ddim_hacked import DDIMSampler
 from mediapipe_face_common import generate_annotation
 
 # Download the SD 1.5 model from HF
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 model_path = hf_hub_download(repo_id="CrucibleAI/ControlNetMediaPipeFace", filename="models/controlnet_sd21_laion_face_v2_full.ckpt", repo_type="model")
 config_path = hf_hub_download(repo_id="CrucibleAI/ControlNetMediaPipeFace", filename="models/cldm_v21.yaml", repo_type="model")
 model = create_model(config_path).cpu()
-model.load_state_dict(load_state_dict(model_path, location='cuda'))
-model = model.cuda()
+model.load_state_dict(load_state_dict(model_path, location=device))
+model = model.to(device)
 ddim_sampler = DDIMSampler(model) # ControlNet _only_ works with DDIM.
 
 
@@ -27,7 +28,7 @@ def process(input_image: Image.Image, prompt, a_prompt, n_prompt, max_faces: int
 visualization = Image.fromarray(empty) # Save to help debug.
 
 empty = numpy.moveaxis(empty, 2, 0) # h, w, c -> c, h, w
-control = torch.from_numpy(empty.copy()).float().cuda() / 255.0
+control = torch.from_numpy(empty.copy()).float().to(device) / 255.0
 control = torch.stack([control for _ in range(num_samples)], dim=0)
 # control = einops.rearrange(control, 'b h w c -> b c h w').clone()
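
For reference, the fallback pattern this commit adopts (CUDA when a GPU is present, CPU otherwise) can be sketched in isolation. The snippet below is a minimal, hypothetical example and is not part of app.py; it uses a small torch.nn.Linear as a stand-in for the real ControlNet model, picks a device with torch.cuda.is_available(), and moves both the module and its input tensor onto that device.

import torch

# Pick the GPU when one is present; otherwise fall back to the CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Stand-in module for the real model; any module with parameters behaves the same way.
module = torch.nn.Linear(4, 2).to(device)

# Inputs must live on the same device as the module's parameters.
x = torch.rand(1, 4, device=device)
y = module(x)
print(y.device)  # cuda:0 on a GPU machine, cpu otherwise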