nielsr (HF staff) committed on
Commit 93aa78e
1 Parent(s): cb2e660

Use ImageProcessor

Files changed (1):
README.md +3 -3
README.md CHANGED
@@ -33,13 +33,13 @@ import torch
 import numpy as np
 from PIL import Image
 
-from transformers import DPTFeatureExtractor, DPTForDepthEstimation
+from transformers import DPTImageProcessor, DPTForDepthEstimation
 from diffusers import ControlNetModel, StableDiffusionXLControlNetPipeline, AutoencoderKL
 from diffusers.utils import load_image
 
 
 depth_estimator = DPTForDepthEstimation.from_pretrained("Intel/dpt-hybrid-midas").to("cuda")
-feature_extractor = DPTFeatureExtractor.from_pretrained("Intel/dpt-hybrid-midas")
+processor = DPTImageProcessor.from_pretrained("Intel/dpt-hybrid-midas")
 controlnet = ControlNetModel.from_pretrained(
     "diffusers/controlnet-depth-sdxl-1.0",
     variant="fp16",
@@ -58,7 +58,7 @@ pipe = StableDiffusionXLControlNetPipeline.from_pretrained(
 pipe.enable_model_cpu_offload()
 
 def get_depth_map(image):
-    image = feature_extractor(images=image, return_tensors="pt").pixel_values.to("cuda")
+    image = processor(images=image, return_tensors="pt").pixel_values.to("cuda")
     with torch.no_grad(), torch.autocast("cuda"):
         depth_map = depth_estimator(image).predicted_depth
 
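For context, here is a minimal, self-contained sketch of the depth-estimation step as it reads after this commit. The device fallback, type hints, and comments are illustrative additions, and the SDXL ControlNet pipeline setup from the rest of the README is omitted.

```python
import torch
from PIL import Image
from transformers import DPTImageProcessor, DPTForDepthEstimation

# Illustrative device fallback; the README itself assumes "cuda".
device = "cuda" if torch.cuda.is_available() else "cpu"

depth_estimator = DPTForDepthEstimation.from_pretrained("Intel/dpt-hybrid-midas").to(device)
# DPTImageProcessor replaces the deprecated DPTFeatureExtractor; the call signature is unchanged.
processor = DPTImageProcessor.from_pretrained("Intel/dpt-hybrid-midas")

def get_depth_map(image: Image.Image) -> torch.Tensor:
    # Preprocess the PIL image into a (1, 3, H, W) pixel-value tensor.
    pixel_values = processor(images=image, return_tensors="pt").pixel_values.to(device)
    with torch.no_grad():
        # predicted_depth has shape (1, H', W'); downstream code in the README
        # resizes and normalizes it for ControlNet conditioning.
        depth_map = depth_estimator(pixel_values).predicted_depth
    return depth_map
```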