George Krupenchenkov committed on
Commit
03f037b
·
1 Parent(s): 99c7d27

add hw6 markups

Browse files
Files changed (1) hide show
  1. app.py +1 -35
app.py CHANGED
@@ -8,7 +8,7 @@ import torch
8
  # import spaces #[uncomment to use ZeroGPU]
9
  from diffusers import (ControlNetModel, StableDiffusionControlNetPipeline,
10
  StableDiffusionPipeline)
11
- from peft import LoraConfig, PeftModel
12
  from PIL import Image
13
 
14
  device = "cuda" if torch.cuda.is_available() else "cpu"
@@ -53,40 +53,6 @@ MODEL_NAME = "CompVis/stable-diffusion-v1-4"
53
  CKPT_DIR = "sd-14-lora-1000"
54
 
55
 
56
- def get_lora_sd_pipeline(
57
- ckpt_dir=CKPT_DIR,
58
- base_model_name_or_path=None,
59
- dtype=torch.float16,
60
- device="cuda",
61
- adapter_name="default",
62
- ):
63
- unet_sub_dir = os.path.join(ckpt_dir, "unet")
64
- text_encoder_sub_dir = os.path.join(ckpt_dir, "text_encoder")
65
- if os.path.exists(text_encoder_sub_dir) and base_model_name_or_path is None:
66
- config = LoraConfig.from_pretrained(text_encoder_sub_dir)
67
- base_model_name_or_path = config.base_model_name_or_path
68
-
69
- if base_model_name_or_path is None:
70
- raise ValueError("Please specify the base model name or path")
71
-
72
- pipe = StableDiffusionPipeline.from_pretrained(
73
- base_model_name_or_path, torch_dtype=dtype
74
- ).to(device)
75
- pipe.unet = PeftModel.from_pretrained(
76
- pipe.unet, unet_sub_dir, adapter_name=adapter_name
77
- )
78
-
79
- if os.path.exists(text_encoder_sub_dir):
80
- pipe.text_encoder = PeftModel.from_pretrained(
81
- pipe.text_encoder, text_encoder_sub_dir, adapter_name=adapter_name
82
- )
83
-
84
- if dtype in (torch.float16, torch.bfloat16):
85
- pipe.unet.half()
86
- pipe.text_encoder.half()
87
-
88
- return pipe
89
-
90
 
91
  # @spaces.GPU #[uncomment to use ZeroGPU]
92
  def infer(
 
8
  # import spaces #[uncomment to use ZeroGPU]
9
  from diffusers import (ControlNetModel, StableDiffusionControlNetPipeline,
10
  StableDiffusionPipeline)
11
+ from peft import PeftModel
12
  from PIL import Image
13
 
14
  device = "cuda" if torch.cuda.is_available() else "cpu"
 
53
  CKPT_DIR = "sd-14-lora-1000"
54
 
55
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
56
 
57
  # @spaces.GPU #[uncomment to use ZeroGPU]
58
  def infer(