multimodalart committed
Commit 089520b
Parent: 1589466

Update app.py

Files changed (1)
  1. app.py +4 -0
app.py CHANGED
@@ -23,6 +23,7 @@ unet = pipe.unet
 lora_path = hf_hub_download(repo_id="SvenN/sdxl-emoji", filename="lora.safetensors", repo_type="model")
 embeddings_path = hf_hub_download(repo_id="SvenN/sdxl-emoji", filename="embeddings.pti", repo_type="model")
 
+#### Loading LoRA keys into the UNet ####
 tensors = load_file(lora_path)
 unet_lora_attn_procs = {}
 name_rank_map = {}
@@ -60,11 +61,14 @@ for name, attn_processor in unet.attn_processors.items():
 
 unet.set_attn_processor(unet_lora_attn_procs)
 unet.load_state_dict(tensors, strict=False)
+#### End loading LoRA keys into the UNet
 
+### Start loading Embeddings into the text encoder ###
 handler = TokenEmbeddingsHandler(
     [pipe.text_encoder, pipe.text_encoder_2], [pipe.tokenizer, pipe.tokenizer_2]
 )
 handler.load_embeddings(embeddings_path)
+### End loading embeddings into the text encoder ###
 
 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 1024
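
For readers following the diff: most of the region the new comments bracket is elided by the hunks (old lines 29-59 of app.py are not shown). The visible hunk header `for name, attn_processor in unet.attn_processors.items():` indicates that this region builds one LoRA attention processor per UNet attention module before the `unet.set_attn_processor(...)` and `unet.load_state_dict(...)` calls above. Below is a minimal sketch of that pattern; the safetensors key layout, the rank inference, and the use of diffusers' `LoRAAttnProcessor2_0` are assumptions, not code shown in this commit.

# Hedged sketch of the elided LoRA-loading section (app.py lines 29-59 are
# not shown in the hunks); key names and the processor class are assumptions.
from safetensors.torch import load_file
from diffusers.models.attention_processor import LoRAAttnProcessor2_0

tensors = load_file(lora_path)

# Assumed key layout: "<module>.processor.<to_q|to_k|to_v|to_out>_lora.up.weight",
# where the "up" matrix has shape (out_features, rank), so rank = shape[1].
name_rank_map = {}
for key, tensor in tensors.items():
    if key.endswith("up.weight"):
        processor_name = ".".join(key.split(".")[:-3])
        name_rank_map[processor_name] = tensor.shape[1]

# One LoRA attention processor per UNet attention module; self-attention
# layers (attn1) have no cross-attention dimension.
unet_lora_attn_procs = {}
for name, attn_processor in unet.attn_processors.items():
    cross_attention_dim = (
        None if name.endswith("attn1.processor") else unet.config.cross_attention_dim
    )
    if name.startswith("mid_block"):
        hidden_size = unet.config.block_out_channels[-1]
    elif name.startswith("up_blocks"):
        block_id = int(name[len("up_blocks.")])
        hidden_size = list(reversed(unet.config.block_out_channels))[block_id]
    else:  # down_blocks
        block_id = int(name[len("down_blocks.")])
        hidden_size = unet.config.block_out_channels[block_id]
    unet_lora_attn_procs[name] = LoRAAttnProcessor2_0(
        hidden_size=hidden_size,
        cross_attention_dim=cross_attention_dim,
        rank=name_rank_map[name],
    )

unet.set_attn_processor(unet_lora_attn_procs)
unet.load_state_dict(tensors, strict=False)

Once both bracketed blocks have run, the pipeline is used as a normal SDXL pipeline; the embeddings loaded from embeddings.pti provide the learned trigger tokens that prompts can reference.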