chore: sync with upstream
app.py CHANGED
@@ -113,6 +113,9 @@ lora_cache = {
     base_name: LoRANetwork(text_encoder, unet)
 }
 
+te_base_weight_length = text_encoder.get_input_embeddings().weight.data.shape[0]
+original_prepare_for_tokenization = tokenizer.prepare_for_tokenization
+
 def setup_model(name, lora_state=None, lora_scale=1.0):
     global pipe
 
@@ -169,10 +172,6 @@ def error_str(error, title="Error"):
         else ""
     )
 
-
-te_base_weight_length = text_encoder.get_input_embeddings().weight.data.shape[0]
-original_prepare_for_tokenization = tokenizer.prepare_for_tokenization
-
 def make_token_names(embs):
     all_tokens = []
     for name, vec in embs.items():
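The relocated lines capture two module-level snapshots before setup_model is defined: the original size of the text encoder's token-embedding table and the tokenizer's unpatched prepare_for_tokenization method. Below is a minimal sketch of how such snapshots are typically used to reset the text encoder and tokenizer after custom embedding tokens have been registered; restore_base_text_encoder is a hypothetical helper, not part of app.py, and only te_base_weight_length and original_prepare_for_tokenization come from the diff.

# Minimal sketch, assuming the module-level snapshots shown in the diff.
# restore_base_text_encoder is hypothetical and not part of app.py.
def restore_base_text_encoder(text_encoder, tokenizer):
    """Undo textual-inversion side effects using the module-level snapshots."""
    emb = text_encoder.get_input_embeddings()
    # Drop any embedding rows appended after the snapshot was taken.
    emb.weight.data = emb.weight.data[:te_base_weight_length]
    # Put back the tokenizer method saved before any monkey-patching.
    tokenizer.prepare_for_tokenization = original_prepare_for_tokenization

Taking the snapshots at import time, rather than inside setup_model, keeps them describing the pristine model state even when setup_model runs repeatedly.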