bluestarburst committed on
Commit
4642a69
1 Parent(s): 48571f9

Upload folder using huggingface_hub

Browse files
animatediff/utils/convert_from_ckpt.py CHANGED
@@ -160,6 +160,10 @@ def assign_to_checkpoint(
160
  """
161
  assert isinstance(paths, list), "Paths should be a list of dicts containing 'old' and 'new' keys."
162
 
 
 
 
 
163
  # Splits the attention layers into three variables.
164
  if attention_paths_to_split is not None:
165
  for path, path_map in attention_paths_to_split.items():
@@ -625,7 +629,10 @@ def convert_ldm_vae_checkpoint(checkpoint, config):
625
  mid_attentions = [key for key in vae_state_dict if "encoder.mid.attn" in key]
626
  paths = renew_vae_attention_paths(mid_attentions)
627
  meta_path = {"old": "mid.attn_1", "new": "mid_block.attentions.0"}
628
- assign_to_checkpoint(paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path], config=config)
 
 
 
629
  conv_attn_to_linear(new_checkpoint)
630
 
631
  for i in range(num_up_blocks):
@@ -658,7 +665,10 @@ def convert_ldm_vae_checkpoint(checkpoint, config):
658
  mid_attentions = [key for key in vae_state_dict if "decoder.mid.attn" in key]
659
  paths = renew_vae_attention_paths(mid_attentions)
660
  meta_path = {"old": "mid.attn_1", "new": "mid_block.attentions.0"}
661
- assign_to_checkpoint(paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path], config=config)
 
 
 
662
  conv_attn_to_linear(new_checkpoint)
663
  return new_checkpoint
664
 
 
160
  """
161
  assert isinstance(paths, list), "Paths should be a list of dicts containing 'old' and 'new' keys."
162
 
163
+
164
+
165
+
166
+
167
  # Splits the attention layers into three variables.
168
  if attention_paths_to_split is not None:
169
  for path, path_map in attention_paths_to_split.items():
 
629
  mid_attentions = [key for key in vae_state_dict if "encoder.mid.attn" in key]
630
  paths = renew_vae_attention_paths(mid_attentions)
631
  meta_path = {"old": "mid.attn_1", "new": "mid_block.attentions.0"}
632
+ oldKey = {"old": "key", "new": "to_k"}
633
+ oldQuery = {"old": "query", "new": "to_q"}
634
+ oldValue = {"old": "value", "new": "to_v"}
635
+ assign_to_checkpoint(paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path, oldKey, oldQuery, oldValue], config=config)
636
  conv_attn_to_linear(new_checkpoint)
637
 
638
  for i in range(num_up_blocks):
 
665
  mid_attentions = [key for key in vae_state_dict if "decoder.mid.attn" in key]
666
  paths = renew_vae_attention_paths(mid_attentions)
667
  meta_path = {"old": "mid.attn_1", "new": "mid_block.attentions.0"}
668
+ oldKey = {"old": "key", "new": "to_k"}
669
+ oldQuery = {"old": "query", "new": "to_q"}
670
+ oldValue = {"old": "value", "new": "to_v"}
671
+ assign_to_checkpoint(paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path, oldKey, oldQuery, oldValue], config=config)
672
  conv_attn_to_linear(new_checkpoint)
673
  return new_checkpoint
674
 
data/data.ipynb CHANGED
@@ -2436,6 +2436,35 @@
2436
  "source": [
2437
  "!ffmpeg -i output/0.mp4"
2438
  ]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2439
  }
2440
  ],
2441
  "metadata": {
 
2436
  "source": [
2437
  "!ffmpeg -i output/0.mp4"
2438
  ]
2439
+ },
2440
+ {
2441
+ "cell_type": "code",
2442
+ "execution_count": null,
2443
+ "metadata": {},
2444
+ "outputs": [],
2445
+ "source": [
2446
+ "config_yaml = f\"\"\"\n",
2447
+ "City:\n",
2448
+ " base: \"\"\n",
2449
+ " path: \"\"\n",
2450
+ " motion_module:\n",
2451
+ " - \"/mm.pth\"\n",
2452
+ "\n",
2453
+ "\n",
2454
+ " seed: [1]\n",
2455
+ " steps: 50\n",
2456
+ " guidance_scale: 16\n",
2457
+ "\n",
2458
+ " prompt:\n",
2459
+ " - f\"{test_prompt}, masterpiece, best quality, empty New York City street, tall buildings, no people, high definition\"\n",
2460
+ "\n",
2461
+ " n_prompt:\n",
2462
+ " - \"\"\n",
2463
+ "\"\"\"\n",
2464
+ "\n",
2465
+ "with open(f'configs/prompts/.yaml', 'w') as f:\n",
2466
+ " f.write(config_yaml)"
2467
+ ]
2468
  }
2469
  ],
2470
  "metadata": {