bluestarburst committed
Commit 96eabf2
Parent: 4642a69

Upload folder using huggingface_hub

animatediff/models/__pycache__/attention.cpython-310.pyc CHANGED
Binary files a/animatediff/models/__pycache__/attention.cpython-310.pyc and b/animatediff/models/__pycache__/attention.cpython-310.pyc differ
 
animatediff/models/__pycache__/motion_module.cpython-310.pyc CHANGED
Binary files a/animatediff/models/__pycache__/motion_module.cpython-310.pyc and b/animatediff/models/__pycache__/motion_module.cpython-310.pyc differ
 
animatediff/models/__pycache__/unet.cpython-310.pyc CHANGED
Binary files a/animatediff/models/__pycache__/unet.cpython-310.pyc and b/animatediff/models/__pycache__/unet.cpython-310.pyc differ
 
animatediff/utils/__pycache__/convert_from_ckpt.cpython-310.pyc CHANGED
Binary files a/animatediff/utils/__pycache__/convert_from_ckpt.cpython-310.pyc and b/animatediff/utils/__pycache__/convert_from_ckpt.cpython-310.pyc differ
 
animatediff/utils/convert_from_ckpt.py CHANGED
@@ -632,7 +632,8 @@ def convert_ldm_vae_checkpoint(checkpoint, config):
     oldKey = {"old": "key", "new": "to_k"}
     oldQuery = {"old": "query", "new": "to_q"}
     oldValue = {"old": "value", "new": "to_v"}
-    assign_to_checkpoint(paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path, oldKey, oldQuery, oldValue], config=config)
+    oldOut = {"old": "proj_attn", "new": "to_out"}
+    assign_to_checkpoint(paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path, oldKey, oldQuery, oldValue, oldOut], config=config)
     conv_attn_to_linear(new_checkpoint)
 
     for i in range(num_up_blocks):
@@ -668,7 +669,8 @@ def convert_ldm_vae_checkpoint(checkpoint, config):
     oldKey = {"old": "key", "new": "to_k"}
     oldQuery = {"old": "query", "new": "to_q"}
     oldValue = {"old": "value", "new": "to_v"}
-    assign_to_checkpoint(paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path, oldKey, oldQuery, oldValue], config=config)
+    oldOut = {"old": "proj_attn", "new": "to_out"}
+    assign_to_checkpoint(paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path, oldKey, oldQuery, oldValue, oldOut], config=config)
     conv_attn_to_linear(new_checkpoint)
     return new_checkpoint
 
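Context for the change above: the commit extends the VAE attention key renaming so the output projection is converted too (proj_attn → to_out), alongside the existing key/query/value → to_k/to_q/to_v pairs. A minimal sketch of how such {"old", "new"} replacement pairs act on state-dict keys, assuming assign_to_checkpoint applies them as substring renames (the helper below is illustrative, not the repo's function):

```python
# Illustrative sketch of the renaming driven by additional_replacements;
# assign_to_checkpoint in convert_from_ckpt.py does more (path mapping,
# weight reshaping), but the renaming step reduces to substring replacement.
replacements = [
    {"old": "key", "new": "to_k"},
    {"old": "query", "new": "to_q"},
    {"old": "value", "new": "to_v"},
    {"old": "proj_attn", "new": "to_out"},  # the pair this commit adds
]

def rename_key(path: str) -> str:
    for r in replacements:
        path = path.replace(r["old"], r["new"])
    return path

print(rename_key("encoder.mid_block.attentions.0.query.weight"))
# -> encoder.mid_block.attentions.0.to_q.weight
print(rename_key("encoder.mid_block.attentions.0.proj_attn.bias"))
# -> encoder.mid_block.attentions.0.to_out.bias
```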
handler.py CHANGED
@@ -66,7 +66,7 @@ class EndpointHandler():
         self.pipeline = AnimationPipeline(
             vae=vae, text_encoder=text_encoder, tokenizer=tokenizer, unet=unet,
             scheduler=DDIMScheduler(**OmegaConf.to_container(inference_config.noise_scheduler_kwargs.DDIMScheduler))
-        ).to("cuda")
+        )
 
         # huggingface download motion module from bluestarburst/AnimateDiff-SceneFusion/models/Motion_Module/mm_sd_v15.ckpt
 
@@ -130,7 +130,7 @@ class EndpointHandler():
         self.pipeline = convert_lora(self.pipeline, state_dict)
         # self.pipeline = convert_lora(self.pipeline, state_dict, alpha=model_config.lora_alpha)
 
-        self.pipeline.to("cuda")
+        self.pipeline
 
     def __call__(self, data : Any):
         """
@@ -205,4 +205,4 @@ class EndpointHandler():
     # This function will be called during inference time.
 
 
-# new_handler = EndpointHandler()
+new_handler = EndpointHandler()
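Both handler.py hunks remove eager `.to("cuda")` calls, so the pipeline now stays on whatever device it was constructed on; note that the bare `self.pipeline` expression that replaces the second call is a no-op statement. If the intent is to avoid assuming a GPU at build time, a guarded, deferred move is the usual pattern; a hedged sketch, with `pipeline` standing in for the handler's AnimationPipeline:

```python
import torch

def place_pipeline(pipeline):
    # Move to GPU only when one is actually present; otherwise stay on CPU.
    # This avoids crashing in GPU-less build or test environments, which is
    # presumably why the hard-coded .to("cuda") calls were dropped.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    return pipeline.to(device)
```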