bluestarburst committed
Commit 1a070e2
Parent: eea7935

Upload folder using huggingface_hub

Files changed (1)
  handler.py  +5 -5
handler.py CHANGED
@@ -21,17 +21,17 @@ from animatediff.utils.util import load_weights
 
 
 class EndpointHandler():
-    def __init__(self, model_path: str = "models/StableDiffusion/", inference_config_path: str = "configs/inference/inference-v3.yaml", motion_module: str = "models/Motion_Module/mm_sd_v15.ckpt"):
+    def __init__(self, model_path: str = "bluestarburst/AnimateDiff-SceneFusion", inference_config_path: str = "configs/inference/inference-v3.yaml", motion_module: str = "models/Motion_Module/mm_sd_v15.ckpt"):
 
         # inference_config = OmegaConf.load(inference_config_path)
 
         inference_config = {'unet_additional_kwargs': {'unet_use_cross_frame_attention': False, 'unet_use_temporal_attention': False, 'use_motion_module': True, 'motion_module_resolutions': [1, 2, 4, 8], 'motion_module_mid_block': False, 'motion_module_decoder_only': False, 'motion_module_type': 'Vanilla', 'motion_module_kwargs': {'num_attention_heads': 8, 'num_transformer_block': 1, 'attention_block_types': ['Temporal_Self', 'Temporal_Self'], 'temporal_position_encoding': True, 'temporal_position_encoding_max_len': 24, 'temporal_attention_dim_div': 1}}, 'noise_scheduler_kwargs': {'DDIMScheduler': {'num_train_timesteps': 1000, 'beta_start': 0.00085, 'beta_end': 0.012, 'beta_schedule': 'linear', 'steps_offset': 1, 'clip_sample': False}, 'EulerAncestralDiscreteScheduler': {'num_train_timesteps': 1000, 'beta_start': 0.00085, 'beta_end': 0.012, 'beta_schedule': 'linear'}, 'KDPM2AncestralDiscreteScheduler': {'num_train_timesteps': 1000, 'beta_start': 0.00085, 'beta_end': 0.012, 'beta_schedule': 'linear'}}}
 
         ### >>> create validation pipeline >>> ###
-        tokenizer = CLIPTokenizer.from_pretrained(model_path, subfolder="tokenizer")
-        text_encoder = CLIPTextModel.from_pretrained(model_path, subfolder="text_encoder")
-        vae = AutoencoderKL.from_pretrained(model_path, subfolder="vae")
-        unet = UNet3DConditionModel.from_pretrained_2d(model_path, subfolder="unet", unet_additional_kwargs=OmegaConf.to_container(inference_config.unet_additional_kwargs))
+        tokenizer = CLIPTokenizer.from_pretrained(model_path, subfolder="models/StableDiffusion/tokenizer")
+        text_encoder = CLIPTextModel.from_pretrained(model_path, subfolder="models/StableDiffusion/text_encoder")
+        vae = AutoencoderKL.from_pretrained(model_path, subfolder="models/StableDiffusion/vae")
+        unet = UNet3DConditionModel.from_pretrained_2d(model_path, subfolder="models/StableDiffusion/unet", unet_additional_kwargs=OmegaConf.to_container(inference_config.unet_additional_kwargs))
 
         if is_xformers_available(): unet.enable_xformers_memory_efficient_attention()
         else: assert False
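
The commit swaps the default model_path from a local "models/StableDiffusion/" checkout to the Hub repo id "bluestarburst/AnimateDiff-SceneFusion" and moves that local prefix into the subfolder argument of each from_pretrained call. A minimal sketch of what one of the new calls does, assuming the repo keeps the same models/StableDiffusion/tokenizer layout as the old folder:

from transformers import CLIPTokenizer

# With a repo id instead of a local directory, from_pretrained downloads the
# files under the given subfolder of the Hub repo and caches them locally.
# Assumes bluestarburst/AnimateDiff-SceneFusion contains
# models/StableDiffusion/tokenizer/ with the usual tokenizer files.
tokenizer = CLIPTokenizer.from_pretrained(
    "bluestarburst/AnimateDiff-SceneFusion",
    subfolder="models/StableDiffusion/tokenizer",
)
print(tokenizer("a photo of a dog").input_ids)

The same pattern applies to the text_encoder, vae, and unet lines in the hunk above.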
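One detail present in both sides of the hunk: inference_config is built as a plain Python dict, while the unet line reads it with attribute access (inference_config.unet_additional_kwargs), which works on an OmegaConf config such as the one the commented-out OmegaConf.load(inference_config_path) would return. A hedged sketch, not part of the commit, of wrapping the inline dict so it behaves the same way (the dict is trimmed here for brevity):

from omegaconf import OmegaConf

# Sketch only: wrap the inline dict so dotted attribute access and
# OmegaConf.to_container work as they would on a config loaded from YAML.
inference_config = OmegaConf.create({
    "unet_additional_kwargs": {"use_motion_module": True},  # trimmed for the sketch
})
unet_kwargs = OmegaConf.to_container(inference_config.unet_additional_kwargs)
print(unet_kwargs)  # {'use_motion_module': True}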