{ "_class_name": "PriorTransformer", "_diffusers_version": "0.17.0.dev0", "act_fn": "gelu", "additional_embeddings": 0, "attention_head_dim": 64, "clip_embedding_dim": 768, "dropout": 0.0, "embedding_dim": 1024, "has_encoder_hidden_states_proj": false, "has_post_process": false, "has_prd_embedding": false, "has_pre_norm": true, "num_attention_heads": 16, "num_embeddings": 1024, "num_layers": 24, "out_dim": 2048, "time_embed_dim": 4096 }