SoraT2I / transformer /config.json
{
  "_class_name": "PixArtTransformer2DModel",
  "_diffusers_version": "0.29.1",
  "_name_or_path": "/root/.cache/huggingface/hub/models--PixArt-alpha--PixArt-Sigma-XL-2-1024-MS/snapshots/e102b3591cc82e97071b8b4cb90d834d0c487207/transformer",
  "activation_fn": "gelu-approximate",
  "attention_bias": true,
  "attention_head_dim": 72,
  "attention_type": "default",
  "caption_channels": 4096,
  "cross_attention_dim": 1152,
  "double_self_attention": false,
  "dropout": 0.0,
  "in_channels": 4,
  "interpolation_scale": 2,
  "norm_elementwise_affine": false,
  "norm_eps": 1e-06,
  "norm_num_groups": 32,
  "norm_type": "ada_norm_single",
  "num_attention_heads": 16,
  "num_embeds_ada_norm": 1000,
  "num_layers": 28,
  "num_vector_embeds": null,
  "only_cross_attention": false,
  "out_channels": 8,
  "patch_size": 2,
  "sample_size": 128,
  "upcast_attention": false,
  "use_additional_conditions": false,
  "use_linear_projection": false
}
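
For reference, a minimal sketch of loading this transformer with diffusers. The config's dimensions are internally consistent: 16 heads x 72 dims per head gives the 1152 inner width, and sample_size 128 with the usual 8x VAE downsampling corresponds to 1024x1024 images. The repo id "toilaluan/SoraT2I" is inferred from the page path and is an assumption, not confirmed by the file itself.

import torch
from diffusers import PixArtSigmaPipeline, PixArtTransformer2DModel

# Load just the transformer described by this config.
transformer = PixArtTransformer2DModel.from_pretrained(
    "toilaluan/SoraT2I",      # assumed repo id, inferred from the page path
    subfolder="transformer",  # this config lives under transformer/
    torch_dtype=torch.float16,
)

# Or load the full pipeline this file was uploaded as part of
# (the commit message indicates a PixArtSigmaPipeline upload).
pipe = PixArtSigmaPipeline.from_pretrained(
    "toilaluan/SoraT2I", torch_dtype=torch.float16
)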