mlgawd committed on
Commit 05a23f3 · verified
1 Parent(s): d8efbf8

Update transformer/config.json

Files changed (1)
  1. transformer/config.json +1 -6
transformer/config.json CHANGED
@@ -2,12 +2,7 @@
  "_class_name": "FluxTransformer2DModel",
  "_diffusers_version": "0.30.0",
  "attention_head_dim": 128,
- "axes_dims_rope": [
-   16,
-   56,
-   56
- ],
- "guidance_embeds": false,
+ "guidance_embeds": true,
  "in_channels": 64,
  "joint_attention_dim": 4096,
  "num_attention_heads": 24,