AbstractPhil committed on
Commit
e36bb34
·
verified ·
1 Parent(s): fa8608b

Checkpoint step 402996

Browse files
Files changed (1) hide show
  1. config.json +12 -1
config.json CHANGED
@@ -3,6 +3,7 @@
3
  "num_attention_heads": 4,
4
  "attention_head_dim": 128,
5
  "in_channels": 16,
 
6
  "joint_attention_dim": 768,
7
  "pooled_projection_dim": 768,
8
  "num_double_layers": 15,
@@ -13,5 +14,15 @@
13
  56,
14
  56
15
  ],
16
- "guidance_embeds": true
 
 
 
 
 
 
 
 
 
 
17
  }
 
3
  "num_attention_heads": 4,
4
  "attention_head_dim": 128,
5
  "in_channels": 16,
6
+ "patch_size": 1,
7
  "joint_attention_dim": 768,
8
  "pooled_projection_dim": 768,
9
  "num_double_layers": 15,
 
14
  56,
15
  56
16
  ],
17
+ "use_lune_expert": true,
18
+ "lune_expert_dim": 1280,
19
+ "lune_hidden_dim": 512,
20
+ "lune_dropout": 0.1,
21
+ "freeze_lune": false,
22
+ "use_sol_prior": true,
23
+ "sol_spatial_size": 8,
24
+ "sol_hidden_dim": 256,
25
+ "sol_geometric_weight": 0.7,
26
+ "freeze_sol": false,
27
+ "use_t5_vec": true
28
  }