erbacher committed on
Commit
91a81f0
1 Parent(s): d35ace7

Upload config

Files changed (1)
  1. config.json +43 -0
config.json ADDED
@@ -0,0 +1,43 @@
+ {
+   "_commit_hash": null,
+   "attn_dim_head": 32,
+   "channels": 1,
+   "codebook_size": 512,
+   "fsq_levels": [
+     7,
+     5,
+     5,
+     5,
+     5
+   ],
+   "image_size": 256,
+   "init_dim": 64,
+   "layers": [
+     "residual",
+     "residual",
+     "compress_space",
+     "residual",
+     "residual",
+     "compress_space",
+     "residual",
+     "residual",
+     "compress_space",
+     "residual",
+     "residual",
+     "compress_space",
+     "residual",
+     "residual"
+   ],
+   "model_type": "pdetokenizer",
+   "num_codebooks": 1,
+   "num_groups": 8,
+   "pad_mode": "circular",
+   "perceptual_loss_weight": 0,
+   "quantization_type": "vq",
+   "temporal_compression": false,
+   "transformers_version": null,
+   "use_batch_norm": false,
+   "use_gan": false,
+   "use_revin": false,
+   "use_style": false
+ }
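
For reference, a minimal sketch of how the uploaded config.json could be inspected locally. The local path "config.json" is an assumption (the file could be obtained from the repo however you prefer); the comments about what the fields mean are inferences from the field names, not confirmed by this commit.

# Minimal sketch: load the uploaded config.json and inspect a few fields.
# The path is an assumption; point it at wherever the file was downloaded.
import json

with open("config.json") as f:
    cfg = json.load(f)

# "pdetokenizer" is a custom model_type defined by this repo,
# not a stock transformers architecture.
print(cfg["model_type"])         # pdetokenizer
print(cfg["quantization_type"])  # vq (fsq_levels is presumably only used
                                 # when FSQ quantization is selected)

# Each "compress_space" entry in the layer list presumably marks one
# spatial-downsampling stage; count them to see how many stages there are.
n_compress = cfg["layers"].count("compress_space")
print(n_compress)                # 4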