xyfJASON committed
Commit 541561b
1 Parent(s): 8f3ce12

Update config file

ddpm_cifar10/config-2023-03-09-14-06-16.yaml CHANGED
@@ -1,51 +1,55 @@
+seed: 2022
 data:
-  dataroot: /data/fengxin/xuyifeng/data/CIFAR-10/
+  target: datasets.cifar10.CIFAR10
+  params:
+    root: /data/fengxin/xuyifeng/data/CIFAR-10/
+    img_size: 32
   img_channels: 3
-  img_size: 32
-  name: CIFAR-10
   num_classes: 10
 dataloader:
-  micro_batch: 0
   num_workers: 4
   pin_memory: true
   prefetch_factor: 2
-diffusion:
-  beta_end: 0.02
-  beta_schedule: linear
-  beta_start: 0.0001
-  objective: pred_eps
-  total_steps: 1000
-  var_type: fixed_large
 model:
-  dim: 128
-  dim_mults:
-  - 1
-  - 2
-  - 2
-  - 2
-  dropout: 0.1
-  ema_decay: 0.9999
-  ema_gradual: true
-  in_channels: 3
-  n_heads: 1
-  num_res_blocks: 2
-  out_channels: 3
-  type: unet
-  use_attn:
-  - false
-  - true
-  - false
-  - false
-seed: 2022
+  target: models.unet.UNet
+  params:
+    in_channels: 3
+    out_channels: 3
+    dim: 128
+    dim_mults:
+    - 1
+    - 2
+    - 2
+    - 2
+    use_attn:
+    - false
+    - true
+    - false
+    - false
+    num_res_blocks: 2
+    n_heads: 1
+    dropout: 0.1
+diffusion:
+  target: diffusions.ddpm.DDPM
+  params:
+    total_steps: 1000
+    beta_schedule: linear
+    beta_start: 0.0001
+    beta_end: 0.02
+    objective: pred_eps
+    var_type: fixed_large
 train:
+  n_steps: 800000
   batch_size: 128
+  micro_batch: 0
   clip_grad_norm: 1.0
-  n_samples: 64
-  n_steps: 800000
-  optim:
-    lr: 0.0002
-    type: Adam
+  ema_decay: 0.9999
+  ema_gradual: true
   print_freq: 400
-  resume: null
-  sample_freq: 5000
   save_freq: 10000
+  sample_freq: 5000
+  n_samples: 64
+  optim:
+    target: torch.optim.Adam
+    params:
+      lr: 0.0002
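
The rewritten config groups every component (dataset, model, diffusion, optimizer) under `target`/`params` keys, i.e. a dotted import path plus its constructor arguments, instead of free-form fields such as `type: unet` or `type: Adam`. The loader that consumes these entries is not part of this diff; the snippet below is only a minimal sketch of how a `target`/`params` node is typically resolved, assuming PyYAML is available and that the classes named in the config (`datasets.cifar10.CIFAR10`, `models.unet.UNet`, `diffusions.ddpm.DDPM`) are importable. The helper name `instantiate_from_config` is illustrative, not taken from the repository.

import importlib
import yaml

def instantiate_from_config(node, *args):
    # Illustrative helper (not the repository's actual loader):
    # resolve the dotted path in `target` and call it with `params` as kwargs.
    # Extra positional arguments (e.g. model.parameters() for the optimizer)
    # are passed through in front of the configured params.
    module_name, attr_name = node["target"].rsplit(".", 1)
    cls = getattr(importlib.import_module(module_name), attr_name)
    return cls(*args, **node.get("params", {}))

# Hypothetical usage with the file changed in this commit.
with open("ddpm_cifar10/config-2023-03-09-14-06-16.yaml") as f:
    cfg = yaml.safe_load(f)

model = instantiate_from_config(cfg["model"])          # models.unet.UNet(in_channels=3, ...)
diffusion = instantiate_from_config(cfg["diffusion"])  # diffusions.ddpm.DDPM(total_steps=1000, ...)
optimizer = instantiate_from_config(cfg["train"]["optim"], model.parameters())  # torch.optim.Adam(..., lr=0.0002)

One effect of routing everything through `target`/`params` (and moving `micro_batch`, `ema_decay`, `ema_gradual`, and the optimizer settings under `train:`) is that the training script no longer needs per-component branches on fields like `type`; swapping in a different dataset, backbone, or optimizer becomes a config-only change.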