# File size: 1,173 Bytes
# Commit: 166850f
---
# Run-level settings shared across the whole experiment.
global:
  name: train-iternet
  phase: train
  stage: train-super
  workdir: workdir    # presumably the output/checkpoint root — verify against trainer
  seed: null          # null = no fixed RNG seed (was `~`; `null` is the canonical spelling)
 
# Dataset configuration; train and test read from the same root here.
dataset:
  train:
    # assumes paths are resolved relative to the process working dir — TODO confirm
    roots: ['output_pixelplanet_dataset/']
    batch_size: 20
  test:
    roots: ['output_pixelplanet_dataset/']
    batch_size: 20
  data_aug: true      # lowercase canonical booleans (was `True`/`False`; same parsed value)
  multiscales: false
  num_workers: 8

# Training-loop schedule.
training:
  epochs: 1000
  show_iters: 500   # presumably logging interval in iterations — verify against trainer
  eval_iters: 500   # presumably evaluation interval in iterations — verify against trainer
  # save_iters: 1

# Optimizer and learning-rate schedule.
optimizer:
  type: Adam
  true_wd: false    # lowercase canonical booleans (was `False`; same parsed value)
  wd: 0.0
  bn_wd: false
  clip_grad: 20
  lr: 0.0001
  args:
    # NOTE(review): `!!python/tuple` is a Python-specific tag — it requires
    # loading with yaml.load + an unsafe/full loader; yaml.safe_load will
    # reject it. If the consumer accepts a plain list for betas, drop the
    # tag so the file can be safe-loaded.
    betas: !!python/tuple [0.9, 0.999]  # default Adam betas
  scheduler:
    periods: [6, 4]
    gamma: 0.1

# Model architecture and sub-module settings.
model:
  name: 'modules.model_iternet.IterNet'  # presumably an import path resolved by the trainer — verify
  iter_size: 3
  ensemble: ''         # empty string = no ensemble — TODO confirm consumer semantics
  use_vision: false    # lowercase canonical boolean (was `False`; same parsed value)
  vision:
    checkpoint: workdir/train-iternet/best-train-iternet.pth
    loss_weight: 1.0   # canonical float spelling (was `1.`; same parsed value)
    attention: 'position'
    backbone: 'transformer'
    backbone_ln: 3
    iter_size: 3
    backbone_alpha_d: 0.5
  # language:
  #   checkpoint: workdir/pretrain-language-model/pretrain-language-model.pth
  #   num_layers: 4
  #   loss_weight: 1.0
  #   detach: true
  #   use_self_attn: false
  alignment:
    loss_weight: 1.0