---
# Experiment bookkeeping: RNG seed and output location.
experiment:
  seed: 88  # global random seed — presumably applied to all RNGs by the runner; confirm
  save_dir: ../experiments/  # checkpoints/logs root (relative path — depends on launch CWD)


# Data source and dataset construction.
data:
  # Per-sample annotation CSV; filename suggests 192-slice whole-volume
  # segmentation with k-fold splits plus pseudo-labels — TODO confirm schema.
  annotations: ../data/train_seg_whole_192_kfold_with_pseudo.csv
  data_dir: ../data/
  input: filename  # CSV column holding the image path
  target: label  # CSV column holding the segmentation target
  outer_fold: 0  # which fold is held out for validation
  dataset:
    name: NumpyChunkSegmentDataset  # project-defined dataset class
    params:
      segmentation_format: numpy
      channels: grayscale
      flip: true  # flip/transpose/invert look like augmentation toggles — semantics defined by the dataset class
      transpose: true
      invert: false
      verbose: true
      num_images: 192  # slices per volume fed to the model
      z_lt: resample_resample  # handling when depth is less than num_images — confirm meaning
      z_gt: resample_resample  # handling when depth is greater than num_images — confirm meaning
      one_hot_encode: true
      num_classes: 8  # should match model.params.num_classes
      add_foreground_channel: false


# Input transform pipeline: resize -> augment -> crop -> preprocess (order presumed from layout).
transform:
  resize:
    name: resize_ignore_3d
    params:
      imsize: [192, 192, 192]  # target 3D size — axis order (D, H, W) presumed; confirm
  augment:
    # explicitly disabled — no train-time augmentation stage here
    null
  crop:
    # explicitly disabled — no cropping stage
    null
  preprocess:
    name: Preprocessor
    params:
      image_range: [0, 255]  # raw intensity range of stored images
      input_range: [0, 1]  # range after rescaling, before normalization
      mean: [0.5]  # with sdev 0.5 this maps [0, 1] to roughly [-1, 1]
      sdev: [0.5]


# Training-task wrapper (project-defined).
task:
  name: SegmentationTask3D
  params:
    chunk_validation: true  # validate volumes in chunks — presumably to bound memory; confirm


# 3D segmentation network.
model:
  name: NetSegment3D
  params:
    architecture: DeepLabV3Plus_3D
    encoder_name: x3d_l  # X3D-L backbone used as the 3D encoder — presumed; confirm registry name
    encoder_params:
      pretrained: true
      output_stride: 16
      z_strides: [2, 2, 2, 2, 2]  # per-stage depth-axis strides — confirm count matches encoder stages
    decoder_params:
      upsampling: 4
    deep_supervision: true  # emits auxiliary outputs — pairs with the SupervisorLoss stanza
    num_classes: 8  # should match data.dataset.params.num_classes
    in_channels: 1  # grayscale input (channels: grayscale above)
    dropout: 0.2


# Deep-supervision loss: main output plus two auxiliary outputs.
loss:
  name: SupervisorLoss
  params:
    segmentation_loss: DiceBCELoss
    scale_factors: [0.25, 0.25]  # downscale factors for the two auxiliary targets — presumed; confirm
    loss_weights: [1.0, 0.25, 0.25]  # [main, aux1, aux2] weighting — presumed ordering; confirm
    loss_params:
      dice_loss_params:
        mode: multilabel  # consistent with one_hot_encode: true in the dataset
        exponent: 2
        smooth: 1.0
      bce_loss_params:
        smooth_factor: 0.01  # label smoothing for the BCE term — presumed; confirm
        pos_weight: 1.0
      dice_loss_weight: 1.0
      bce_loss_weight: 0.2


# Optimizer. Note: 3.0e-4 / 5.0e-4 include a decimal point, so they parse as
# floats even under PyYAML's 1.1 resolver (bare `3e-4` would not).
optimizer:
  name: AdamW
  params:
    lr: 3.0e-4
    weight_decay: 5.0e-4


# LR schedule: cosine decay from optimizer.params.lr down to final_lr — presumed; confirm wrapper semantics.
scheduler:
  name: CosineAnnealingLR
  params:
    final_lr: 0.0


# Training-loop settings.
train:
  batch_size: 4
  num_epochs: 10


# Validation settings.
evaluate:
  batch_size: 1  # full volumes evaluated one at a time — presumably for memory; confirm
  metrics: [DSC]  # Dice similarity coefficient
  monitor: dsc_ignore_mean  # metric key used for checkpoint selection — confirm exact name in the metrics module
  mode: max  # higher monitored value is better