zhyever committed on
Commit
935a102
1 Parent(s): cfbd202

Upload 40 files

Files changed (40)
  1. DepthAnything_vitb.pt +3 -0
  2. DepthAnything_vitl.pt +3 -0
  3. DepthAnything_vits.pt +3 -0
  4. ZoeDepthv1.pt +3 -0
  5. depthanything_vitb_u4k/coarse_pretrain/20240315_095516.log +1024 -0
  6. depthanything_vitb_u4k/coarse_pretrain/checkpoint_24.pth +3 -0
  7. depthanything_vitb_u4k/coarse_pretrain/config.py +310 -0
  8. depthanything_vitb_u4k/fine_pretrain/20240315_153036.log +1028 -0
  9. depthanything_vitb_u4k/fine_pretrain/checkpoint_24.pth +3 -0
  10. depthanything_vitb_u4k/fine_pretrain/config.py +314 -0
  11. depthanything_vitb_u4k/patchfusion/20240315_193032.log +0 -0
  12. depthanything_vitb_u4k/patchfusion/checkpoint_16.pth +3 -0
  13. depthanything_vitb_u4k/patchfusion/config.py +341 -0
  14. depthanything_vitl_u4k/coarse_pretrain/20240315_102957.log +0 -0
  15. depthanything_vitl_u4k/coarse_pretrain/checkpoint_24.pth +3 -0
  16. depthanything_vitl_u4k/coarse_pretrain/config.py +310 -0
  17. depthanything_vitl_u4k/fine_pretrain/20240315_140837.log +0 -0
  18. depthanything_vitl_u4k/fine_pretrain/checkpoint_24.pth +3 -0
  19. depthanything_vitl_u4k/fine_pretrain/config.py +314 -0
  20. depthanything_vitl_u4k/patchfusion/20240315_175237.log +0 -0
  21. depthanything_vitl_u4k/patchfusion/checkpoint_16.pth +3 -0
  22. depthanything_vitl_u4k/patchfusion/config.py +347 -0
  23. depthanything_vits_u4k/coarse_pretrain/20240315_002030.log +1024 -0
  24. depthanything_vits_u4k/coarse_pretrain/checkpoint_24.pth +3 -0
  25. depthanything_vits_u4k/coarse_pretrain/config.py +310 -0
  26. depthanything_vits_u4k/fine_pretrain/20240315_035516.log +1028 -0
  27. depthanything_vits_u4k/fine_pretrain/checkpoint_24.pth +3 -0
  28. depthanything_vits_u4k/fine_pretrain/config.py +314 -0
  29. depthanything_vits_u4k/patchfusion/20240315_072915.log +0 -0
  30. depthanything_vits_u4k/patchfusion/checkpoint_16.pth +3 -0
  31. depthanything_vits_u4k/patchfusion/config.py +341 -0
  32. zoedepth_u4k/coarse_pretrain/20240313_154004.log +0 -0
  33. zoedepth_u4k/coarse_pretrain/checkpoint_24.pth +3 -0
  34. zoedepth_u4k/coarse_pretrain/config.py +307 -0
  35. zoedepth_u4k/fine_pretrain/20240313_205222.log +0 -0
  36. zoedepth_u4k/fine_pretrain/checkpoint_24.pth +3 -0
  37. zoedepth_u4k/fine_pretrain/config.py +307 -0
  38. zoedepth_u4k/patchfusion/20240314_171340.log +0 -0
  39. zoedepth_u4k/patchfusion/checkpoint_16.pth +3 -0
  40. zoedepth_u4k/patchfusion/config.py +305 -0
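The four backbone .pt files and the per-experiment checkpoint_*.pth files listed above are stored through Git LFS, so a plain clone only yields pointer files. A minimal sketch of fetching one checkpoint with huggingface_hub; the repo_id below is a placeholder, not something stated on this page:

```python
# Sketch: download one of the uploaded checkpoints via the Hub API.
from huggingface_hub import hf_hub_download

ckpt_path = hf_hub_download(
    repo_id="user/repo",  # placeholder: substitute this repository's actual id
    filename="depthanything_vitb_u4k/coarse_pretrain/checkpoint_24.pth",
)
print(ckpt_path)  # local cache path of the downloaded checkpoint
```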
DepthAnything_vitb.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:851ed48c54d3bc91be762169662ae6df3b522d741a326d8d0cfad90deb0dd893
+ size 391269536
DepthAnything_vitl.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:09e3c568c99c6437a53ccdbb59b3d2ec7a918ef5f74a775dfca6a14ee3ab3f57
+ size 1343379696
DepthAnything_vits.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ecc1f60f6e66ed39c7a3bb6395f1e5fd649f95505833d82903ce79df4eb1a42a
+ size 100247904
ZoeDepthv1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b6f2a136ea7dfe1368638b8cdee5d01b687158d9e08e7c4ee0bc21613f94dc4
+ size 1443428517
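Each of the diffs above adds a Git LFS pointer rather than the binary itself: the oid line records the SHA-256 of the real weight file and size its byte count. A small sketch, assuming the weights have already been pulled locally, for checking a downloaded file against the oid in its pointer:

```python
# Sketch: verify a local weight file against the sha256 oid in its LFS pointer.
import hashlib

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

# oid taken from the DepthAnything_vitb.pt pointer above
expected = "851ed48c54d3bc91be762169662ae6df3b522d741a326d8d0cfad90deb0dd893"
assert sha256_of("DepthAnything_vitb.pt") == expected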
depthanything_vitb_u4k/coarse_pretrain/20240315_095516.log ADDED
@@ -0,0 +1,1024 @@
1
+ 2024/03/15 09:55:26 - patchstitcher - INFO -
2
+ ------------------------------------------------------------
3
+ System environment:
4
+ sys.platform: linux
5
+ Python: 3.8.18 | packaged by conda-forge | (default, Oct 10 2023, 15:44:36) [GCC 12.3.0]
6
+ CUDA available: True
7
+ numpy_random_seed: 621
8
+ GPU 0,1,2,3: NVIDIA A100-SXM4-80GB
9
+ CUDA_HOME: /sw/rl9g/cuda/11.8/rl9_binary
10
+ NVCC: Cuda compilation tools, release 11.8, V11.8.89
11
+ GCC: gcc (GCC) 11.3.1 20220421 (Red Hat 11.3.1-2)
12
+ PyTorch: 2.1.2
13
+ PyTorch compiling details: PyTorch built with:
14
+ - GCC 9.3
15
+ - C++ Version: 201703
16
+ - Intel(R) oneAPI Math Kernel Library Version 2022.1-Product Build 20220311 for Intel(R) 64 architecture applications
17
+ - Intel(R) MKL-DNN v3.1.1 (Git Hash 64f6bcbcbab628e96f33a62c3e975f8535a7bde4)
18
+ - OpenMP 201511 (a.k.a. OpenMP 4.5)
19
+ - LAPACK is enabled (usually provided by MKL)
20
+ - NNPACK is enabled
21
+ - CPU capability usage: AVX2
22
+ - CUDA Runtime 11.8
23
+ - NVCC architecture flags: -gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_61,code=sm_61;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86;-gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_90,code=sm_90;-gencode;arch=compute_37,code=compute_37
24
+ - CuDNN 8.7
25
+ - Magma 2.6.1
26
+ - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=11.8, CUDNN_VERSION=8.7.0, CXX_COMPILER=/opt/rh/devtoolset-9/root/usr/bin/c++, CXX_FLAGS= -D_GLIBCXX_USE_CXX11_ABI=0 -fabi-version=11 -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -DNDEBUG -DUSE_KINETO -DLIBKINETO_NOROCTRACER -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wall -Wextra -Werror=return-type -Werror=non-virtual-dtor -Werror=bool-operation -Wnarrowing -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-unused-parameter -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-stringop-overflow -Wno-psabi -Wno-error=pedantic -Wno-error=old-style-cast -Wno-invalid-partial-specialization -Wno-unused-private-field -Wno-aligned-allocation-unavailable -Wno-missing-braces -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Werror=cast-function-type -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_DISABLE_GPU_ASSERTS=ON, TORCH_VERSION=2.1.2, USE_CUDA=ON, USE_CUDNN=ON, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF,
27
+
28
+ TorchVision: 0.16.2
29
+ OpenCV: 4.8.1
30
+ MMEngine: 0.10.2
31
+
32
+ Runtime environment:
33
+ cudnn_benchmark: True
34
+ mp_cfg: {'mp_start_method': 'forkserver'}
35
+ dist_cfg: {'backend': 'nccl'}
36
+ seed: 621
37
+ Distributed launcher: pytorch
38
+ Distributed training: True
39
+ GPU number: 4
40
+ ------------------------------------------------------------
41
+
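For reference, the runtime-environment settings logged above correspond to a few plain PyTorch calls. This is only a sketch of an equivalent setup; the repo's own launcher may wire these up differently:

```python
# Sketch: cudnn benchmark, forkserver workers, fixed seed, NCCL process group,
# mirroring the "Runtime environment" block in the log above.
import os
import torch
import torch.distributed as dist
import torch.multiprocessing as mp

torch.backends.cudnn.benchmark = True          # cudnn_benchmark: True
mp.set_start_method("forkserver", force=True)  # mp_cfg: {'mp_start_method': 'forkserver'}
torch.manual_seed(621)                         # seed: 621

# Under `torchrun --nproc_per_node=4` (Distributed launcher: pytorch, 4 GPUs),
# each rank joins the NCCL process group:
if "RANK" in os.environ:
    dist.init_process_group(backend="nccl")    # dist_cfg: {'backend': 'nccl'}
```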
42
+ 2024/03/15 09:55:26 - patchstitcher - INFO - Config:
43
+ collect_input_args = [
44
+ 'image_lr',
45
+ 'crops_image_hr',
46
+ 'depth_gt',
47
+ 'crop_depths',
48
+ 'bboxs',
49
+ 'image_hr',
50
+ ]
51
+ convert_syncbn = True
52
+ debug = False
53
+ env_cfg = dict(
54
+ cudnn_benchmark=True,
55
+ dist_cfg=dict(backend='nccl'),
56
+ mp_cfg=dict(mp_start_method='forkserver'))
57
+ find_unused_parameters = True
58
+ general_dataloader = dict(
59
+ batch_size=1,
60
+ dataset=dict(
61
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
62
+ num_workers=2)
63
+ launcher = 'pytorch'
64
+ log_name = 'coarse_pretrain'
65
+ max_depth = 80
66
+ min_depth = 0.001
67
+ model = dict(
68
+ coarse_branch=dict(
69
+ attractor_alpha=1000,
70
+ attractor_gamma=2,
71
+ attractor_kind='mean',
72
+ attractor_type='inv',
73
+ aug=True,
74
+ bin_centers_type='softplus',
75
+ bin_embedding_dim=128,
76
+ clip_grad=0.1,
77
+ dataset='nyu',
78
+ depth_anything=True,
79
+ distributed=True,
80
+ do_resize=False,
81
+ force_keep_ar=True,
82
+ freeze_midas_bn=True,
83
+ gpu='NULL',
84
+ img_size=[
85
+ 392,
86
+ 518,
87
+ ],
88
+ inverse_midas=False,
89
+ log_images_every=0.1,
90
+ max_depth=80,
91
+ max_temp=50.0,
92
+ max_translation=100,
93
+ memory_efficient=True,
94
+ midas_model_type='vitb',
95
+ min_depth=0.001,
96
+ min_temp=0.0212,
97
+ model='zoedepth',
98
+ n_attractors=[
99
+ 16,
100
+ 8,
101
+ 4,
102
+ 1,
103
+ ],
104
+ n_bins=64,
105
+ name='ZoeDepth',
106
+ notes='',
107
+ output_distribution='logbinomial',
108
+ prefetch=False,
109
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
110
+ print_losses=False,
111
+ project='ZoeDepth',
112
+ random_crop=False,
113
+ random_translate=False,
114
+ root='.',
115
+ save_dir='',
116
+ shared_dict='NULL',
117
+ tags='',
118
+ train_midas=True,
119
+ translate_prob=0.2,
120
+ type='DA-ZoeDepth',
121
+ uid='NULL',
122
+ use_amp=False,
123
+ use_pretrained_midas=True,
124
+ use_shared_dict=False,
125
+ validate_every=0.25,
126
+ version_name='v1',
127
+ workers=16),
128
+ fine_branch=dict(
129
+ attractor_alpha=1000,
130
+ attractor_gamma=2,
131
+ attractor_kind='mean',
132
+ attractor_type='inv',
133
+ aug=True,
134
+ bin_centers_type='softplus',
135
+ bin_embedding_dim=128,
136
+ clip_grad=0.1,
137
+ dataset='nyu',
138
+ depth_anything=True,
139
+ distributed=True,
140
+ do_resize=False,
141
+ force_keep_ar=True,
142
+ freeze_midas_bn=True,
143
+ gpu='NULL',
144
+ img_size=[
145
+ 392,
146
+ 518,
147
+ ],
148
+ inverse_midas=False,
149
+ log_images_every=0.1,
150
+ max_depth=80,
151
+ max_temp=50.0,
152
+ max_translation=100,
153
+ memory_efficient=True,
154
+ midas_model_type='vitb',
155
+ min_depth=0.001,
156
+ min_temp=0.0212,
157
+ model='zoedepth',
158
+ n_attractors=[
159
+ 16,
160
+ 8,
161
+ 4,
162
+ 1,
163
+ ],
164
+ n_bins=64,
165
+ name='ZoeDepth',
166
+ notes='',
167
+ output_distribution='logbinomial',
168
+ prefetch=False,
169
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
170
+ print_losses=False,
171
+ project='ZoeDepth',
172
+ random_crop=False,
173
+ random_translate=False,
174
+ root='.',
175
+ save_dir='',
176
+ shared_dict='NULL',
177
+ tags='',
178
+ train_midas=True,
179
+ translate_prob=0.2,
180
+ type='DA-ZoeDepth',
181
+ uid='NULL',
182
+ use_amp=False,
183
+ use_pretrained_midas=True,
184
+ use_shared_dict=False,
185
+ validate_every=0.25,
186
+ version_name='v1',
187
+ workers=16),
188
+ max_depth=80,
189
+ min_depth=0.001,
190
+ sigloss=dict(type='SILogLoss'),
191
+ target='coarse',
192
+ type='BaselinePretrain')
193
+ optim_wrapper = dict(
194
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
195
+ optimizer=dict(lr=4e-06, type='AdamW', weight_decay=0.01),
196
+ paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict()))
197
+ param_scheduler = dict(
198
+ base_momentum=0.85,
199
+ cycle_momentum=True,
200
+ div_factor=1,
201
+ final_div_factor=10000,
202
+ max_momentum=0.95,
203
+ pct_start=0.5,
204
+ three_phase=False)
205
+ project = 'patchfusion'
206
+ tags = [
207
+ 'coarse',
208
+ 'da',
209
+ 'vitb',
210
+ ]
211
+ test_in_dataloader = dict(
212
+ batch_size=1,
213
+ dataset=dict(
214
+ data_root='./data/u4k',
215
+ max_depth=80,
216
+ min_depth=0.001,
217
+ mode='infer',
218
+ split='./data/u4k/splits/test.txt',
219
+ transform_cfg=dict(network_process_size=[
220
+ 384,
221
+ 512,
222
+ ]),
223
+ type='UnrealStereo4kDataset'),
224
+ num_workers=2)
225
+ test_out_dataloader = dict(
226
+ batch_size=1,
227
+ dataset=dict(
228
+ data_root='./data/u4k',
229
+ max_depth=80,
230
+ min_depth=0.001,
231
+ mode='infer',
232
+ split='./data/u4k/splits/test_out.txt',
233
+ transform_cfg=dict(network_process_size=[
234
+ 384,
235
+ 512,
236
+ ]),
237
+ type='UnrealStereo4kDataset'),
238
+ num_workers=2)
239
+ train_cfg = dict(
240
+ eval_start=0,
241
+ log_interval=100,
242
+ max_epochs=24,
243
+ save_checkpoint_interval=24,
244
+ train_log_img_interval=500,
245
+ val_interval=2,
246
+ val_log_img_interval=50,
247
+ val_type='epoch_base')
248
+ train_dataloader = dict(
249
+ batch_size=4,
250
+ dataset=dict(
251
+ data_root='./data/u4k',
252
+ max_depth=80,
253
+ min_depth=0.001,
254
+ mode='train',
255
+ resize_mode='depth-anything',
256
+ split='./data/u4k/splits/train.txt',
257
+ transform_cfg=dict(
258
+ degree=1.0, network_process_size=[
259
+ 392,
260
+ 518,
261
+ ], random_crop=True),
262
+ type='UnrealStereo4kDataset'),
263
+ num_workers=4)
264
+ val_dataloader = dict(
265
+ batch_size=1,
266
+ dataset=dict(
267
+ data_root='./data/u4k',
268
+ max_depth=80,
269
+ min_depth=0.001,
270
+ mode='infer',
271
+ resize_mode='depth-anything',
272
+ split='./data/u4k/splits/val.txt',
273
+ transform_cfg=dict(degree=1.0, network_process_size=[
274
+ 392,
275
+ 518,
276
+ ]),
277
+ type='UnrealStereo4kDataset'),
278
+ num_workers=2)
279
+ work_dir = './work_dir/depthanything_vitb_u4k/coarse_pretrain'
280
+ zoe_depth_config = dict(
281
+ attractor_alpha=1000,
282
+ attractor_gamma=2,
283
+ attractor_kind='mean',
284
+ attractor_type='inv',
285
+ aug=True,
286
+ bin_centers_type='softplus',
287
+ bin_embedding_dim=128,
288
+ clip_grad=0.1,
289
+ dataset='nyu',
290
+ depth_anything=True,
291
+ distributed=True,
292
+ do_resize=False,
293
+ force_keep_ar=True,
294
+ freeze_midas_bn=True,
295
+ gpu='NULL',
296
+ img_size=[
297
+ 392,
298
+ 518,
299
+ ],
300
+ inverse_midas=False,
301
+ log_images_every=0.1,
302
+ max_depth=80,
303
+ max_temp=50.0,
304
+ max_translation=100,
305
+ memory_efficient=True,
306
+ midas_model_type='vitb',
307
+ min_depth=0.001,
308
+ min_temp=0.0212,
309
+ model='zoedepth',
310
+ n_attractors=[
311
+ 16,
312
+ 8,
313
+ 4,
314
+ 1,
315
+ ],
316
+ n_bins=64,
317
+ name='ZoeDepth',
318
+ notes='',
319
+ output_distribution='logbinomial',
320
+ prefetch=False,
321
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
322
+ print_losses=False,
323
+ project='ZoeDepth',
324
+ random_crop=False,
325
+ random_translate=False,
326
+ root='.',
327
+ save_dir='',
328
+ shared_dict='NULL',
329
+ tags='',
330
+ train_midas=True,
331
+ translate_prob=0.2,
332
+ type='DA-ZoeDepth',
333
+ uid='NULL',
334
+ use_amp=False,
335
+ use_pretrained_midas=True,
336
+ use_shared_dict=False,
337
+ validate_every=0.25,
338
+ version_name='v1',
339
+ workers=16)
340
+
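The dumped config trains with AdamW at lr=4e-06 and weight_decay=0.01, gradient-norm clipping at 0.1, and a one-cycle schedule (pct_start=0.5, div_factor=1, final_div_factor=10000, momentum cycled between 0.85 and 0.95) over max_epochs=24. Below is a rough plain-PyTorch sketch of an equivalent setup, not the repo's actual trainer; the stand-in model and steps_per_epoch value are assumptions:

```python
# Sketch: AdamW + one-cycle schedule + grad-norm clipping, roughly matching
# the optim_wrapper / param_scheduler values dumped in the config above.
import torch

model = torch.nn.Linear(8, 1)    # stand-in for the real model
steps_per_epoch = 1000           # assumption: depends on dataset size / batch size
optimizer = torch.optim.AdamW(model.parameters(), lr=4e-06, weight_decay=0.01)
scheduler = torch.optim.lr_scheduler.OneCycleLR(
    optimizer,
    max_lr=4e-06,
    epochs=24,
    steps_per_epoch=steps_per_epoch,
    pct_start=0.5,
    div_factor=1,
    final_div_factor=10000,
    base_momentum=0.85,
    max_momentum=0.95,
    cycle_momentum=True,
    three_phase=False,
)

for _ in range(steps_per_epoch):  # one epoch of dummy steps
    optimizer.zero_grad()
    loss = model(torch.randn(4, 8)).mean()
    loss.backward()
    torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=0.1, norm_type=2)
    optimizer.step()
    scheduler.step()
```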
341
+ 2024/03/15 09:55:28 - patchstitcher - INFO - Loading deepnet from local::./work_dir/DepthAnything_vitb.pt
342
+ 2024/03/15 09:55:28 - patchstitcher - INFO - Current zoedepth.core.prep.resizer is <class 'torch.nn.modules.linear.Identity'>
343
+ 2024/03/15 09:55:28 - patchstitcher - INFO - DistributedDataParallel(
344
+ (module): BaselinePretrain(
345
+ (coarse_branch): ZoeDepth(
346
+ (core): DepthAnythingCore(
347
+ (core): DPT_DINOv2(
348
+ (pretrained): DinoVisionTransformer(
349
+ (patch_embed): PatchEmbed(
350
+ (proj): Conv2d(3, 768, kernel_size=(14, 14), stride=(14, 14))
351
+ (norm): Identity()
352
+ )
353
+ (blocks): ModuleList(
354
+ (0-11): 12 x NestedTensorBlock(
355
+ (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True)
356
+ (attn): MemEffAttention(
357
+ (qkv): Linear(in_features=768, out_features=2304, bias=True)
358
+ (attn_drop): Dropout(p=0.0, inplace=False)
359
+ (proj): Linear(in_features=768, out_features=768, bias=True)
360
+ (proj_drop): Dropout(p=0.0, inplace=False)
361
+ )
362
+ (ls1): LayerScale()
363
+ (drop_path1): Identity()
364
+ (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True)
365
+ (mlp): Mlp(
366
+ (fc1): Linear(in_features=768, out_features=3072, bias=True)
367
+ (act): GELU(approximate='none')
368
+ (fc2): Linear(in_features=3072, out_features=768, bias=True)
369
+ (drop): Dropout(p=0.0, inplace=False)
370
+ )
371
+ (ls2): LayerScale()
372
+ (drop_path2): Identity()
373
+ )
374
+ )
375
+ (norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True)
376
+ (head): Identity()
377
+ )
378
+ (depth_head): DPTHead(
379
+ (projects): ModuleList(
380
+ (0): Conv2d(768, 96, kernel_size=(1, 1), stride=(1, 1))
381
+ (1): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1))
382
+ (2): Conv2d(768, 384, kernel_size=(1, 1), stride=(1, 1))
383
+ (3): Conv2d(768, 768, kernel_size=(1, 1), stride=(1, 1))
384
+ )
385
+ (resize_layers): ModuleList(
386
+ (0): ConvTranspose2d(96, 96, kernel_size=(4, 4), stride=(4, 4))
387
+ (1): ConvTranspose2d(192, 192, kernel_size=(2, 2), stride=(2, 2))
388
+ (2): Identity()
389
+ (3): Conv2d(768, 768, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
390
+ )
391
+ (scratch): Module(
392
+ (layer1_rn): Conv2d(96, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
393
+ (layer2_rn): Conv2d(192, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
394
+ (layer3_rn): Conv2d(384, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
395
+ (layer4_rn): Conv2d(768, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
396
+ (refinenet1): FeatureFusionBlock(
397
+ (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
398
+ (resConfUnit1): ResidualConvUnit(
399
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
400
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
401
+ (activation): ReLU()
402
+ (skip_add): FloatFunctional(
403
+ (activation_post_process): Identity()
404
+ )
405
+ )
406
+ (resConfUnit2): ResidualConvUnit(
407
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
408
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
409
+ (activation): ReLU()
410
+ (skip_add): FloatFunctional(
411
+ (activation_post_process): Identity()
412
+ )
413
+ )
414
+ (skip_add): FloatFunctional(
415
+ (activation_post_process): Identity()
416
+ )
417
+ )
418
+ (refinenet2): FeatureFusionBlock(
419
+ (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
420
+ (resConfUnit1): ResidualConvUnit(
421
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
422
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
423
+ (activation): ReLU()
424
+ (skip_add): FloatFunctional(
425
+ (activation_post_process): Identity()
426
+ )
427
+ )
428
+ (resConfUnit2): ResidualConvUnit(
429
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
430
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
431
+ (activation): ReLU()
432
+ (skip_add): FloatFunctional(
433
+ (activation_post_process): Identity()
434
+ )
435
+ )
436
+ (skip_add): FloatFunctional(
437
+ (activation_post_process): Identity()
438
+ )
439
+ )
440
+ (refinenet3): FeatureFusionBlock(
441
+ (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
442
+ (resConfUnit1): ResidualConvUnit(
443
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
444
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
445
+ (activation): ReLU()
446
+ (skip_add): FloatFunctional(
447
+ (activation_post_process): Identity()
448
+ )
449
+ )
450
+ (resConfUnit2): ResidualConvUnit(
451
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
452
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
453
+ (activation): ReLU()
454
+ (skip_add): FloatFunctional(
455
+ (activation_post_process): Identity()
456
+ )
457
+ )
458
+ (skip_add): FloatFunctional(
459
+ (activation_post_process): Identity()
460
+ )
461
+ )
462
+ (refinenet4): FeatureFusionBlock(
463
+ (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
464
+ (resConfUnit1): ResidualConvUnit(
465
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
466
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
467
+ (activation): ReLU()
468
+ (skip_add): FloatFunctional(
469
+ (activation_post_process): Identity()
470
+ )
471
+ )
472
+ (resConfUnit2): ResidualConvUnit(
473
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
474
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
475
+ (activation): ReLU()
476
+ (skip_add): FloatFunctional(
477
+ (activation_post_process): Identity()
478
+ )
479
+ )
480
+ (skip_add): FloatFunctional(
481
+ (activation_post_process): Identity()
482
+ )
483
+ )
484
+ (output_conv1): Conv2d(128, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
485
+ (output_conv2): Sequential(
486
+ (0): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
487
+ (1): ReLU(inplace=True)
488
+ (2): Conv2d(32, 1, kernel_size=(1, 1), stride=(1, 1))
489
+ (3): ReLU(inplace=True)
490
+ (4): Identity()
491
+ )
492
+ )
493
+ )
494
+ )
495
+ )
496
+ (conv2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
497
+ (seed_bin_regressor): SeedBinRegressorUnnormed(
498
+ (_net): Sequential(
499
+ (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(1, 1))
500
+ (1): ReLU(inplace=True)
501
+ (2): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1))
502
+ (3): Softplus(beta=1, threshold=20)
503
+ )
504
+ )
505
+ (seed_projector): Projector(
506
+ (_net): Sequential(
507
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
508
+ (1): ReLU(inplace=True)
509
+ (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
510
+ )
511
+ )
512
+ (projectors): ModuleList(
513
+ (0-3): 4 x Projector(
514
+ (_net): Sequential(
515
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
516
+ (1): ReLU(inplace=True)
517
+ (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
518
+ )
519
+ )
520
+ )
521
+ (attractors): ModuleList(
522
+ (0): AttractorLayerUnnormed(
523
+ (_net): Sequential(
524
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
525
+ (1): ReLU(inplace=True)
526
+ (2): Conv2d(128, 16, kernel_size=(1, 1), stride=(1, 1))
527
+ (3): Softplus(beta=1, threshold=20)
528
+ )
529
+ )
530
+ (1): AttractorLayerUnnormed(
531
+ (_net): Sequential(
532
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
533
+ (1): ReLU(inplace=True)
534
+ (2): Conv2d(128, 8, kernel_size=(1, 1), stride=(1, 1))
535
+ (3): Softplus(beta=1, threshold=20)
536
+ )
537
+ )
538
+ (2): AttractorLayerUnnormed(
539
+ (_net): Sequential(
540
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
541
+ (1): ReLU(inplace=True)
542
+ (2): Conv2d(128, 4, kernel_size=(1, 1), stride=(1, 1))
543
+ (3): Softplus(beta=1, threshold=20)
544
+ )
545
+ )
546
+ (3): AttractorLayerUnnormed(
547
+ (_net): Sequential(
548
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
549
+ (1): ReLU(inplace=True)
550
+ (2): Conv2d(128, 1, kernel_size=(1, 1), stride=(1, 1))
551
+ (3): Softplus(beta=1, threshold=20)
552
+ )
553
+ )
554
+ )
555
+ (conditional_log_binomial): ConditionalLogBinomial(
556
+ (log_binomial_transform): LogBinomial()
557
+ (mlp): Sequential(
558
+ (0): Conv2d(161, 80, kernel_size=(1, 1), stride=(1, 1))
559
+ (1): GELU(approximate='none')
560
+ (2): Conv2d(80, 4, kernel_size=(1, 1), stride=(1, 1))
561
+ (3): Softplus(beta=1, threshold=20)
562
+ )
563
+ )
564
+ )
565
+ (sigloss): SILogLoss()
566
+ )
567
+ )
568
+ 2024/03/15 09:55:34 - patchstitcher - INFO - successfully init trainer
569
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.cls_token
570
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.pos_embed
571
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.mask_token
572
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.patch_embed.proj.weight
573
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.patch_embed.proj.bias
574
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.norm1.weight
575
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.norm1.bias
576
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.attn.qkv.weight
577
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.attn.qkv.bias
578
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.attn.proj.weight
579
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.attn.proj.bias
580
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.ls1.gamma
581
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.norm2.weight
582
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.norm2.bias
583
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.mlp.fc1.weight
584
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.mlp.fc1.bias
585
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.mlp.fc2.weight
586
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.mlp.fc2.bias
587
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.ls2.gamma
588
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.norm1.weight
589
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.norm1.bias
590
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.attn.qkv.weight
591
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.attn.qkv.bias
592
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.attn.proj.weight
593
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.attn.proj.bias
594
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.ls1.gamma
595
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.norm2.weight
596
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.norm2.bias
597
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.mlp.fc1.weight
598
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.mlp.fc1.bias
599
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.mlp.fc2.weight
600
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.mlp.fc2.bias
601
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.ls2.gamma
602
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.norm1.weight
603
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.norm1.bias
604
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.attn.qkv.weight
605
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.attn.qkv.bias
606
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.attn.proj.weight
607
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.attn.proj.bias
608
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.ls1.gamma
609
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.norm2.weight
610
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.norm2.bias
611
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.mlp.fc1.weight
612
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.mlp.fc1.bias
613
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.mlp.fc2.weight
614
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.mlp.fc2.bias
615
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.ls2.gamma
616
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.norm1.weight
617
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.norm1.bias
618
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.attn.qkv.weight
619
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.attn.qkv.bias
620
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.attn.proj.weight
621
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.attn.proj.bias
622
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.ls1.gamma
623
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.norm2.weight
624
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.norm2.bias
625
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.mlp.fc1.weight
626
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.mlp.fc1.bias
627
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.mlp.fc2.weight
628
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.mlp.fc2.bias
629
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.ls2.gamma
630
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.norm1.weight
631
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.norm1.bias
632
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.attn.qkv.weight
633
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.attn.qkv.bias
634
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.attn.proj.weight
635
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.attn.proj.bias
636
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.ls1.gamma
637
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.norm2.weight
638
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.norm2.bias
639
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.mlp.fc1.weight
640
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.mlp.fc1.bias
641
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.mlp.fc2.weight
642
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.mlp.fc2.bias
643
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.ls2.gamma
644
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.norm1.weight
645
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.norm1.bias
646
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.attn.qkv.weight
647
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.attn.qkv.bias
648
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.attn.proj.weight
649
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.attn.proj.bias
650
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.ls1.gamma
651
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.norm2.weight
652
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.norm2.bias
653
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.mlp.fc1.weight
654
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.mlp.fc1.bias
655
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.mlp.fc2.weight
656
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.mlp.fc2.bias
657
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.ls2.gamma
658
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.norm1.weight
659
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.norm1.bias
660
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.attn.qkv.weight
661
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.attn.qkv.bias
662
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.attn.proj.weight
663
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.attn.proj.bias
664
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.ls1.gamma
665
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.norm2.weight
666
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.norm2.bias
667
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.mlp.fc1.weight
668
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.mlp.fc1.bias
669
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.mlp.fc2.weight
670
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.mlp.fc2.bias
671
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.ls2.gamma
672
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.norm1.weight
673
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.norm1.bias
674
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.attn.qkv.weight
675
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.attn.qkv.bias
676
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.attn.proj.weight
677
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.attn.proj.bias
678
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.ls1.gamma
679
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.norm2.weight
680
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.norm2.bias
681
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.mlp.fc1.weight
682
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.mlp.fc1.bias
683
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.mlp.fc2.weight
684
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.mlp.fc2.bias
685
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.ls2.gamma
686
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.norm1.weight
687
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.norm1.bias
688
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.attn.qkv.weight
689
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.attn.qkv.bias
690
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.attn.proj.weight
691
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.attn.proj.bias
692
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.ls1.gamma
693
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.norm2.weight
694
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.norm2.bias
695
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.mlp.fc1.weight
696
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.mlp.fc1.bias
697
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.mlp.fc2.weight
698
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.mlp.fc2.bias
699
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.ls2.gamma
700
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.norm1.weight
701
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.norm1.bias
702
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.attn.qkv.weight
703
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.attn.qkv.bias
704
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.attn.proj.weight
705
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.attn.proj.bias
706
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.ls1.gamma
707
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.norm2.weight
708
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.norm2.bias
709
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.mlp.fc1.weight
710
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.mlp.fc1.bias
711
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.mlp.fc2.weight
712
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.mlp.fc2.bias
713
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.ls2.gamma
714
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.norm1.weight
715
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.norm1.bias
716
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.attn.qkv.weight
717
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.attn.qkv.bias
718
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.attn.proj.weight
719
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.attn.proj.bias
720
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.ls1.gamma
721
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.norm2.weight
722
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.norm2.bias
723
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.mlp.fc1.weight
724
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.mlp.fc1.bias
725
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.mlp.fc2.weight
726
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.mlp.fc2.bias
727
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.ls2.gamma
728
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.norm1.weight
729
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.norm1.bias
730
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.attn.qkv.weight
731
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.attn.qkv.bias
732
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.attn.proj.weight
733
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.attn.proj.bias
734
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.ls1.gamma
735
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.norm2.weight
736
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.norm2.bias
737
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.mlp.fc1.weight
738
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.mlp.fc1.bias
739
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.mlp.fc2.weight
740
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.mlp.fc2.bias
741
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.ls2.gamma
742
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.norm.weight
743
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.norm.bias
744
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.0.weight
745
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.0.bias
746
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.1.weight
747
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.1.bias
748
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.2.weight
749
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.2.bias
750
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.3.weight
751
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.3.bias
752
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.resize_layers.0.weight
753
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.resize_layers.0.bias
754
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.resize_layers.1.weight
755
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.resize_layers.1.bias
756
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.resize_layers.3.weight
757
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.resize_layers.3.bias
758
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.layer1_rn.weight
759
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.layer2_rn.weight
760
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.layer3_rn.weight
761
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.layer4_rn.weight
762
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.out_conv.weight
763
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.out_conv.bias
764
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv1.weight
765
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv1.bias
766
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv2.weight
767
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv2.bias
768
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv1.weight
769
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv1.bias
770
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv2.weight
771
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv2.bias
772
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.out_conv.weight
773
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.out_conv.bias
774
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv1.weight
775
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv1.bias
776
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv2.weight
777
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv2.bias
778
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv1.weight
779
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv1.bias
780
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv2.weight
781
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv2.bias
782
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.out_conv.weight
783
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.out_conv.bias
784
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv1.weight
785
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv1.bias
786
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv2.weight
787
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv2.bias
788
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv1.weight
789
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv1.bias
790
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv2.weight
791
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv2.bias
792
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.out_conv.weight
793
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.out_conv.bias
794
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv1.weight
795
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv1.bias
796
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv2.weight
797
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv2.bias
798
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv1.weight
799
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv1.bias
800
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv2.weight
801
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv2.bias
802
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.output_conv1.weight
803
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.output_conv1.bias
804
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.output_conv2.0.weight
805
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.output_conv2.0.bias
806
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.output_conv2.2.weight
807
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.output_conv2.2.bias
808
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.conv2.weight
809
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.conv2.bias
810
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.seed_bin_regressor._net.0.weight
811
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.seed_bin_regressor._net.0.bias
812
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.seed_bin_regressor._net.2.weight
813
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.seed_bin_regressor._net.2.bias
814
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.seed_projector._net.0.weight
815
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.seed_projector._net.0.bias
816
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.seed_projector._net.2.weight
817
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.seed_projector._net.2.bias
818
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.0._net.0.weight
819
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.0._net.0.bias
820
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.0._net.2.weight
821
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.0._net.2.bias
822
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.1._net.0.weight
823
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.1._net.0.bias
824
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.1._net.2.weight
825
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.1._net.2.bias
826
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.2._net.0.weight
827
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.2._net.0.bias
828
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.2._net.2.weight
829
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.2._net.2.bias
830
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.3._net.0.weight
831
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.3._net.0.bias
832
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.3._net.2.weight
833
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.projectors.3._net.2.bias
834
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.0._net.0.weight
835
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.0._net.0.bias
836
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.0._net.2.weight
837
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.0._net.2.bias
838
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.1._net.0.weight
839
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.1._net.0.bias
840
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.1._net.2.weight
841
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.1._net.2.bias
842
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.2._net.0.weight
843
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.2._net.0.bias
844
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.2._net.2.weight
845
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.2._net.2.bias
846
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.3._net.0.weight
847
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.3._net.0.bias
848
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.3._net.2.weight
849
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.attractors.3._net.2.bias
850
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.conditional_log_binomial.mlp.0.weight
851
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.conditional_log_binomial.mlp.0.bias
852
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.conditional_log_binomial.mlp.2.weight
853
+ 2024/03/15 09:55:34 - patchstitcher - INFO - training param: module.coarse_branch.conditional_log_binomial.mlp.2.bias
854
+ 2024/03/15 09:57:50 - patchstitcher - INFO - Epoch: [01/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.9729490280151367 - coarse_loss: 1.9729490280151367
855
+ 2024/03/15 09:59:39 - patchstitcher - INFO - Epoch: [01/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.6159499883651733 - coarse_loss: 1.6159499883651733
856
+ 2024/03/15 10:01:20 - patchstitcher - INFO - Epoch: [01/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.6653645038604736 - coarse_loss: 1.6653645038604736
857
+ 2024/03/15 10:03:08 - patchstitcher - INFO - Epoch: [01/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.3738189935684204 - coarse_loss: 1.3738189935684204
858
+ 2024/03/15 10:06:24 - patchstitcher - INFO - Epoch: [02/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.0679881572723389 - coarse_loss: 1.0679881572723389
859
+ 2024/03/15 10:08:12 - patchstitcher - INFO - Epoch: [02/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.0449714660644531 - coarse_loss: 1.0449714660644531
860
+ 2024/03/15 10:09:57 - patchstitcher - INFO - Epoch: [02/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.3200674057006836 - coarse_loss: 1.3200674057006836
861
+ 2024/03/15 10:11:44 - patchstitcher - INFO - Epoch: [02/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.2463884353637695 - coarse_loss: 1.2463884353637695
862
+ 2024/03/15 10:13:21 - patchstitcher - INFO - Evaluation Summary:
863
+ +-----------+-----------+----------+----------+-----------+-----------+-----------+-----------+-----------+-----------+
864
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
865
+ +-----------+-----------+----------+----------+-----------+-----------+-----------+-----------+-----------+-----------+
866
+ | 0.9277873 | 0.9864464 | 0.994876 | 0.093889 | 1.7125608 | 0.0411139 | 0.1284599 | 10.310956 | 0.2504752 | 1.2484615 |
867
+ +-----------+-----------+----------+----------+-----------+-----------+-----------+-----------+-----------+-----------+
868
+ 2024/03/15 10:15:11 - patchstitcher - INFO - Epoch: [03/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.1642838716506958 - coarse_loss: 1.1642838716506958
869
+ 2024/03/15 10:16:56 - patchstitcher - INFO - Epoch: [03/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.1062591075897217 - coarse_loss: 1.1062591075897217
870
+ 2024/03/15 10:18:40 - patchstitcher - INFO - Epoch: [03/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.491640329360962 - coarse_loss: 1.491640329360962
871
+ 2024/03/15 10:20:26 - patchstitcher - INFO - Epoch: [03/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.0693362951278687 - coarse_loss: 1.0693362951278687
872
+ 2024/03/15 10:23:28 - patchstitcher - INFO - Epoch: [04/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.2830930948257446 - coarse_loss: 1.2830930948257446
873
+ 2024/03/15 10:25:13 - patchstitcher - INFO - Epoch: [04/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8494630455970764 - coarse_loss: 0.8494630455970764
874
+ 2024/03/15 10:26:59 - patchstitcher - INFO - Epoch: [04/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.100481390953064 - coarse_loss: 1.100481390953064
875
+ 2024/03/15 10:28:45 - patchstitcher - INFO - Epoch: [04/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.6722239255905151 - coarse_loss: 0.6722239255905151
876
+ 2024/03/15 10:30:18 - patchstitcher - INFO - Evaluation Summary:
877
+ +----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+
878
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
879
+ +----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+
880
+ | 0.961338 | 0.9893523 | 0.9953463 | 0.0692743 | 1.5390607 | 0.030108 | 0.1050118 | 9.1967623 | 0.1975309 | 1.1110629 |
881
+ +----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+
882
+ 2024/03/15 10:32:10 - patchstitcher - INFO - Epoch: [05/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.5996298789978027 - coarse_loss: 0.5996298789978027
883
+ 2024/03/15 10:33:58 - patchstitcher - INFO - Epoch: [05/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.5094271302223206 - coarse_loss: 0.5094271302223206
884
+ 2024/03/15 10:35:48 - patchstitcher - INFO - Epoch: [05/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.7459169626235962 - coarse_loss: 0.7459169626235962
885
+ 2024/03/15 10:37:33 - patchstitcher - INFO - Epoch: [05/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.7367539405822754 - coarse_loss: 0.7367539405822754
886
+ 2024/03/15 10:40:39 - patchstitcher - INFO - Epoch: [06/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.3089935779571533 - coarse_loss: 1.3089935779571533
887
+ 2024/03/15 10:42:24 - patchstitcher - INFO - Epoch: [06/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.9458222985267639 - coarse_loss: 0.9458222985267639
888
+ 2024/03/15 10:44:12 - patchstitcher - INFO - Epoch: [06/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.7383743524551392 - coarse_loss: 0.7383743524551392
889
+ 2024/03/15 10:45:59 - patchstitcher - INFO - Epoch: [06/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.6774943470954895 - coarse_loss: 0.6774943470954895
890
+ 2024/03/15 10:47:29 - patchstitcher - INFO - Evaluation Summary:
891
+ +-----------+-----------+-----------+----------+----------+-----------+-----------+-----------+-----------+-----------+
892
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
893
+ +-----------+-----------+-----------+----------+----------+-----------+-----------+-----------+-----------+-----------+
894
+ | 0.9625513 | 0.9896059 | 0.9953454 | 0.076086 | 1.553624 | 0.0339274 | 0.1113379 | 8.9179546 | 0.1912439 | 1.0962123 |
895
+ +-----------+-----------+-----------+----------+----------+-----------+-----------+-----------+-----------+-----------+
896
+ 2024/03/15 10:49:20 - patchstitcher - INFO - Epoch: [07/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.7863880395889282 - coarse_loss: 0.7863880395889282
897
+ 2024/03/15 10:51:04 - patchstitcher - INFO - Epoch: [07/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.1585361957550049 - coarse_loss: 1.1585361957550049
898
+ 2024/03/15 10:52:54 - patchstitcher - INFO - Epoch: [07/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.1414254903793335 - coarse_loss: 1.1414254903793335
899
+ 2024/03/15 10:54:41 - patchstitcher - INFO - Epoch: [07/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.6607706546783447 - coarse_loss: 0.6607706546783447
900
+ 2024/03/15 10:57:47 - patchstitcher - INFO - Epoch: [08/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.8438395857810974 - coarse_loss: 0.8438395857810974
901
+ 2024/03/15 10:59:37 - patchstitcher - INFO - Epoch: [08/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.931841254234314 - coarse_loss: 0.931841254234314
902
+ 2024/03/15 11:01:23 - patchstitcher - INFO - Epoch: [08/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.2649768590927124 - coarse_loss: 1.2649768590927124
903
+ 2024/03/15 11:03:05 - patchstitcher - INFO - Epoch: [08/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.356317400932312 - coarse_loss: 1.356317400932312
904
+ 2024/03/15 11:04:39 - patchstitcher - INFO - Evaluation Summary:
905
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
906
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
907
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
908
+ | 0.9688624 | 0.9900475 | 0.9955412 | 0.0621825 | 1.4741381 | 0.0269014 | 0.0983563 | 8.5882915 | 0.1738514 | 1.0249666 |
909
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
910
+ 2024/03/15 11:06:28 - patchstitcher - INFO - Epoch: [09/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.1434571743011475 - coarse_loss: 1.1434571743011475
911
+ 2024/03/15 11:08:19 - patchstitcher - INFO - Epoch: [09/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.7681660652160645 - coarse_loss: 1.7681660652160645
912
+ 2024/03/15 11:10:04 - patchstitcher - INFO - Epoch: [09/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8547622561454773 - coarse_loss: 0.8547622561454773
913
+ 2024/03/15 11:11:49 - patchstitcher - INFO - Epoch: [09/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.869714617729187 - coarse_loss: 0.869714617729187
914
+ 2024/03/15 11:14:59 - patchstitcher - INFO - Epoch: [10/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.5332772731781006 - coarse_loss: 0.5332772731781006
915
+ 2024/03/15 11:16:44 - patchstitcher - INFO - Epoch: [10/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8691495060920715 - coarse_loss: 0.8691495060920715
916
+ 2024/03/15 11:18:28 - patchstitcher - INFO - Epoch: [10/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.4371870756149292 - coarse_loss: 1.4371870756149292
917
+ 2024/03/15 11:20:14 - patchstitcher - INFO - Epoch: [10/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.9575653076171875 - coarse_loss: 0.9575653076171875
918
+ 2024/03/15 11:21:45 - patchstitcher - INFO - Evaluation Summary:
919
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
920
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
921
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
922
+ | 0.9679335 | 0.9903103 | 0.9957452 | 0.0634565 | 1.4144222 | 0.0269387 | 0.0964634 | 8.5336222 | 0.1681394 | 1.0266862 |
923
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
924
+ 2024/03/15 11:23:36 - patchstitcher - INFO - Epoch: [11/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.8048105835914612 - coarse_loss: 0.8048105835914612
925
+ 2024/03/15 11:25:22 - patchstitcher - INFO - Epoch: [11/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8616613149642944 - coarse_loss: 0.8616613149642944
926
+ 2024/03/15 11:27:12 - patchstitcher - INFO - Epoch: [11/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.221915364265442 - coarse_loss: 1.221915364265442
927
+ 2024/03/15 11:28:59 - patchstitcher - INFO - Epoch: [11/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.5273403525352478 - coarse_loss: 0.5273403525352478
928
+ 2024/03/15 11:31:59 - patchstitcher - INFO - Epoch: [12/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.6490796208381653 - coarse_loss: 0.6490796208381653
929
+ 2024/03/15 11:33:46 - patchstitcher - INFO - Epoch: [12/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.9228641986846924 - coarse_loss: 0.9228641986846924
930
+ 2024/03/15 11:35:30 - patchstitcher - INFO - Epoch: [12/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8991217017173767 - coarse_loss: 0.8991217017173767
931
+ 2024/03/15 11:37:21 - patchstitcher - INFO - Epoch: [12/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.778996467590332 - coarse_loss: 0.778996467590332
932
+ 2024/03/15 11:38:51 - patchstitcher - INFO - Evaluation Summary:
933
+ +-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+
934
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
935
+ +-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+
936
+ | 0.9700605 | 0.9907225 | 0.9956863 | 0.0593423 | 1.3817834 | 0.0258237 | 0.095056 | 8.4508466 | 0.1639893 | 1.0006335 |
937
+ +-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+
938
+ 2024/03/15 11:40:42 - patchstitcher - INFO - Epoch: [13/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.2246499061584473 - coarse_loss: 1.2246499061584473
939
+ 2024/03/15 11:42:33 - patchstitcher - INFO - Epoch: [13/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.055445671081543 - coarse_loss: 1.055445671081543
940
+ 2024/03/15 11:44:18 - patchstitcher - INFO - Epoch: [13/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8403045535087585 - coarse_loss: 0.8403045535087585
941
+ 2024/03/15 11:46:03 - patchstitcher - INFO - Epoch: [13/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.7852007150650024 - coarse_loss: 0.7852007150650024
942
+ 2024/03/15 11:49:05 - patchstitcher - INFO - Epoch: [14/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.5313113331794739 - coarse_loss: 0.5313113331794739
943
+ 2024/03/15 11:50:53 - patchstitcher - INFO - Epoch: [14/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.803260326385498 - coarse_loss: 0.803260326385498
944
+ 2024/03/15 11:52:35 - patchstitcher - INFO - Epoch: [14/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.6353864669799805 - coarse_loss: 0.6353864669799805
945
+ 2024/03/15 11:54:22 - patchstitcher - INFO - Epoch: [14/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.603277862071991 - coarse_loss: 0.603277862071991
946
+ 2024/03/15 11:55:54 - patchstitcher - INFO - Evaluation Summary:
947
+ +-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+
948
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
949
+ +-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+
950
+ | 0.9716077 | 0.9908243 | 0.9958379 | 0.0603097 | 1.3795547 | 0.025826 | 0.0942337 | 8.2481922 | 0.1615328 | 1.0314286 |
951
+ +-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+
952
+ 2024/03/15 11:57:46 - patchstitcher - INFO - Epoch: [15/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.68681800365448 - coarse_loss: 0.68681800365448
953
+ 2024/03/15 11:59:38 - patchstitcher - INFO - Epoch: [15/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8562105894088745 - coarse_loss: 0.8562105894088745
954
+ 2024/03/15 12:01:24 - patchstitcher - INFO - Epoch: [15/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.0672423839569092 - coarse_loss: 1.0672423839569092
955
+ 2024/03/15 12:03:05 - patchstitcher - INFO - Epoch: [15/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.7026287317276001 - coarse_loss: 0.7026287317276001
956
+ 2024/03/15 12:06:08 - patchstitcher - INFO - Epoch: [16/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.9886091947555542 - coarse_loss: 0.9886091947555542
957
+ 2024/03/15 12:07:54 - patchstitcher - INFO - Epoch: [16/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.6522326469421387 - coarse_loss: 0.6522326469421387
958
+ 2024/03/15 12:09:39 - patchstitcher - INFO - Epoch: [16/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.9577221870422363 - coarse_loss: 0.9577221870422363
959
+ 2024/03/15 12:11:22 - patchstitcher - INFO - Epoch: [16/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.7307658195495605 - coarse_loss: 1.7307658195495605
960
+ 2024/03/15 12:12:51 - patchstitcher - INFO - Evaluation Summary:
961
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
962
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
963
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
964
+ | 0.9747152 | 0.9908944 | 0.9959154 | 0.0511857 | 1.3574797 | 0.0221211 | 0.0867927 | 7.9538576 | 0.1511261 | 1.0003225 |
965
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
966
+ 2024/03/15 12:14:43 - patchstitcher - INFO - Epoch: [17/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.5646010637283325 - coarse_loss: 0.5646010637283325
967
+ 2024/03/15 12:16:28 - patchstitcher - INFO - Epoch: [17/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8057535290718079 - coarse_loss: 0.8057535290718079
968
+ 2024/03/15 12:18:17 - patchstitcher - INFO - Epoch: [17/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.11107337474823 - coarse_loss: 1.11107337474823
969
+ 2024/03/15 12:20:01 - patchstitcher - INFO - Epoch: [17/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.187990427017212 - coarse_loss: 1.187990427017212
970
+ 2024/03/15 12:23:09 - patchstitcher - INFO - Epoch: [18/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.6382083892822266 - coarse_loss: 0.6382083892822266
971
+ 2024/03/15 12:24:49 - patchstitcher - INFO - Epoch: [18/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.5392951965332031 - coarse_loss: 0.5392951965332031
972
+ 2024/03/15 12:26:37 - patchstitcher - INFO - Epoch: [18/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8188748359680176 - coarse_loss: 0.8188748359680176
973
+ 2024/03/15 12:28:18 - patchstitcher - INFO - Epoch: [18/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.0811210870742798 - coarse_loss: 1.0811210870742798
974
+ 2024/03/15 12:29:49 - patchstitcher - INFO - Evaluation Summary:
975
+ +----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
976
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
977
+ +----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
978
+ | 0.975323 | 0.9911468 | 0.9959684 | 0.0483459 | 1.3259571 | 0.0207656 | 0.0842995 | 7.8959624 | 0.1478599 | 0.9762505 |
979
+ +----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
980
+ 2024/03/15 12:31:43 - patchstitcher - INFO - Epoch: [19/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.5005310773849487 - coarse_loss: 0.5005310773849487
981
+ 2024/03/15 12:33:28 - patchstitcher - INFO - Epoch: [19/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.5474035143852234 - coarse_loss: 0.5474035143852234
982
+ 2024/03/15 12:35:16 - patchstitcher - INFO - Epoch: [19/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.7799822092056274 - coarse_loss: 0.7799822092056274
983
+ 2024/03/15 12:37:02 - patchstitcher - INFO - Epoch: [19/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.5381927490234375 - coarse_loss: 0.5381927490234375
984
+ 2024/03/15 12:40:07 - patchstitcher - INFO - Epoch: [20/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.1203773021697998 - coarse_loss: 1.1203773021697998
985
+ 2024/03/15 12:41:51 - patchstitcher - INFO - Epoch: [20/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.5552318096160889 - coarse_loss: 0.5552318096160889
986
+ 2024/03/15 12:43:35 - patchstitcher - INFO - Epoch: [20/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.4946790933609009 - coarse_loss: 0.4946790933609009
987
+ 2024/03/15 12:45:21 - patchstitcher - INFO - Epoch: [20/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.829839825630188 - coarse_loss: 0.829839825630188
988
+ 2024/03/15 12:46:50 - patchstitcher - INFO - Evaluation Summary:
989
+ +-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+
990
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
991
+ +-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+
992
+ | 0.9759003 | 0.9912674 | 0.9959566 | 0.0472804 | 1.3156906 | 0.020464 | 0.0841626 | 7.7711489 | 0.1448604 | 0.9643456 |
993
+ +-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+
994
+ 2024/03/15 12:48:43 - patchstitcher - INFO - Epoch: [21/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.8187640905380249 - coarse_loss: 0.8187640905380249
995
+ 2024/03/15 12:50:30 - patchstitcher - INFO - Epoch: [21/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.5510168671607971 - coarse_loss: 0.5510168671607971
996
+ 2024/03/15 12:52:22 - patchstitcher - INFO - Epoch: [21/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.5071703791618347 - coarse_loss: 0.5071703791618347
997
+ 2024/03/15 12:54:08 - patchstitcher - INFO - Epoch: [21/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.6241310834884644 - coarse_loss: 1.6241310834884644
998
+ 2024/03/15 12:57:18 - patchstitcher - INFO - Epoch: [22/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.9662288427352905 - coarse_loss: 0.9662288427352905
999
+ 2024/03/15 12:59:03 - patchstitcher - INFO - Epoch: [22/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.915822446346283 - coarse_loss: 0.915822446346283
1000
+ 2024/03/15 13:00:45 - patchstitcher - INFO - Epoch: [22/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.48746258020401 - coarse_loss: 0.48746258020401
1001
+ 2024/03/15 13:02:29 - patchstitcher - INFO - Epoch: [22/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.7346612811088562 - coarse_loss: 0.7346612811088562
1002
+ 2024/03/15 13:04:01 - patchstitcher - INFO - Evaluation Summary:
1003
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+
1004
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
1005
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+
1006
+ | 0.9762025 | 0.9913185 | 0.9959977 | 0.0456843 | 1.3065255 | 0.0197035 | 0.0823783 | 7.684332 | 0.1431234 | 0.9606835 |
1007
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+
1008
+ 2024/03/15 13:05:51 - patchstitcher - INFO - Epoch: [23/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.5825149416923523 - coarse_loss: 0.5825149416923523
1009
+ 2024/03/15 13:07:38 - patchstitcher - INFO - Epoch: [23/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.0635181665420532 - coarse_loss: 1.0635181665420532
1010
+ 2024/03/15 13:09:24 - patchstitcher - INFO - Epoch: [23/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.632516622543335 - coarse_loss: 1.632516622543335
1011
+ 2024/03/15 13:11:08 - patchstitcher - INFO - Epoch: [23/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.353378415107727 - coarse_loss: 1.353378415107727
1012
+ 2024/03/15 13:14:18 - patchstitcher - INFO - Epoch: [24/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.8277870416641235 - coarse_loss: 0.8277870416641235
1013
+ 2024/03/15 13:16:02 - patchstitcher - INFO - Epoch: [24/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.5105581283569336 - coarse_loss: 0.5105581283569336
1014
+ 2024/03/15 13:17:45 - patchstitcher - INFO - Epoch: [24/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.43523621559143066 - coarse_loss: 0.43523621559143066
1015
+ 2024/03/15 13:19:31 - patchstitcher - INFO - Epoch: [24/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.40485745668411255 - coarse_loss: 0.40485745668411255
1016
+ 2024/03/15 13:21:02 - patchstitcher - INFO - Evaluation Summary:
1017
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
1018
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
1019
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
1020
+ | 0.9762546 | 0.9913396 | 0.9959976 | 0.0452784 | 1.2974494 | 0.0194901 | 0.0821238 | 7.7005432 | 0.1431584 | 0.9635146 |
1021
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
1022
+ 2024/03/15 13:21:02 - patchstitcher - INFO - Saving ckp, but use the inner get_save_dict function to get model_dict
1023
+ 2024/03/15 13:21:02 - patchstitcher - INFO - For saving space. Would you like to save base model several times? :>
1024
+ 2024/03/15 13:21:03 - patchstitcher - INFO - save checkpoint_24.pth at ./work_dir/depthanything_vitb_u4k/coarse_pretrain
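Note: the "Evaluation Summary" tables in the log above report the usual monocular-depth error metrics. As a reference, the sketch below gives the conventional definitions of those columns (a1/a2/a3 threshold accuracy, abs_rel, rmse, log_10, rmse_log, silog, sq_rel); it is an illustrative assumption rather than this repo's exact evaluator, and the codebase-specific "see" column is omitted.

import numpy as np

# Hedged sketch: conventional depth metrics matching the column names in the
# "Evaluation Summary" tables; masking/clipping details may differ from the repo's evaluator.
def depth_metrics(gt, pred, min_depth=0.001, max_depth=80.0):
    mask = (gt > min_depth) & (gt < max_depth)
    gt = gt[mask]
    pred = np.clip(pred[mask], min_depth, max_depth)

    thresh = np.maximum(gt / pred, pred / gt)
    a1, a2, a3 = [(thresh < 1.25 ** k).mean() for k in (1, 2, 3)]

    abs_rel = np.mean(np.abs(gt - pred) / gt)                       # abs_rel
    sq_rel = np.mean((gt - pred) ** 2 / gt)                         # sq_rel
    rmse = np.sqrt(np.mean((gt - pred) ** 2))                       # rmse
    rmse_log = np.sqrt(np.mean((np.log(gt) - np.log(pred)) ** 2))   # rmse_log
    log_10 = np.mean(np.abs(np.log10(gt) - np.log10(pred)))         # log_10

    err = np.log(pred) - np.log(gt)
    silog = 100 * np.sqrt(np.mean(err ** 2) - np.mean(err) ** 2)    # silog (scale-invariant log error)

    return dict(a1=a1, a2=a2, a3=a3, abs_rel=abs_rel, rmse=rmse, log_10=log_10,
                rmse_log=rmse_log, silog=silog, sq_rel=sq_rel)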
depthanything_vitb_u4k/coarse_pretrain/checkpoint_24.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4fa7eccecb3ba6b7f7e7aabcb8e1cc7be703da3d6eaff316bf22237a616b2afb
3
+ size 1171453994
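Note: checkpoint_24.pth is stored as a Git LFS pointer; the actual ~1.1 GB weight file is fetched by git lfs pull. A minimal sketch of inspecting it with PyTorch, noting that the top-level key layout is an assumption (the log above mentions a repo-specific get_save_dict routine):

import torch

# Hedged sketch: load the LFS-tracked checkpoint after `git lfs pull`.
# The key names inside the file are an assumption; the repo's get_save_dict() defines the real layout.
ckpt = torch.load("depthanything_vitb_u4k/coarse_pretrain/checkpoint_24.pth", map_location="cpu")
print(type(ckpt))
if isinstance(ckpt, dict):
    print(list(ckpt.keys()))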
depthanything_vitb_u4k/coarse_pretrain/config.py ADDED
@@ -0,0 +1,310 @@
1
+ collect_input_args = [
2
+ 'image_lr',
3
+ 'crops_image_hr',
4
+ 'depth_gt',
5
+ 'crop_depths',
6
+ 'bboxs',
7
+ 'image_hr',
8
+ ]
9
+ convert_syncbn = True
10
+ debug = False
11
+ env_cfg = dict(
12
+ cudnn_benchmark=True,
13
+ dist_cfg=dict(backend='nccl'),
14
+ mp_cfg=dict(mp_start_method='forkserver'))
15
+ find_unused_parameters = True
16
+ general_dataloader = dict(
17
+ batch_size=1,
18
+ dataset=dict(
19
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
20
+ num_workers=2)
21
+ launcher = 'pytorch'
22
+ log_name = 'coarse_pretrain'
23
+ max_depth = 80
24
+ min_depth = 0.001
25
+ model = dict(
26
+ coarse_branch=dict(
27
+ attractor_alpha=1000,
28
+ attractor_gamma=2,
29
+ attractor_kind='mean',
30
+ attractor_type='inv',
31
+ aug=True,
32
+ bin_centers_type='softplus',
33
+ bin_embedding_dim=128,
34
+ clip_grad=0.1,
35
+ dataset='nyu',
36
+ depth_anything=True,
37
+ distributed=True,
38
+ do_resize=False,
39
+ force_keep_ar=True,
40
+ freeze_midas_bn=True,
41
+ gpu='NULL',
42
+ img_size=[
43
+ 392,
44
+ 518,
45
+ ],
46
+ inverse_midas=False,
47
+ log_images_every=0.1,
48
+ max_depth=80,
49
+ max_temp=50.0,
50
+ max_translation=100,
51
+ memory_efficient=True,
52
+ midas_model_type='vitb',
53
+ min_depth=0.001,
54
+ min_temp=0.0212,
55
+ model='zoedepth',
56
+ n_attractors=[
57
+ 16,
58
+ 8,
59
+ 4,
60
+ 1,
61
+ ],
62
+ n_bins=64,
63
+ name='ZoeDepth',
64
+ notes='',
65
+ output_distribution='logbinomial',
66
+ prefetch=False,
67
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
68
+ print_losses=False,
69
+ project='ZoeDepth',
70
+ random_crop=False,
71
+ random_translate=False,
72
+ root='.',
73
+ save_dir='',
74
+ shared_dict='NULL',
75
+ tags='',
76
+ train_midas=True,
77
+ translate_prob=0.2,
78
+ type='DA-ZoeDepth',
79
+ uid='NULL',
80
+ use_amp=False,
81
+ use_pretrained_midas=True,
82
+ use_shared_dict=False,
83
+ validate_every=0.25,
84
+ version_name='v1',
85
+ workers=16),
86
+ fine_branch=dict(
87
+ attractor_alpha=1000,
88
+ attractor_gamma=2,
89
+ attractor_kind='mean',
90
+ attractor_type='inv',
91
+ aug=True,
92
+ bin_centers_type='softplus',
93
+ bin_embedding_dim=128,
94
+ clip_grad=0.1,
95
+ dataset='nyu',
96
+ depth_anything=True,
97
+ distributed=True,
98
+ do_resize=False,
99
+ force_keep_ar=True,
100
+ freeze_midas_bn=True,
101
+ gpu='NULL',
102
+ img_size=[
103
+ 392,
104
+ 518,
105
+ ],
106
+ inverse_midas=False,
107
+ log_images_every=0.1,
108
+ max_depth=80,
109
+ max_temp=50.0,
110
+ max_translation=100,
111
+ memory_efficient=True,
112
+ midas_model_type='vitb',
113
+ min_depth=0.001,
114
+ min_temp=0.0212,
115
+ model='zoedepth',
116
+ n_attractors=[
117
+ 16,
118
+ 8,
119
+ 4,
120
+ 1,
121
+ ],
122
+ n_bins=64,
123
+ name='ZoeDepth',
124
+ notes='',
125
+ output_distribution='logbinomial',
126
+ prefetch=False,
127
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
128
+ print_losses=False,
129
+ project='ZoeDepth',
130
+ random_crop=False,
131
+ random_translate=False,
132
+ root='.',
133
+ save_dir='',
134
+ shared_dict='NULL',
135
+ tags='',
136
+ train_midas=True,
137
+ translate_prob=0.2,
138
+ type='DA-ZoeDepth',
139
+ uid='NULL',
140
+ use_amp=False,
141
+ use_pretrained_midas=True,
142
+ use_shared_dict=False,
143
+ validate_every=0.25,
144
+ version_name='v1',
145
+ workers=16),
146
+ max_depth=80,
147
+ min_depth=0.001,
148
+ sigloss=dict(type='SILogLoss'),
149
+ target='coarse',
150
+ type='BaselinePretrain')
151
+ optim_wrapper = dict(
152
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
153
+ optimizer=dict(lr=4e-06, type='AdamW', weight_decay=0.01),
154
+ paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict()))
155
+ param_scheduler = dict(
156
+ base_momentum=0.85,
157
+ cycle_momentum=True,
158
+ div_factor=1,
159
+ final_div_factor=10000,
160
+ max_momentum=0.95,
161
+ pct_start=0.5,
162
+ three_phase=False)
163
+ project = 'patchfusion'
164
+ resume = False
165
+ tags = [
166
+ 'coarse',
167
+ 'da',
168
+ 'vitb',
169
+ ]
170
+ test_in_dataloader = dict(
171
+ batch_size=1,
172
+ dataset=dict(
173
+ data_root='./data/u4k',
174
+ max_depth=80,
175
+ min_depth=0.001,
176
+ mode='infer',
177
+ split='./data/u4k/splits/test.txt',
178
+ transform_cfg=dict(network_process_size=[
179
+ 384,
180
+ 512,
181
+ ]),
182
+ type='UnrealStereo4kDataset'),
183
+ num_workers=2)
184
+ test_out_dataloader = dict(
185
+ batch_size=1,
186
+ dataset=dict(
187
+ data_root='./data/u4k',
188
+ max_depth=80,
189
+ min_depth=0.001,
190
+ mode='infer',
191
+ split='./data/u4k/splits/test_out.txt',
192
+ transform_cfg=dict(network_process_size=[
193
+ 384,
194
+ 512,
195
+ ]),
196
+ type='UnrealStereo4kDataset'),
197
+ num_workers=2)
198
+ train_cfg = dict(
199
+ eval_start=0,
200
+ log_interval=100,
201
+ max_epochs=24,
202
+ save_checkpoint_interval=24,
203
+ train_log_img_interval=500,
204
+ val_interval=2,
205
+ val_log_img_interval=50,
206
+ val_type='epoch_base')
207
+ train_dataloader = dict(
208
+ batch_size=4,
209
+ dataset=dict(
210
+ data_root='./data/u4k',
211
+ max_depth=80,
212
+ min_depth=0.001,
213
+ mode='train',
214
+ resize_mode='depth-anything',
215
+ split='./data/u4k/splits/train.txt',
216
+ transform_cfg=dict(
217
+ degree=1.0,
218
+ network_process_size=[
219
+ 392,
220
+ 518,
221
+ ],
222
+ random_crop=True,
223
+ random_crop_size=(
224
+ 540,
225
+ 960,
226
+ )),
227
+ type='UnrealStereo4kDataset'),
228
+ num_workers=4)
229
+ val_dataloader = dict(
230
+ batch_size=1,
231
+ dataset=dict(
232
+ data_root='./data/u4k',
233
+ max_depth=80,
234
+ min_depth=0.001,
235
+ mode='infer',
236
+ resize_mode='depth-anything',
237
+ split='./data/u4k/splits/val.txt',
238
+ transform_cfg=dict(
239
+ degree=1.0,
240
+ network_process_size=[
241
+ 392,
242
+ 518,
243
+ ],
244
+ random_crop_size=(
245
+ 540,
246
+ 960,
247
+ )),
248
+ type='UnrealStereo4kDataset'),
249
+ num_workers=2)
250
+ work_dir = './work_dir/depthanything_vitb_u4k/coarse_pretrain'
251
+ zoe_depth_config = dict(
252
+ attractor_alpha=1000,
253
+ attractor_gamma=2,
254
+ attractor_kind='mean',
255
+ attractor_type='inv',
256
+ aug=True,
257
+ bin_centers_type='softplus',
258
+ bin_embedding_dim=128,
259
+ clip_grad=0.1,
260
+ dataset='nyu',
261
+ depth_anything=True,
262
+ distributed=True,
263
+ do_resize=False,
264
+ force_keep_ar=True,
265
+ freeze_midas_bn=True,
266
+ gpu='NULL',
267
+ img_size=[
268
+ 392,
269
+ 518,
270
+ ],
271
+ inverse_midas=False,
272
+ log_images_every=0.1,
273
+ max_depth=80,
274
+ max_temp=50.0,
275
+ max_translation=100,
276
+ memory_efficient=True,
277
+ midas_model_type='vitb',
278
+ min_depth=0.001,
279
+ min_temp=0.0212,
280
+ model='zoedepth',
281
+ n_attractors=[
282
+ 16,
283
+ 8,
284
+ 4,
285
+ 1,
286
+ ],
287
+ n_bins=64,
288
+ name='ZoeDepth',
289
+ notes='',
290
+ output_distribution='logbinomial',
291
+ prefetch=False,
292
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
293
+ print_losses=False,
294
+ project='ZoeDepth',
295
+ random_crop=False,
296
+ random_translate=False,
297
+ root='.',
298
+ save_dir='',
299
+ shared_dict='NULL',
300
+ tags='',
301
+ train_midas=True,
302
+ translate_prob=0.2,
303
+ type='DA-ZoeDepth',
304
+ uid='NULL',
305
+ use_amp=False,
306
+ use_pretrained_midas=True,
307
+ use_shared_dict=False,
308
+ validate_every=0.25,
309
+ version_name='v1',
310
+ workers=16)
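Note: the config above is a plain-Python, MMEngine-style configuration (the training logs list MMEngine 0.10.2 in the environment), so it can be parsed without running the trainer. A minimal sketch, assuming the file path shown in this diff:

from mmengine.config import Config

# Hedged sketch: Config.fromfile is standard MMEngine API; the local path is an assumption.
cfg = Config.fromfile("depthanything_vitb_u4k/coarse_pretrain/config.py")
print(cfg.model.type)                  # 'BaselinePretrain'
print(cfg.optim_wrapper.optimizer.lr)  # 4e-06
print(cfg.train_cfg.max_epochs)        # 24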
depthanything_vitb_u4k/fine_pretrain/20240315_153036.log ADDED
@@ -0,0 +1,1028 @@
1
+ 2024/03/15 15:30:44 - patchstitcher - INFO -
2
+ ------------------------------------------------------------
3
+ System environment:
4
+ sys.platform: linux
5
+ Python: 3.8.18 | packaged by conda-forge | (default, Oct 10 2023, 15:44:36) [GCC 12.3.0]
6
+ CUDA available: True
7
+ numpy_random_seed: 621
8
+ GPU 0,1,2,3: NVIDIA A100-SXM4-80GB
9
+ CUDA_HOME: /sw/rl9g/cuda/11.8/rl9_binary
10
+ NVCC: Cuda compilation tools, release 11.8, V11.8.89
11
+ GCC: gcc (GCC) 11.3.1 20220421 (Red Hat 11.3.1-2)
12
+ PyTorch: 2.1.2
13
+ PyTorch compiling details: PyTorch built with:
14
+ - GCC 9.3
15
+ - C++ Version: 201703
16
+ - Intel(R) oneAPI Math Kernel Library Version 2022.1-Product Build 20220311 for Intel(R) 64 architecture applications
17
+ - Intel(R) MKL-DNN v3.1.1 (Git Hash 64f6bcbcbab628e96f33a62c3e975f8535a7bde4)
18
+ - OpenMP 201511 (a.k.a. OpenMP 4.5)
19
+ - LAPACK is enabled (usually provided by MKL)
20
+ - NNPACK is enabled
21
+ - CPU capability usage: AVX2
22
+ - CUDA Runtime 11.8
23
+ - NVCC architecture flags: -gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_61,code=sm_61;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86;-gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_90,code=sm_90;-gencode;arch=compute_37,code=compute_37
24
+ - CuDNN 8.7
25
+ - Magma 2.6.1
26
+ - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=11.8, CUDNN_VERSION=8.7.0, CXX_COMPILER=/opt/rh/devtoolset-9/root/usr/bin/c++, CXX_FLAGS= -D_GLIBCXX_USE_CXX11_ABI=0 -fabi-version=11 -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -DNDEBUG -DUSE_KINETO -DLIBKINETO_NOROCTRACER -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wall -Wextra -Werror=return-type -Werror=non-virtual-dtor -Werror=bool-operation -Wnarrowing -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-unused-parameter -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-stringop-overflow -Wno-psabi -Wno-error=pedantic -Wno-error=old-style-cast -Wno-invalid-partial-specialization -Wno-unused-private-field -Wno-aligned-allocation-unavailable -Wno-missing-braces -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Werror=cast-function-type -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_DISABLE_GPU_ASSERTS=ON, TORCH_VERSION=2.1.2, USE_CUDA=ON, USE_CUDNN=ON, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF,
27
+
28
+ TorchVision: 0.16.2
29
+ OpenCV: 4.8.1
30
+ MMEngine: 0.10.2
31
+
32
+ Runtime environment:
33
+ cudnn_benchmark: True
34
+ mp_cfg: {'mp_start_method': 'forkserver'}
35
+ dist_cfg: {'backend': 'nccl'}
36
+ seed: 621
37
+ Distributed launcher: pytorch
38
+ Distributed training: True
39
+ GPU number: 4
40
+ ------------------------------------------------------------
41
+
42
+ 2024/03/15 15:30:44 - patchstitcher - INFO - Config:
43
+ collect_input_args = [
44
+ 'image_lr',
45
+ 'crops_image_hr',
46
+ 'depth_gt',
47
+ 'crop_depths',
48
+ 'bboxs',
49
+ 'image_hr',
50
+ ]
51
+ convert_syncbn = True
52
+ debug = False
53
+ env_cfg = dict(
54
+ cudnn_benchmark=True,
55
+ dist_cfg=dict(backend='nccl'),
56
+ mp_cfg=dict(mp_start_method='forkserver'))
57
+ find_unused_parameters = True
58
+ general_dataloader = dict(
59
+ batch_size=1,
60
+ dataset=dict(
61
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
62
+ num_workers=2)
63
+ launcher = 'pytorch'
64
+ log_name = 'fine_pretrain'
65
+ max_depth = 80
66
+ min_depth = 0.001
67
+ model = dict(
68
+ coarse_branch=dict(
69
+ attractor_alpha=1000,
70
+ attractor_gamma=2,
71
+ attractor_kind='mean',
72
+ attractor_type='inv',
73
+ aug=True,
74
+ bin_centers_type='softplus',
75
+ bin_embedding_dim=128,
76
+ clip_grad=0.1,
77
+ dataset='nyu',
78
+ depth_anything=True,
79
+ distributed=True,
80
+ do_resize=False,
81
+ force_keep_ar=True,
82
+ freeze_midas_bn=True,
83
+ gpu='NULL',
84
+ img_size=[
85
+ 392,
86
+ 518,
87
+ ],
88
+ inverse_midas=False,
89
+ log_images_every=0.1,
90
+ max_depth=80,
91
+ max_temp=50.0,
92
+ max_translation=100,
93
+ memory_efficient=True,
94
+ midas_model_type='vitb',
95
+ min_depth=0.001,
96
+ min_temp=0.0212,
97
+ model='zoedepth',
98
+ n_attractors=[
99
+ 16,
100
+ 8,
101
+ 4,
102
+ 1,
103
+ ],
104
+ n_bins=64,
105
+ name='ZoeDepth',
106
+ notes='',
107
+ output_distribution='logbinomial',
108
+ prefetch=False,
109
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
110
+ print_losses=False,
111
+ project='ZoeDepth',
112
+ random_crop=False,
113
+ random_translate=False,
114
+ root='.',
115
+ save_dir='',
116
+ shared_dict='NULL',
117
+ tags='',
118
+ train_midas=True,
119
+ translate_prob=0.2,
120
+ type='DA-ZoeDepth',
121
+ uid='NULL',
122
+ use_amp=False,
123
+ use_pretrained_midas=True,
124
+ use_shared_dict=False,
125
+ validate_every=0.25,
126
+ version_name='v1',
127
+ workers=16),
128
+ fine_branch=dict(
129
+ attractor_alpha=1000,
130
+ attractor_gamma=2,
131
+ attractor_kind='mean',
132
+ attractor_type='inv',
133
+ aug=True,
134
+ bin_centers_type='softplus',
135
+ bin_embedding_dim=128,
136
+ clip_grad=0.1,
137
+ dataset='nyu',
138
+ depth_anything=True,
139
+ distributed=True,
140
+ do_resize=False,
141
+ force_keep_ar=True,
142
+ freeze_midas_bn=True,
143
+ gpu='NULL',
144
+ img_size=[
145
+ 392,
146
+ 518,
147
+ ],
148
+ inverse_midas=False,
149
+ log_images_every=0.1,
150
+ max_depth=80,
151
+ max_temp=50.0,
152
+ max_translation=100,
153
+ memory_efficient=True,
154
+ midas_model_type='vitb',
155
+ min_depth=0.001,
156
+ min_temp=0.0212,
157
+ model='zoedepth',
158
+ n_attractors=[
159
+ 16,
160
+ 8,
161
+ 4,
162
+ 1,
163
+ ],
164
+ n_bins=64,
165
+ name='ZoeDepth',
166
+ notes='',
167
+ output_distribution='logbinomial',
168
+ prefetch=False,
169
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
170
+ print_losses=False,
171
+ project='ZoeDepth',
172
+ random_crop=False,
173
+ random_translate=False,
174
+ root='.',
175
+ save_dir='',
176
+ shared_dict='NULL',
177
+ tags='',
178
+ train_midas=True,
179
+ translate_prob=0.2,
180
+ type='DA-ZoeDepth',
181
+ uid='NULL',
182
+ use_amp=False,
183
+ use_pretrained_midas=True,
184
+ use_shared_dict=False,
185
+ validate_every=0.25,
186
+ version_name='v1',
187
+ workers=16),
188
+ max_depth=80,
189
+ min_depth=0.001,
190
+ patch_process_shape=(
191
+ 392,
192
+ 518,
193
+ ),
194
+ sigloss=dict(type='SILogLoss'),
195
+ target='fine',
196
+ type='BaselinePretrain')
197
+ optim_wrapper = dict(
198
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
199
+ optimizer=dict(lr=4e-06, type='AdamW', weight_decay=0.01),
200
+ paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict()))
201
+ param_scheduler = dict(
202
+ base_momentum=0.85,
203
+ cycle_momentum=True,
204
+ div_factor=1,
205
+ final_div_factor=10000,
206
+ max_momentum=0.95,
207
+ pct_start=0.5,
208
+ three_phase=False)
209
+ project = 'patchfusion'
210
+ tags = [
211
+ 'fine',
212
+ 'da',
213
+ 'vitb',
214
+ ]
215
+ test_in_dataloader = dict(
216
+ batch_size=1,
217
+ dataset=dict(
218
+ data_root='./data/u4k',
219
+ max_depth=80,
220
+ min_depth=0.001,
221
+ mode='infer',
222
+ split='./data/u4k/splits/test.txt',
223
+ transform_cfg=dict(network_process_size=[
224
+ 384,
225
+ 512,
226
+ ]),
227
+ type='UnrealStereo4kDataset'),
228
+ num_workers=2)
229
+ test_out_dataloader = dict(
230
+ batch_size=1,
231
+ dataset=dict(
232
+ data_root='./data/u4k',
233
+ max_depth=80,
234
+ min_depth=0.001,
235
+ mode='infer',
236
+ split='./data/u4k/splits/test_out.txt',
237
+ transform_cfg=dict(network_process_size=[
238
+ 384,
239
+ 512,
240
+ ]),
241
+ type='UnrealStereo4kDataset'),
242
+ num_workers=2)
243
+ train_cfg = dict(
244
+ eval_start=0,
245
+ log_interval=100,
246
+ max_epochs=24,
247
+ save_checkpoint_interval=24,
248
+ train_log_img_interval=500,
249
+ val_interval=2,
250
+ val_log_img_interval=50,
251
+ val_type='epoch_base')
252
+ train_dataloader = dict(
253
+ batch_size=4,
254
+ dataset=dict(
255
+ data_root='./data/u4k',
256
+ max_depth=80,
257
+ min_depth=0.001,
258
+ mode='train',
259
+ resize_mode='depth-anything',
260
+ split='./data/u4k/splits/train.txt',
261
+ transform_cfg=dict(
262
+ degree=1.0, network_process_size=[
263
+ 392,
264
+ 518,
265
+ ], random_crop=True),
266
+ type='UnrealStereo4kDataset'),
267
+ num_workers=4)
268
+ val_dataloader = dict(
269
+ batch_size=1,
270
+ dataset=dict(
271
+ data_root='./data/u4k',
272
+ max_depth=80,
273
+ min_depth=0.001,
274
+ mode='infer',
275
+ resize_mode='depth-anything',
276
+ split='./data/u4k/splits/val.txt',
277
+ transform_cfg=dict(degree=1.0, network_process_size=[
278
+ 392,
279
+ 518,
280
+ ]),
281
+ type='UnrealStereo4kDataset'),
282
+ num_workers=2)
283
+ work_dir = './work_dir/depthanything_vitb_u4k/fine_pretrain'
284
+ zoe_depth_config = dict(
285
+ attractor_alpha=1000,
286
+ attractor_gamma=2,
287
+ attractor_kind='mean',
288
+ attractor_type='inv',
289
+ aug=True,
290
+ bin_centers_type='softplus',
291
+ bin_embedding_dim=128,
292
+ clip_grad=0.1,
293
+ dataset='nyu',
294
+ depth_anything=True,
295
+ distributed=True,
296
+ do_resize=False,
297
+ force_keep_ar=True,
298
+ freeze_midas_bn=True,
299
+ gpu='NULL',
300
+ img_size=[
301
+ 392,
302
+ 518,
303
+ ],
304
+ inverse_midas=False,
305
+ log_images_every=0.1,
306
+ max_depth=80,
307
+ max_temp=50.0,
308
+ max_translation=100,
309
+ memory_efficient=True,
310
+ midas_model_type='vitb',
311
+ min_depth=0.001,
312
+ min_temp=0.0212,
313
+ model='zoedepth',
314
+ n_attractors=[
315
+ 16,
316
+ 8,
317
+ 4,
318
+ 1,
319
+ ],
320
+ n_bins=64,
321
+ name='ZoeDepth',
322
+ notes='',
323
+ output_distribution='logbinomial',
324
+ prefetch=False,
325
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
326
+ print_losses=False,
327
+ project='ZoeDepth',
328
+ random_crop=False,
329
+ random_translate=False,
330
+ root='.',
331
+ save_dir='',
332
+ shared_dict='NULL',
333
+ tags='',
334
+ train_midas=True,
335
+ translate_prob=0.2,
336
+ type='DA-ZoeDepth',
337
+ uid='NULL',
338
+ use_amp=False,
339
+ use_pretrained_midas=True,
340
+ use_shared_dict=False,
341
+ validate_every=0.25,
342
+ version_name='v1',
343
+ workers=16)
344
+
345
+ 2024/03/15 15:30:45 - patchstitcher - INFO - Loading deepnet from local::./work_dir/DepthAnything_vitb.pt
346
+ 2024/03/15 15:30:45 - patchstitcher - INFO - Current zoedepth.core.prep.resizer is <class 'torch.nn.modules.linear.Identity'>
347
+ 2024/03/15 15:30:45 - patchstitcher - INFO - DistributedDataParallel(
348
+ (module): BaselinePretrain(
349
+ (fine_branch): ZoeDepth(
350
+ (core): DepthAnythingCore(
351
+ (core): DPT_DINOv2(
352
+ (pretrained): DinoVisionTransformer(
353
+ (patch_embed): PatchEmbed(
354
+ (proj): Conv2d(3, 768, kernel_size=(14, 14), stride=(14, 14))
355
+ (norm): Identity()
356
+ )
357
+ (blocks): ModuleList(
358
+ (0-11): 12 x NestedTensorBlock(
359
+ (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True)
360
+ (attn): MemEffAttention(
361
+ (qkv): Linear(in_features=768, out_features=2304, bias=True)
362
+ (attn_drop): Dropout(p=0.0, inplace=False)
363
+ (proj): Linear(in_features=768, out_features=768, bias=True)
364
+ (proj_drop): Dropout(p=0.0, inplace=False)
365
+ )
366
+ (ls1): LayerScale()
367
+ (drop_path1): Identity()
368
+ (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True)
369
+ (mlp): Mlp(
370
+ (fc1): Linear(in_features=768, out_features=3072, bias=True)
371
+ (act): GELU(approximate='none')
372
+ (fc2): Linear(in_features=3072, out_features=768, bias=True)
373
+ (drop): Dropout(p=0.0, inplace=False)
374
+ )
375
+ (ls2): LayerScale()
376
+ (drop_path2): Identity()
377
+ )
378
+ )
379
+ (norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True)
380
+ (head): Identity()
381
+ )
382
+ (depth_head): DPTHead(
383
+ (projects): ModuleList(
384
+ (0): Conv2d(768, 96, kernel_size=(1, 1), stride=(1, 1))
385
+ (1): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1))
386
+ (2): Conv2d(768, 384, kernel_size=(1, 1), stride=(1, 1))
387
+ (3): Conv2d(768, 768, kernel_size=(1, 1), stride=(1, 1))
388
+ )
389
+ (resize_layers): ModuleList(
390
+ (0): ConvTranspose2d(96, 96, kernel_size=(4, 4), stride=(4, 4))
391
+ (1): ConvTranspose2d(192, 192, kernel_size=(2, 2), stride=(2, 2))
392
+ (2): Identity()
393
+ (3): Conv2d(768, 768, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
394
+ )
395
+ (scratch): Module(
396
+ (layer1_rn): Conv2d(96, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
397
+ (layer2_rn): Conv2d(192, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
398
+ (layer3_rn): Conv2d(384, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
399
+ (layer4_rn): Conv2d(768, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
400
+ (refinenet1): FeatureFusionBlock(
401
+ (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
402
+ (resConfUnit1): ResidualConvUnit(
403
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
404
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
405
+ (activation): ReLU()
406
+ (skip_add): FloatFunctional(
407
+ (activation_post_process): Identity()
408
+ )
409
+ )
410
+ (resConfUnit2): ResidualConvUnit(
411
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
412
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
413
+ (activation): ReLU()
414
+ (skip_add): FloatFunctional(
415
+ (activation_post_process): Identity()
416
+ )
417
+ )
418
+ (skip_add): FloatFunctional(
419
+ (activation_post_process): Identity()
420
+ )
421
+ )
422
+ (refinenet2): FeatureFusionBlock(
423
+ (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
424
+ (resConfUnit1): ResidualConvUnit(
425
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
426
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
427
+ (activation): ReLU()
428
+ (skip_add): FloatFunctional(
429
+ (activation_post_process): Identity()
430
+ )
431
+ )
432
+ (resConfUnit2): ResidualConvUnit(
433
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
434
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
435
+ (activation): ReLU()
436
+ (skip_add): FloatFunctional(
437
+ (activation_post_process): Identity()
438
+ )
439
+ )
440
+ (skip_add): FloatFunctional(
441
+ (activation_post_process): Identity()
442
+ )
443
+ )
444
+ (refinenet3): FeatureFusionBlock(
445
+ (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
446
+ (resConfUnit1): ResidualConvUnit(
447
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
448
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
449
+ (activation): ReLU()
450
+ (skip_add): FloatFunctional(
451
+ (activation_post_process): Identity()
452
+ )
453
+ )
454
+ (resConfUnit2): ResidualConvUnit(
455
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
456
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
457
+ (activation): ReLU()
458
+ (skip_add): FloatFunctional(
459
+ (activation_post_process): Identity()
460
+ )
461
+ )
462
+ (skip_add): FloatFunctional(
463
+ (activation_post_process): Identity()
464
+ )
465
+ )
466
+ (refinenet4): FeatureFusionBlock(
467
+ (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
468
+ (resConfUnit1): ResidualConvUnit(
469
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
470
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
471
+ (activation): ReLU()
472
+ (skip_add): FloatFunctional(
473
+ (activation_post_process): Identity()
474
+ )
475
+ )
476
+ (resConfUnit2): ResidualConvUnit(
477
+ (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
478
+ (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
479
+ (activation): ReLU()
480
+ (skip_add): FloatFunctional(
481
+ (activation_post_process): Identity()
482
+ )
483
+ )
484
+ (skip_add): FloatFunctional(
485
+ (activation_post_process): Identity()
486
+ )
487
+ )
488
+ (output_conv1): Conv2d(128, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
489
+ (output_conv2): Sequential(
490
+ (0): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
491
+ (1): ReLU(inplace=True)
492
+ (2): Conv2d(32, 1, kernel_size=(1, 1), stride=(1, 1))
493
+ (3): ReLU(inplace=True)
494
+ (4): Identity()
495
+ )
496
+ )
497
+ )
498
+ )
499
+ )
500
+ (conv2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
501
+ (seed_bin_regressor): SeedBinRegressorUnnormed(
502
+ (_net): Sequential(
503
+ (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(1, 1))
504
+ (1): ReLU(inplace=True)
505
+ (2): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1))
506
+ (3): Softplus(beta=1, threshold=20)
507
+ )
508
+ )
509
+ (seed_projector): Projector(
510
+ (_net): Sequential(
511
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
512
+ (1): ReLU(inplace=True)
513
+ (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
514
+ )
515
+ )
516
+ (projectors): ModuleList(
517
+ (0-3): 4 x Projector(
518
+ (_net): Sequential(
519
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
520
+ (1): ReLU(inplace=True)
521
+ (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
522
+ )
523
+ )
524
+ )
525
+ (attractors): ModuleList(
526
+ (0): AttractorLayerUnnormed(
527
+ (_net): Sequential(
528
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
529
+ (1): ReLU(inplace=True)
530
+ (2): Conv2d(128, 16, kernel_size=(1, 1), stride=(1, 1))
531
+ (3): Softplus(beta=1, threshold=20)
532
+ )
533
+ )
534
+ (1): AttractorLayerUnnormed(
535
+ (_net): Sequential(
536
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
537
+ (1): ReLU(inplace=True)
538
+ (2): Conv2d(128, 8, kernel_size=(1, 1), stride=(1, 1))
539
+ (3): Softplus(beta=1, threshold=20)
540
+ )
541
+ )
542
+ (2): AttractorLayerUnnormed(
543
+ (_net): Sequential(
544
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
545
+ (1): ReLU(inplace=True)
546
+ (2): Conv2d(128, 4, kernel_size=(1, 1), stride=(1, 1))
547
+ (3): Softplus(beta=1, threshold=20)
548
+ )
549
+ )
550
+ (3): AttractorLayerUnnormed(
551
+ (_net): Sequential(
552
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
553
+ (1): ReLU(inplace=True)
554
+ (2): Conv2d(128, 1, kernel_size=(1, 1), stride=(1, 1))
555
+ (3): Softplus(beta=1, threshold=20)
556
+ )
557
+ )
558
+ )
559
+ (conditional_log_binomial): ConditionalLogBinomial(
560
+ (log_binomial_transform): LogBinomial()
561
+ (mlp): Sequential(
562
+ (0): Conv2d(161, 80, kernel_size=(1, 1), stride=(1, 1))
563
+ (1): GELU(approximate='none')
564
+ (2): Conv2d(80, 4, kernel_size=(1, 1), stride=(1, 1))
565
+ (3): Softplus(beta=1, threshold=20)
566
+ )
567
+ )
568
+ )
569
+ (sigloss): SILogLoss()
570
+ )
571
+ )
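For reference, the (sigloss): SILogLoss() entry in the model summary above refers to a scale-invariant log loss, which is presumably what the per-step fine_loss values below report. A minimal sketch of the usual formulation, assuming the common defaults beta=0.15 and scale=10 (the repository's exact constants and masking may differ):

import torch

def silog_loss(pred: torch.Tensor, target: torch.Tensor,
               beta: float = 0.15, scale: float = 10.0, eps: float = 1e-6) -> torch.Tensor:
    # Scale-invariant log loss (Eigen et al.); beta/scale here are assumed defaults,
    # not values read from this repository.
    valid = target > eps                                   # skip invalid/empty depth pixels
    g = torch.log(pred[valid] + eps) - torch.log(target[valid] + eps)
    Dg = torch.var(g) + beta * torch.mean(g) ** 2          # variance plus weighted squared mean
    return scale * torch.sqrt(Dg)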
572
+ 2024/03/15 15:30:51 - patchstitcher - INFO - successfully init trainer
573
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.cls_token
574
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.pos_embed
575
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.mask_token
576
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.patch_embed.proj.weight
577
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.patch_embed.proj.bias
578
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.norm1.weight
579
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.norm1.bias
580
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.attn.qkv.weight
581
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.attn.qkv.bias
582
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.attn.proj.weight
583
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.attn.proj.bias
584
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.ls1.gamma
585
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.norm2.weight
586
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.norm2.bias
587
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.mlp.fc1.weight
588
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.mlp.fc1.bias
589
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.mlp.fc2.weight
590
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.mlp.fc2.bias
591
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.ls2.gamma
592
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.norm1.weight
593
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.norm1.bias
594
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.attn.qkv.weight
595
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.attn.qkv.bias
596
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.attn.proj.weight
597
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.attn.proj.bias
598
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.ls1.gamma
599
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.norm2.weight
600
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.norm2.bias
601
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.mlp.fc1.weight
602
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.mlp.fc1.bias
603
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.mlp.fc2.weight
604
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.mlp.fc2.bias
605
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.ls2.gamma
606
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.norm1.weight
607
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.norm1.bias
608
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.attn.qkv.weight
609
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.attn.qkv.bias
610
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.attn.proj.weight
611
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.attn.proj.bias
612
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.ls1.gamma
613
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.norm2.weight
614
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.norm2.bias
615
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.mlp.fc1.weight
616
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.mlp.fc1.bias
617
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.mlp.fc2.weight
618
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.mlp.fc2.bias
619
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.ls2.gamma
620
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.norm1.weight
621
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.norm1.bias
622
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.attn.qkv.weight
623
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.attn.qkv.bias
624
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.attn.proj.weight
625
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.attn.proj.bias
626
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.ls1.gamma
627
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.norm2.weight
628
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.norm2.bias
629
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.mlp.fc1.weight
630
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.mlp.fc1.bias
631
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.mlp.fc2.weight
632
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.mlp.fc2.bias
633
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.ls2.gamma
634
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.norm1.weight
635
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.norm1.bias
636
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.attn.qkv.weight
637
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.attn.qkv.bias
638
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.attn.proj.weight
639
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.attn.proj.bias
640
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.ls1.gamma
641
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.norm2.weight
642
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.norm2.bias
643
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.mlp.fc1.weight
644
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.mlp.fc1.bias
645
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.mlp.fc2.weight
646
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.mlp.fc2.bias
647
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.ls2.gamma
648
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.norm1.weight
649
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.norm1.bias
650
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.attn.qkv.weight
651
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.attn.qkv.bias
652
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.attn.proj.weight
653
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.attn.proj.bias
654
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.ls1.gamma
655
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.norm2.weight
656
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.norm2.bias
657
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.mlp.fc1.weight
658
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.mlp.fc1.bias
659
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.mlp.fc2.weight
660
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.mlp.fc2.bias
661
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.ls2.gamma
662
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.norm1.weight
663
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.norm1.bias
664
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.attn.qkv.weight
665
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.attn.qkv.bias
666
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.attn.proj.weight
667
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.attn.proj.bias
668
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.ls1.gamma
669
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.norm2.weight
670
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.norm2.bias
671
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.mlp.fc1.weight
672
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.mlp.fc1.bias
673
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.mlp.fc2.weight
674
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.mlp.fc2.bias
675
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.ls2.gamma
676
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.norm1.weight
677
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.norm1.bias
678
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.attn.qkv.weight
679
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.attn.qkv.bias
680
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.attn.proj.weight
681
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.attn.proj.bias
682
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.ls1.gamma
683
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.norm2.weight
684
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.norm2.bias
685
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.mlp.fc1.weight
686
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.mlp.fc1.bias
687
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.mlp.fc2.weight
688
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.mlp.fc2.bias
689
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.ls2.gamma
690
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.norm1.weight
691
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.norm1.bias
692
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.attn.qkv.weight
693
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.attn.qkv.bias
694
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.attn.proj.weight
695
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.attn.proj.bias
696
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.ls1.gamma
697
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.norm2.weight
698
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.norm2.bias
699
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.mlp.fc1.weight
700
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.mlp.fc1.bias
701
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.mlp.fc2.weight
702
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.mlp.fc2.bias
703
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.ls2.gamma
704
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.norm1.weight
705
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.norm1.bias
706
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.attn.qkv.weight
707
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.attn.qkv.bias
708
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.attn.proj.weight
709
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.attn.proj.bias
710
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.ls1.gamma
711
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.norm2.weight
712
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.norm2.bias
713
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.mlp.fc1.weight
714
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.mlp.fc1.bias
715
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.mlp.fc2.weight
716
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.mlp.fc2.bias
717
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.ls2.gamma
718
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.norm1.weight
719
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.norm1.bias
720
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.attn.qkv.weight
721
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.attn.qkv.bias
722
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.attn.proj.weight
723
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.attn.proj.bias
724
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.ls1.gamma
725
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.norm2.weight
726
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.norm2.bias
727
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.mlp.fc1.weight
728
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.mlp.fc1.bias
729
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.mlp.fc2.weight
730
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.mlp.fc2.bias
731
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.ls2.gamma
732
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.norm1.weight
733
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.norm1.bias
734
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.attn.qkv.weight
735
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.attn.qkv.bias
736
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.attn.proj.weight
737
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.attn.proj.bias
738
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.ls1.gamma
739
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.norm2.weight
740
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.norm2.bias
741
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.mlp.fc1.weight
742
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.mlp.fc1.bias
743
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.mlp.fc2.weight
744
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.mlp.fc2.bias
745
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.ls2.gamma
746
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.norm.weight
747
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.norm.bias
748
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.0.weight
749
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.0.bias
750
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.1.weight
751
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.1.bias
752
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.2.weight
753
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.2.bias
754
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.3.weight
755
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.3.bias
756
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.0.weight
757
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.0.bias
758
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.1.weight
759
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.1.bias
760
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.3.weight
761
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.3.bias
762
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.layer1_rn.weight
763
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.layer2_rn.weight
764
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.layer3_rn.weight
765
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.layer4_rn.weight
766
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.out_conv.weight
767
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.out_conv.bias
768
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv1.weight
769
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv1.bias
770
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv2.weight
771
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv2.bias
772
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv1.weight
773
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv1.bias
774
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv2.weight
775
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv2.bias
776
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.out_conv.weight
777
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.out_conv.bias
778
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv1.weight
779
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv1.bias
780
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv2.weight
781
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv2.bias
782
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv1.weight
783
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv1.bias
784
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv2.weight
785
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv2.bias
786
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.out_conv.weight
787
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.out_conv.bias
788
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv1.weight
789
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv1.bias
790
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv2.weight
791
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv2.bias
792
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv1.weight
793
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv1.bias
794
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv2.weight
795
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv2.bias
796
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.out_conv.weight
797
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.out_conv.bias
798
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv1.weight
799
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv1.bias
800
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv2.weight
801
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv2.bias
802
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv1.weight
803
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv1.bias
804
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv2.weight
805
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv2.bias
806
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv1.weight
807
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv1.bias
808
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv2.0.weight
809
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv2.0.bias
810
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv2.2.weight
811
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv2.2.bias
812
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.conv2.weight
813
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.conv2.bias
814
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.seed_bin_regressor._net.0.weight
815
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.seed_bin_regressor._net.0.bias
816
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.seed_bin_regressor._net.2.weight
817
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.seed_bin_regressor._net.2.bias
818
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.seed_projector._net.0.weight
819
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.seed_projector._net.0.bias
820
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.seed_projector._net.2.weight
821
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.seed_projector._net.2.bias
822
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.0._net.0.weight
823
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.0._net.0.bias
824
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.0._net.2.weight
825
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.0._net.2.bias
826
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.1._net.0.weight
827
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.1._net.0.bias
828
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.1._net.2.weight
829
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.1._net.2.bias
830
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.2._net.0.weight
831
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.2._net.0.bias
832
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.2._net.2.weight
833
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.2._net.2.bias
834
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.3._net.0.weight
835
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.3._net.0.bias
836
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.3._net.2.weight
837
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.projectors.3._net.2.bias
838
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.0._net.0.weight
839
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.0._net.0.bias
840
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.0._net.2.weight
841
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.0._net.2.bias
842
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.1._net.0.weight
843
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.1._net.0.bias
844
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.1._net.2.weight
845
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.1._net.2.bias
846
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.2._net.0.weight
847
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.2._net.0.bias
848
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.2._net.2.weight
849
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.2._net.2.bias
850
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.3._net.0.weight
851
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.3._net.0.bias
852
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.3._net.2.weight
853
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.attractors.3._net.2.bias
854
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.conditional_log_binomial.mlp.0.weight
855
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.conditional_log_binomial.mlp.0.bias
856
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.conditional_log_binomial.mlp.2.weight
857
+ 2024/03/15 15:30:51 - patchstitcher - INFO - training param: module.fine_branch.conditional_log_binomial.mlp.2.bias
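The long run of "training param:" lines above appears to enumerate every parameter with requires_grad=True on the DistributedDataParallel-wrapped model, which is why each name carries the module. prefix. A minimal, hypothetical sketch of producing such a listing (the toy network and logger setup are placeholders, not the repository's code):

import logging
import torch.nn as nn

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('patchstitcher')

# Toy stand-in for the real network; the actual log walks the DDP wrapper,
# hence the leading 'module.' on every parameter name.
net = nn.Sequential(nn.Conv2d(3, 8, 3, padding=1), nn.ReLU(), nn.Conv2d(8, 1, 1))

for name, param in net.named_parameters():
    if param.requires_grad:
        logger.info('training param: %s', name)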
858
+ 2024/03/15 15:33:25 - patchstitcher - INFO - Epoch: [01/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 2.288588523864746 - fine_loss: 2.288588523864746
859
+ 2024/03/15 15:35:13 - patchstitcher - INFO - Epoch: [01/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.749260425567627 - fine_loss: 1.749260425567627
860
+ 2024/03/15 15:36:58 - patchstitcher - INFO - Epoch: [01/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 2.603142499923706 - fine_loss: 2.603142499923706
861
+ 2024/03/15 15:38:59 - patchstitcher - INFO - Epoch: [01/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 3.0235860347747803 - fine_loss: 3.0235860347747803
862
+ 2024/03/15 15:42:38 - patchstitcher - INFO - Epoch: [02/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 2.2628891468048096 - fine_loss: 2.2628891468048096
863
+ 2024/03/15 15:44:44 - patchstitcher - INFO - Epoch: [02/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 2.2125635147094727 - fine_loss: 2.2125635147094727
864
+ 2024/03/15 15:46:44 - patchstitcher - INFO - Epoch: [02/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.884977102279663 - fine_loss: 1.884977102279663
865
+ 2024/03/15 15:48:46 - patchstitcher - INFO - Epoch: [02/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 3.667808771133423 - fine_loss: 3.667808771133423
866
+ 2024/03/15 15:50:43 - patchstitcher - INFO - Evaluation Summary:
867
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+-----------+-----------+
868
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
869
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+-----------+-----------+
870
+ | 0.7653929 | 0.9569647 | 0.9891034 | 0.1631364 | 2.063872 | 0.0675193 | 0.2015772 | 17.5721867 | 0.3284417 | 1.5396647 |
871
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+-----------+-----------+
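The Evaluation Summary columns are the standard monocular-depth metrics: threshold accuracies a1-a3 (max(gt/pred, pred/gt) below 1.25, 1.25^2, 1.25^3), absolute and squared relative error, RMSE, log10 error, log RMSE, and the scale-invariant log error; see is this project's edge-quality metric and is not reproduced here. A minimal sketch of the conventional formulas, assuming pre-masked arrays of positive depths (the repository's own evaluation also handles validity masks and depth clipping):

import numpy as np

def eval_depth(gt: np.ndarray, pred: np.ndarray) -> dict:
    # Conventional depth metrics over valid (positive) pixels.
    thresh = np.maximum(gt / pred, pred / gt)
    err = np.log(pred) - np.log(gt)
    return {
        'a1': float((thresh < 1.25).mean()),
        'a2': float((thresh < 1.25 ** 2).mean()),
        'a3': float((thresh < 1.25 ** 3).mean()),
        'abs_rel': float(np.mean(np.abs(gt - pred) / gt)),
        'sq_rel': float(np.mean((gt - pred) ** 2 / gt)),
        'rmse': float(np.sqrt(np.mean((gt - pred) ** 2))),
        'rmse_log': float(np.sqrt(np.mean((np.log(gt) - np.log(pred)) ** 2))),
        'log_10': float(np.mean(np.abs(np.log10(gt) - np.log10(pred)))),
        'silog': float(np.sqrt(np.mean(err ** 2) - np.mean(err) ** 2) * 100),
    }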
872
+ 2024/03/15 15:52:52 - patchstitcher - INFO - Epoch: [03/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.9002115726470947 - fine_loss: 1.9002115726470947
873
+ 2024/03/15 15:54:51 - patchstitcher - INFO - Epoch: [03/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.533200979232788 - fine_loss: 1.533200979232788
874
+ 2024/03/15 15:56:53 - patchstitcher - INFO - Epoch: [03/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.3708069324493408 - fine_loss: 1.3708069324493408
875
+ 2024/03/15 15:58:56 - patchstitcher - INFO - Epoch: [03/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.3536834716796875 - fine_loss: 1.3536834716796875
876
+ 2024/03/15 16:02:35 - patchstitcher - INFO - Epoch: [04/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.4067535400390625 - fine_loss: 1.4067535400390625
877
+ 2024/03/15 16:04:38 - patchstitcher - INFO - Epoch: [04/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.571197509765625 - fine_loss: 1.571197509765625
878
+ 2024/03/15 16:06:40 - patchstitcher - INFO - Epoch: [04/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 2.9749035835266113 - fine_loss: 2.9749035835266113
879
+ 2024/03/15 16:08:48 - patchstitcher - INFO - Epoch: [04/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.893333911895752 - fine_loss: 0.893333911895752
880
+ 2024/03/15 16:10:40 - patchstitcher - INFO - Evaluation Summary:
881
+ +-----------+-----------+----------+-----------+-----------+-----------+----------+------------+-----------+-----------+
882
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
883
+ +-----------+-----------+----------+-----------+-----------+-----------+----------+------------+-----------+-----------+
884
+ | 0.8439181 | 0.9733375 | 0.992747 | 0.1316369 | 1.8230734 | 0.0558847 | 0.171333 | 15.4284363 | 0.2575101 | 1.3799866 |
885
+ +-----------+-----------+----------+-----------+-----------+-----------+----------+------------+-----------+-----------+
886
+ 2024/03/15 16:12:51 - patchstitcher - INFO - Epoch: [05/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.5204694271087646 - fine_loss: 1.5204694271087646
887
+ 2024/03/15 16:14:53 - patchstitcher - INFO - Epoch: [05/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.0538222789764404 - fine_loss: 1.0538222789764404
888
+ 2024/03/15 16:17:00 - patchstitcher - INFO - Epoch: [05/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.246050477027893 - fine_loss: 1.246050477027893
889
+ 2024/03/15 16:19:04 - patchstitcher - INFO - Epoch: [05/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.4139764308929443 - fine_loss: 1.4139764308929443
890
+ 2024/03/15 16:22:40 - patchstitcher - INFO - Epoch: [06/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.5990095138549805 - fine_loss: 1.5990095138549805
891
+ 2024/03/15 16:24:45 - patchstitcher - INFO - Epoch: [06/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.4719877243041992 - fine_loss: 1.4719877243041992
892
+ 2024/03/15 16:26:49 - patchstitcher - INFO - Epoch: [06/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.998321533203125 - fine_loss: 0.998321533203125
893
+ 2024/03/15 16:28:52 - patchstitcher - INFO - Epoch: [06/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.2637615203857422 - fine_loss: 1.2637615203857422
894
+ 2024/03/15 16:30:46 - patchstitcher - INFO - Evaluation Summary:
895
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
896
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
897
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
898
+ | 0.8831826 | 0.9846013 | 0.9953048 | 0.1145366 | 1.6448599 | 0.0488564 | 0.1510406 | 14.0402038 | 0.2199031 | 1.3085128 |
899
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
900
+ 2024/03/15 16:32:53 - patchstitcher - INFO - Epoch: [07/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.65132737159729 - fine_loss: 1.65132737159729
901
+ 2024/03/15 16:34:56 - patchstitcher - INFO - Epoch: [07/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.4322144985198975 - fine_loss: 1.4322144985198975
902
+ 2024/03/15 16:37:04 - patchstitcher - INFO - Epoch: [07/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.034339427947998 - fine_loss: 1.034339427947998
903
+ 2024/03/15 16:39:08 - patchstitcher - INFO - Epoch: [07/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.0732086896896362 - fine_loss: 1.0732086896896362
904
+ 2024/03/15 16:42:43 - patchstitcher - INFO - Epoch: [08/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.3489086627960205 - fine_loss: 1.3489086627960205
905
+ 2024/03/15 16:44:47 - patchstitcher - INFO - Epoch: [08/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.4356486797332764 - fine_loss: 1.4356486797332764
906
+ 2024/03/15 16:46:50 - patchstitcher - INFO - Epoch: [08/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.6865524649620056 - fine_loss: 0.6865524649620056
907
+ 2024/03/15 16:48:50 - patchstitcher - INFO - Epoch: [08/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.4590085744857788 - fine_loss: 1.4590085744857788
908
+ 2024/03/15 16:50:41 - patchstitcher - INFO - Evaluation Summary:
909
+ +-----------+-----------+-----------+-----------+-----------+-----------+----------+------------+-----------+-----------+
910
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
911
+ +-----------+-----------+-----------+-----------+-----------+-----------+----------+------------+-----------+-----------+
912
+ | 0.8921932 | 0.9874671 | 0.9972081 | 0.1083586 | 1.6257898 | 0.0457595 | 0.142043 | 12.7745355 | 0.2076856 | 1.2743567 |
913
+ +-----------+-----------+-----------+-----------+-----------+-----------+----------+------------+-----------+-----------+
914
+ 2024/03/15 16:52:44 - patchstitcher - INFO - Epoch: [09/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.008254885673523 - fine_loss: 1.008254885673523
915
+ 2024/03/15 16:54:54 - patchstitcher - INFO - Epoch: [09/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8210620880126953 - fine_loss: 0.8210620880126953
916
+ 2024/03/15 16:56:55 - patchstitcher - INFO - Epoch: [09/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.8681334257125854 - fine_loss: 1.8681334257125854
917
+ 2024/03/15 16:58:59 - patchstitcher - INFO - Epoch: [09/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.9568914771080017 - fine_loss: 0.9568914771080017
918
+ 2024/03/15 17:02:34 - patchstitcher - INFO - Epoch: [10/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.5452194213867188 - fine_loss: 1.5452194213867188
919
+ 2024/03/15 17:04:40 - patchstitcher - INFO - Epoch: [10/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.9237810373306274 - fine_loss: 0.9237810373306274
920
+ 2024/03/15 17:06:43 - patchstitcher - INFO - Epoch: [10/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.4192367792129517 - fine_loss: 1.4192367792129517
921
+ 2024/03/15 17:08:47 - patchstitcher - INFO - Epoch: [10/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.1616711616516113 - fine_loss: 1.1616711616516113
922
+ 2024/03/15 17:10:40 - patchstitcher - INFO - Evaluation Summary:
923
+ +-----------+-----------+----------+-----------+----------+-----------+-----------+------------+-----------+-----------+
924
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
925
+ +-----------+-----------+----------+-----------+----------+-----------+-----------+------------+-----------+-----------+
926
+ | 0.9095374 | 0.9878494 | 0.996491 | 0.1000458 | 1.529536 | 0.0445519 | 0.1377915 | 12.2980782 | 0.1741764 | 1.1720957 |
927
+ +-----------+-----------+----------+-----------+----------+-----------+-----------+------------+-----------+-----------+
928
+ 2024/03/15 17:12:48 - patchstitcher - INFO - Epoch: [11/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.2545241117477417 - fine_loss: 1.2545241117477417
929
+ 2024/03/15 17:14:52 - patchstitcher - INFO - Epoch: [11/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.9477699398994446 - fine_loss: 0.9477699398994446
930
+ 2024/03/15 17:16:59 - patchstitcher - INFO - Epoch: [11/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.3806159496307373 - fine_loss: 1.3806159496307373
931
+ 2024/03/15 17:19:02 - patchstitcher - INFO - Epoch: [11/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.12031888961792 - fine_loss: 1.12031888961792
932
+ 2024/03/15 17:22:38 - patchstitcher - INFO - Epoch: [12/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.9633316993713379 - fine_loss: 0.9633316993713379
933
+ 2024/03/15 17:24:38 - patchstitcher - INFO - Epoch: [12/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.9473192691802979 - fine_loss: 0.9473192691802979
934
+ 2024/03/15 17:26:38 - patchstitcher - INFO - Epoch: [12/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8891739845275879 - fine_loss: 0.8891739845275879
935
+ 2024/03/15 17:28:46 - patchstitcher - INFO - Epoch: [12/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.9305822849273682 - fine_loss: 0.9305822849273682
936
+ 2024/03/15 17:30:43 - patchstitcher - INFO - Evaluation Summary:
937
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
938
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
939
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
940
+ | 0.9285209 | 0.9902661 | 0.9963124 | 0.0922186 | 1.4988106 | 0.0394503 | 0.1265562 | 11.929424 | 0.1792194 | 1.2142439 |
941
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
942
+ 2024/03/15 17:32:52 - patchstitcher - INFO - Epoch: [13/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.26497220993042 - fine_loss: 1.26497220993042
943
+ 2024/03/15 17:35:00 - patchstitcher - INFO - Epoch: [13/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.580217957496643 - fine_loss: 1.580217957496643
944
+ 2024/03/15 17:36:59 - patchstitcher - INFO - Epoch: [13/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.6395942568778992 - fine_loss: 0.6395942568778992
945
+ 2024/03/15 17:39:02 - patchstitcher - INFO - Epoch: [13/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.32594698667526245 - fine_loss: 0.32594698667526245
946
+ 2024/03/15 17:42:34 - patchstitcher - INFO - Epoch: [14/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.924031674861908 - fine_loss: 0.924031674861908
947
+ 2024/03/15 17:44:36 - patchstitcher - INFO - Epoch: [14/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.985018253326416 - fine_loss: 0.985018253326416
948
+ 2024/03/15 17:46:38 - patchstitcher - INFO - Epoch: [14/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.0442320108413696 - fine_loss: 1.0442320108413696
949
+ 2024/03/15 17:48:43 - patchstitcher - INFO - Epoch: [14/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.5068702101707458 - fine_loss: 0.5068702101707458
950
+ 2024/03/15 17:50:33 - patchstitcher - INFO - Evaluation Summary:
951
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
952
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
953
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
954
+ | 0.9381619 | 0.9895476 | 0.9972216 | 0.0913334 | 1.5578288 | 0.0391697 | 0.1243245 | 11.1463653 | 0.1706981 | 1.1217431 |
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
+ 2024/03/15 17:52:46 - patchstitcher - INFO - Epoch: [15/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.1108862161636353 - fine_loss: 1.1108862161636353
+ 2024/03/15 17:54:52 - patchstitcher - INFO - Epoch: [15/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.9237959980964661 - fine_loss: 0.9237959980964661
+ 2024/03/15 17:56:56 - patchstitcher - INFO - Epoch: [15/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.5644421577453613 - fine_loss: 1.5644421577453613
+ 2024/03/15 17:58:54 - patchstitcher - INFO - Epoch: [15/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.7902756929397583 - fine_loss: 0.7902756929397583
+ 2024/03/15 18:02:26 - patchstitcher - INFO - Epoch: [16/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.966326117515564 - fine_loss: 0.966326117515564
+ 2024/03/15 18:04:32 - patchstitcher - INFO - Epoch: [16/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.9776898622512817 - fine_loss: 0.9776898622512817
+ 2024/03/15 18:06:33 - patchstitcher - INFO - Epoch: [16/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.6681317090988159 - fine_loss: 0.6681317090988159
+ 2024/03/15 18:08:34 - patchstitcher - INFO - Epoch: [16/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.80037522315979 - fine_loss: 0.80037522315979
+ 2024/03/15 18:10:20 - patchstitcher - INFO - Evaluation Summary:
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
+ | 0.9538666 | 0.9917138 | 0.9972104 | 0.0811061 | 1.3823568 | 0.0351258 | 0.1140013 | 10.5376763 | 0.1382621 | 1.0577048 |
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
+ 2024/03/15 18:12:28 - patchstitcher - INFO - Epoch: [17/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.147787094116211 - fine_loss: 1.147787094116211
+ 2024/03/15 18:14:30 - patchstitcher - INFO - Epoch: [17/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.7300316691398621 - fine_loss: 0.7300316691398621
+ 2024/03/15 18:16:37 - patchstitcher - INFO - Epoch: [17/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.7750428318977356 - fine_loss: 0.7750428318977356
+ 2024/03/15 18:18:37 - patchstitcher - INFO - Epoch: [17/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.50600266456604 - fine_loss: 1.50600266456604
+ 2024/03/15 18:22:20 - patchstitcher - INFO - Epoch: [18/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.8911293745040894 - fine_loss: 0.8911293745040894
+ 2024/03/15 18:24:18 - patchstitcher - INFO - Epoch: [18/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.5605521202087402 - fine_loss: 0.5605521202087402
+ 2024/03/15 18:26:21 - patchstitcher - INFO - Epoch: [18/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.6763710975646973 - fine_loss: 1.6763710975646973
+ 2024/03/15 18:28:20 - patchstitcher - INFO - Epoch: [18/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.6500707864761353 - fine_loss: 0.6500707864761353
+ 2024/03/15 18:30:14 - patchstitcher - INFO - Evaluation Summary:
+ +-----------+----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
+ +-----------+----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
+ | 0.9562948 | 0.990871 | 0.9974688 | 0.0761721 | 1.3729287 | 0.0331131 | 0.1092103 | 10.1530306 | 0.1366973 | 1.0216396 |
+ +-----------+----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
+ 2024/03/15 18:32:23 - patchstitcher - INFO - Epoch: [19/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.5755664110183716 - fine_loss: 0.5755664110183716
+ 2024/03/15 18:34:28 - patchstitcher - INFO - Epoch: [19/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.2044012546539307 - fine_loss: 1.2044012546539307
+ 2024/03/15 18:36:33 - patchstitcher - INFO - Epoch: [19/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.266536831855774 - fine_loss: 1.266536831855774
+ 2024/03/15 18:38:35 - patchstitcher - INFO - Epoch: [19/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.7211558818817139 - fine_loss: 0.7211558818817139
+ 2024/03/15 18:42:13 - patchstitcher - INFO - Epoch: [20/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.6136915683746338 - fine_loss: 0.6136915683746338
+ 2024/03/15 18:44:12 - patchstitcher - INFO - Epoch: [20/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.4747104048728943 - fine_loss: 0.4747104048728943
+ 2024/03/15 18:46:16 - patchstitcher - INFO - Epoch: [20/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.5850560069084167 - fine_loss: 0.5850560069084167
+ 2024/03/15 18:48:21 - patchstitcher - INFO - Epoch: [20/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.37204447388648987 - fine_loss: 0.37204447388648987
+ 2024/03/15 18:50:16 - patchstitcher - INFO - Evaluation Summary:
+ +-----------+-----------+----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
+ +-----------+-----------+----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+
+ | 0.9645657 | 0.9920502 | 0.997654 | 0.0686085 | 1.2732928 | 0.0299144 | 0.1009926 | 9.6382305 | 0.1200509 | 0.993343 |
+ +-----------+-----------+----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+
+ 2024/03/15 18:52:27 - patchstitcher - INFO - Epoch: [21/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.6047840714454651 - fine_loss: 0.6047840714454651
+ 2024/03/15 18:54:31 - patchstitcher - INFO - Epoch: [21/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.5551916360855103 - fine_loss: 0.5551916360855103
+ 2024/03/15 18:56:37 - patchstitcher - INFO - Epoch: [21/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.32560303807258606 - fine_loss: 0.32560303807258606
+ 2024/03/15 18:58:40 - patchstitcher - INFO - Epoch: [21/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.7431879043579102 - fine_loss: 1.7431879043579102
+ 2024/03/15 19:02:20 - patchstitcher - INFO - Epoch: [22/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.7936936020851135 - fine_loss: 0.7936936020851135
+ 2024/03/15 19:04:21 - patchstitcher - INFO - Epoch: [22/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.6791415214538574 - fine_loss: 0.6791415214538574
+ 2024/03/15 19:06:23 - patchstitcher - INFO - Epoch: [22/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.6265323758125305 - fine_loss: 0.6265323758125305
+ 2024/03/15 19:08:25 - patchstitcher - INFO - Epoch: [22/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.6945874691009521 - fine_loss: 0.6945874691009521
+ 2024/03/15 19:10:17 - patchstitcher - INFO - Evaluation Summary:
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+ | 0.9671118 | 0.9931541 | 0.9976758 | 0.0652155 | 1.2549019 | 0.0282474 | 0.0973396 | 9.2669667 | 0.1172386 | 0.9884787 |
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+ 2024/03/15 19:12:25 - patchstitcher - INFO - Epoch: [23/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.2996392250061035 - fine_loss: 1.2996392250061035
+ 2024/03/15 19:14:26 - patchstitcher - INFO - Epoch: [23/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.674423098564148 - fine_loss: 0.674423098564148
+ 2024/03/15 19:16:29 - patchstitcher - INFO - Epoch: [23/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 2.0330402851104736 - fine_loss: 2.0330402851104736
+ 2024/03/15 19:18:34 - patchstitcher - INFO - Epoch: [23/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.1583242416381836 - fine_loss: 1.1583242416381836
+ 2024/03/15 19:22:12 - patchstitcher - INFO - Epoch: [24/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.8227792978286743 - fine_loss: 0.8227792978286743
+ 2024/03/15 19:24:12 - patchstitcher - INFO - Epoch: [24/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.6849284172058105 - fine_loss: 0.6849284172058105
+ 2024/03/15 19:26:14 - patchstitcher - INFO - Epoch: [24/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.5954287648200989 - fine_loss: 0.5954287648200989
+ 2024/03/15 19:28:20 - patchstitcher - INFO - Epoch: [24/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.38687634468078613 - fine_loss: 0.38687634468078613
+ 2024/03/15 19:30:07 - patchstitcher - INFO - Evaluation Summary:
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+
+ | 0.9687062 | 0.9931654 | 0.9976169 | 0.0635503 | 1.2467909 | 0.0277027 | 0.0958232 | 9.191893 | 0.1155029 | 0.9803023 |
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+
+ 2024/03/15 19:30:07 - patchstitcher - INFO - Saving ckp, but use the inner get_save_dict fuction to get model_dict
+ 2024/03/15 19:30:07 - patchstitcher - INFO - For saving space. Would you like to save base model several times? :>
+ 2024/03/15 19:30:08 - patchstitcher - INFO - save checkpoint_24.pth at ./work_dir/depthanything_vitb_u4k/fine_pretrain
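The evaluation tables above report the standard monocular-depth metrics (threshold accuracies a1/a2/a3, abs_rel, rmse, log_10, rmse_log, silog, sq_rel) plus PatchFusion's soft edge error (see). As a reading aid, the following is a minimal NumPy sketch of the usual definitions of those metrics; it is not the repository's evaluation code, the valid-depth range (0.001-80) is taken from the configs in this commit, and the see column is omitted because it depends on PatchFusion's own edge extraction.

import numpy as np

# Minimal sketch of the standard depth metrics shown in the tables above.
# Not the repository's evaluation code; `see` (soft edge error) is omitted.
def depth_metrics(pred, gt, min_depth=0.001, max_depth=80.0):
    mask = (gt > min_depth) & (gt < max_depth)
    pred, gt = pred[mask], gt[mask]

    # Threshold accuracies a1/a2/a3: fraction of pixels with max(gt/pred, pred/gt) < 1.25^k
    thresh = np.maximum(gt / pred, pred / gt)
    a1, a2, a3 = [(thresh < 1.25 ** k).mean() for k in (1, 2, 3)]

    abs_rel = np.mean(np.abs(pred - gt) / gt)
    sq_rel = np.mean((pred - gt) ** 2 / gt)
    rmse = np.sqrt(np.mean((pred - gt) ** 2))
    rmse_log = np.sqrt(np.mean((np.log(pred) - np.log(gt)) ** 2))
    log_10 = np.mean(np.abs(np.log10(pred) - np.log10(gt)))

    # Scale-invariant log error, reported here scaled by 100 as in the tables
    err = np.log(pred) - np.log(gt)
    silog = np.sqrt(np.mean(err ** 2) - np.mean(err) ** 2) * 100

    return dict(a1=a1, a2=a2, a3=a3, abs_rel=abs_rel, rmse=rmse, log_10=log_10,
                rmse_log=rmse_log, silog=silog, sq_rel=sq_rel)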
depthanything_vitb_u4k/fine_pretrain/checkpoint_24.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2b0ca89e141a9a52626174d614584fccb47e140090a495cc5822803dac7018c
+ size 1171453994
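Note that checkpoint_24.pth is stored through Git LFS, so the three lines above are only a pointer (sha256 and byte size), not the weights themselves. A minimal sketch for fetching the actual file with the huggingface_hub client is shown below; the repo_id is a placeholder and must be replaced with this repository's real id.

from huggingface_hub import hf_hub_download

# Placeholder repo_id: substitute the actual "<user>/<repo>" of this model repository.
ckpt_path = hf_hub_download(
    repo_id="<user>/<repo>",
    filename="depthanything_vitb_u4k/fine_pretrain/checkpoint_24.pth",
)
print(ckpt_path)  # local cache path of the ~1.17 GB checkpoint (size from the LFS pointer)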
depthanything_vitb_u4k/fine_pretrain/config.py ADDED
@@ -0,0 +1,314 @@
1
+ collect_input_args = [
2
+ 'image_lr',
3
+ 'crops_image_hr',
4
+ 'depth_gt',
5
+ 'crop_depths',
6
+ 'bboxs',
7
+ 'image_hr',
8
+ ]
9
+ convert_syncbn = True
10
+ debug = False
11
+ env_cfg = dict(
12
+ cudnn_benchmark=True,
13
+ dist_cfg=dict(backend='nccl'),
14
+ mp_cfg=dict(mp_start_method='forkserver'))
15
+ find_unused_parameters = True
16
+ general_dataloader = dict(
17
+ batch_size=1,
18
+ dataset=dict(
19
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
20
+ num_workers=2)
21
+ launcher = 'pytorch'
22
+ log_name = 'fine_pretrain'
23
+ max_depth = 80
24
+ min_depth = 0.001
25
+ model = dict(
26
+ coarse_branch=dict(
27
+ attractor_alpha=1000,
28
+ attractor_gamma=2,
29
+ attractor_kind='mean',
30
+ attractor_type='inv',
31
+ aug=True,
32
+ bin_centers_type='softplus',
33
+ bin_embedding_dim=128,
34
+ clip_grad=0.1,
35
+ dataset='nyu',
36
+ depth_anything=True,
37
+ distributed=True,
38
+ do_resize=False,
39
+ force_keep_ar=True,
40
+ freeze_midas_bn=True,
41
+ gpu='NULL',
42
+ img_size=[
43
+ 392,
44
+ 518,
45
+ ],
46
+ inverse_midas=False,
47
+ log_images_every=0.1,
48
+ max_depth=80,
49
+ max_temp=50.0,
50
+ max_translation=100,
51
+ memory_efficient=True,
52
+ midas_model_type='vitb',
53
+ min_depth=0.001,
54
+ min_temp=0.0212,
55
+ model='zoedepth',
56
+ n_attractors=[
57
+ 16,
58
+ 8,
59
+ 4,
60
+ 1,
61
+ ],
62
+ n_bins=64,
63
+ name='ZoeDepth',
64
+ notes='',
65
+ output_distribution='logbinomial',
66
+ prefetch=False,
67
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
68
+ print_losses=False,
69
+ project='ZoeDepth',
70
+ random_crop=False,
71
+ random_translate=False,
72
+ root='.',
73
+ save_dir='',
74
+ shared_dict='NULL',
75
+ tags='',
76
+ train_midas=True,
77
+ translate_prob=0.2,
78
+ type='DA-ZoeDepth',
79
+ uid='NULL',
80
+ use_amp=False,
81
+ use_pretrained_midas=True,
82
+ use_shared_dict=False,
83
+ validate_every=0.25,
84
+ version_name='v1',
85
+ workers=16),
86
+ fine_branch=dict(
87
+ attractor_alpha=1000,
88
+ attractor_gamma=2,
89
+ attractor_kind='mean',
90
+ attractor_type='inv',
91
+ aug=True,
92
+ bin_centers_type='softplus',
93
+ bin_embedding_dim=128,
94
+ clip_grad=0.1,
95
+ dataset='nyu',
96
+ depth_anything=True,
97
+ distributed=True,
98
+ do_resize=False,
99
+ force_keep_ar=True,
100
+ freeze_midas_bn=True,
101
+ gpu='NULL',
102
+ img_size=[
103
+ 392,
104
+ 518,
105
+ ],
106
+ inverse_midas=False,
107
+ log_images_every=0.1,
108
+ max_depth=80,
109
+ max_temp=50.0,
110
+ max_translation=100,
111
+ memory_efficient=True,
112
+ midas_model_type='vitb',
113
+ min_depth=0.001,
114
+ min_temp=0.0212,
115
+ model='zoedepth',
116
+ n_attractors=[
117
+ 16,
118
+ 8,
119
+ 4,
120
+ 1,
121
+ ],
122
+ n_bins=64,
123
+ name='ZoeDepth',
124
+ notes='',
125
+ output_distribution='logbinomial',
126
+ prefetch=False,
127
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
128
+ print_losses=False,
129
+ project='ZoeDepth',
130
+ random_crop=False,
131
+ random_translate=False,
132
+ root='.',
133
+ save_dir='',
134
+ shared_dict='NULL',
135
+ tags='',
136
+ train_midas=True,
137
+ translate_prob=0.2,
138
+ type='DA-ZoeDepth',
139
+ uid='NULL',
140
+ use_amp=False,
141
+ use_pretrained_midas=True,
142
+ use_shared_dict=False,
143
+ validate_every=0.25,
144
+ version_name='v1',
145
+ workers=16),
146
+ max_depth=80,
147
+ min_depth=0.001,
148
+ patch_process_shape=(
149
+ 392,
150
+ 518,
151
+ ),
152
+ sigloss=dict(type='SILogLoss'),
153
+ target='fine',
154
+ type='BaselinePretrain')
155
+ optim_wrapper = dict(
156
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
157
+ optimizer=dict(lr=4e-06, type='AdamW', weight_decay=0.01),
158
+ paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict()))
159
+ param_scheduler = dict(
160
+ base_momentum=0.85,
161
+ cycle_momentum=True,
162
+ div_factor=1,
163
+ final_div_factor=10000,
164
+ max_momentum=0.95,
165
+ pct_start=0.5,
166
+ three_phase=False)
167
+ project = 'patchfusion'
168
+ resume = False
169
+ tags = [
170
+ 'fine',
171
+ 'da',
172
+ 'vitb',
173
+ ]
174
+ test_in_dataloader = dict(
175
+ batch_size=1,
176
+ dataset=dict(
177
+ data_root='./data/u4k',
178
+ max_depth=80,
179
+ min_depth=0.001,
180
+ mode='infer',
181
+ split='./data/u4k/splits/test.txt',
182
+ transform_cfg=dict(network_process_size=[
183
+ 384,
184
+ 512,
185
+ ]),
186
+ type='UnrealStereo4kDataset'),
187
+ num_workers=2)
188
+ test_out_dataloader = dict(
189
+ batch_size=1,
190
+ dataset=dict(
191
+ data_root='./data/u4k',
192
+ max_depth=80,
193
+ min_depth=0.001,
194
+ mode='infer',
195
+ split='./data/u4k/splits/test_out.txt',
196
+ transform_cfg=dict(network_process_size=[
197
+ 384,
198
+ 512,
199
+ ]),
200
+ type='UnrealStereo4kDataset'),
201
+ num_workers=2)
202
+ train_cfg = dict(
203
+ eval_start=0,
204
+ log_interval=100,
205
+ max_epochs=24,
206
+ save_checkpoint_interval=24,
207
+ train_log_img_interval=500,
208
+ val_interval=2,
209
+ val_log_img_interval=50,
210
+ val_type='epoch_base')
211
+ train_dataloader = dict(
212
+ batch_size=4,
213
+ dataset=dict(
214
+ data_root='./data/u4k',
215
+ max_depth=80,
216
+ min_depth=0.001,
217
+ mode='train',
218
+ resize_mode='depth-anything',
219
+ split='./data/u4k/splits/train.txt',
220
+ transform_cfg=dict(
221
+ degree=1.0,
222
+ network_process_size=[
223
+ 392,
224
+ 518,
225
+ ],
226
+ random_crop=True,
227
+ random_crop_size=(
228
+ 540,
229
+ 960,
230
+ )),
231
+ type='UnrealStereo4kDataset'),
232
+ num_workers=4)
233
+ val_dataloader = dict(
234
+ batch_size=1,
235
+ dataset=dict(
236
+ data_root='./data/u4k',
237
+ max_depth=80,
238
+ min_depth=0.001,
239
+ mode='infer',
240
+ resize_mode='depth-anything',
241
+ split='./data/u4k/splits/val.txt',
242
+ transform_cfg=dict(
243
+ degree=1.0,
244
+ network_process_size=[
245
+ 392,
246
+ 518,
247
+ ],
248
+ random_crop_size=(
249
+ 540,
250
+ 960,
251
+ )),
252
+ type='UnrealStereo4kDataset'),
253
+ num_workers=2)
254
+ work_dir = './work_dir/depthanything_vitb_u4k/fine_pretrain'
255
+ zoe_depth_config = dict(
256
+ attractor_alpha=1000,
257
+ attractor_gamma=2,
258
+ attractor_kind='mean',
259
+ attractor_type='inv',
260
+ aug=True,
261
+ bin_centers_type='softplus',
262
+ bin_embedding_dim=128,
263
+ clip_grad=0.1,
264
+ dataset='nyu',
265
+ depth_anything=True,
266
+ distributed=True,
267
+ do_resize=False,
268
+ force_keep_ar=True,
269
+ freeze_midas_bn=True,
270
+ gpu='NULL',
271
+ img_size=[
272
+ 392,
273
+ 518,
274
+ ],
275
+ inverse_midas=False,
276
+ log_images_every=0.1,
277
+ max_depth=80,
278
+ max_temp=50.0,
279
+ max_translation=100,
280
+ memory_efficient=True,
281
+ midas_model_type='vitb',
282
+ min_depth=0.001,
283
+ min_temp=0.0212,
284
+ model='zoedepth',
285
+ n_attractors=[
286
+ 16,
287
+ 8,
288
+ 4,
289
+ 1,
290
+ ],
291
+ n_bins=64,
292
+ name='ZoeDepth',
293
+ notes='',
294
+ output_distribution='logbinomial',
295
+ prefetch=False,
296
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
297
+ print_losses=False,
298
+ project='ZoeDepth',
299
+ random_crop=False,
300
+ random_translate=False,
301
+ root='.',
302
+ save_dir='',
303
+ shared_dict='NULL',
304
+ tags='',
305
+ train_midas=True,
306
+ translate_prob=0.2,
307
+ type='DA-ZoeDepth',
308
+ uid='NULL',
309
+ use_amp=False,
310
+ use_pretrained_midas=True,
311
+ use_shared_dict=False,
312
+ validate_every=0.25,
313
+ version_name='v1',
314
+ workers=16)
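The training logs in this commit report MMEngine 0.10.2, and this config.py follows MMEngine's Python config style, so it can be inspected programmatically. A minimal sketch, assuming the file has been downloaded locally; Config.fromfile is a real MMEngine API, while the types named inside the config ('BaselinePretrain', 'UnrealStereo4kDataset', ...) are registered by the PatchFusion codebase, not by MMEngine itself.

from mmengine.config import Config

# Load the fine-branch pretraining config and inspect a few fields.
cfg = Config.fromfile('depthanything_vitb_u4k/fine_pretrain/config.py')
print(cfg.model.type)                     # 'BaselinePretrain' (target='fine')
print(cfg.train_cfg.max_epochs)           # 24
print(cfg.optim_wrapper.optimizer['lr'])  # 4e-06
print(cfg.train_dataloader.dataset.split) # './data/u4k/splits/train.txt'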
depthanything_vitb_u4k/patchfusion/20240315_193032.log ADDED
The diff for this file is too large to render. See raw diff
 
depthanything_vitb_u4k/patchfusion/checkpoint_16.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f104b19568f39e85783de4cd4ecf032ee24152e8daae929db458b111aef6ea20
+ size 417857453
depthanything_vitb_u4k/patchfusion/config.py ADDED
@@ -0,0 +1,341 @@
1
+ collect_input_args = [
2
+ 'image_lr',
3
+ 'crops_image_hr',
4
+ 'depth_gt',
5
+ 'crop_depths',
6
+ 'bboxs',
7
+ 'image_hr',
8
+ ]
9
+ convert_syncbn = True
10
+ debug = False
11
+ env_cfg = dict(
12
+ cudnn_benchmark=True,
13
+ dist_cfg=dict(backend='nccl'),
14
+ mp_cfg=dict(mp_start_method='forkserver'))
15
+ find_unused_parameters = True
16
+ general_dataloader = dict(
17
+ batch_size=1,
18
+ dataset=dict(
19
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
20
+ num_workers=2)
21
+ launcher = 'pytorch'
22
+ log_name = 'patchfusion'
23
+ max_depth = 80
24
+ min_depth = 0.001
25
+ model = dict(
26
+ coarse_branch=dict(
27
+ attractor_alpha=1000,
28
+ attractor_gamma=2,
29
+ attractor_kind='mean',
30
+ attractor_type='inv',
31
+ aug=True,
32
+ bin_centers_type='softplus',
33
+ bin_embedding_dim=128,
34
+ clip_grad=0.1,
35
+ dataset='nyu',
36
+ depth_anything=True,
37
+ distributed=True,
38
+ do_resize=False,
39
+ force_keep_ar=True,
40
+ freeze_midas_bn=True,
41
+ gpu='NULL',
42
+ img_size=[
43
+ 392,
44
+ 518,
45
+ ],
46
+ inverse_midas=False,
47
+ log_images_every=0.1,
48
+ max_depth=80,
49
+ max_temp=50.0,
50
+ max_translation=100,
51
+ memory_efficient=True,
52
+ midas_model_type='vitb',
53
+ min_depth=0.001,
54
+ min_temp=0.0212,
55
+ model='zoedepth',
56
+ n_attractors=[
57
+ 16,
58
+ 8,
59
+ 4,
60
+ 1,
61
+ ],
62
+ n_bins=64,
63
+ name='ZoeDepth',
64
+ notes='',
65
+ output_distribution='logbinomial',
66
+ prefetch=False,
67
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
68
+ print_losses=False,
69
+ project='ZoeDepth',
70
+ random_crop=False,
71
+ random_translate=False,
72
+ root='.',
73
+ save_dir='',
74
+ shared_dict='NULL',
75
+ tags='',
76
+ train_midas=True,
77
+ translate_prob=0.2,
78
+ type='DA-ZoeDepth',
79
+ uid='NULL',
80
+ use_amp=False,
81
+ use_pretrained_midas=True,
82
+ use_shared_dict=False,
83
+ validate_every=0.25,
84
+ version_name='v1',
85
+ workers=16),
86
+ fine_branch=dict(
87
+ attractor_alpha=1000,
88
+ attractor_gamma=2,
89
+ attractor_kind='mean',
90
+ attractor_type='inv',
91
+ aug=True,
92
+ bin_centers_type='softplus',
93
+ bin_embedding_dim=128,
94
+ clip_grad=0.1,
95
+ dataset='nyu',
96
+ depth_anything=True,
97
+ distributed=True,
98
+ do_resize=False,
99
+ force_keep_ar=True,
100
+ freeze_midas_bn=True,
101
+ gpu='NULL',
102
+ img_size=[
103
+ 392,
104
+ 518,
105
+ ],
106
+ inverse_midas=False,
107
+ log_images_every=0.1,
108
+ max_depth=80,
109
+ max_temp=50.0,
110
+ max_translation=100,
111
+ memory_efficient=True,
112
+ midas_model_type='vitb',
113
+ min_depth=0.001,
114
+ min_temp=0.0212,
115
+ model='zoedepth',
116
+ n_attractors=[
117
+ 16,
118
+ 8,
119
+ 4,
120
+ 1,
121
+ ],
122
+ n_bins=64,
123
+ name='ZoeDepth',
124
+ notes='',
125
+ output_distribution='logbinomial',
126
+ prefetch=False,
127
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
128
+ print_losses=False,
129
+ project='ZoeDepth',
130
+ random_crop=False,
131
+ random_translate=False,
132
+ root='.',
133
+ save_dir='',
134
+ shared_dict='NULL',
135
+ tags='',
136
+ train_midas=True,
137
+ translate_prob=0.2,
138
+ type='DA-ZoeDepth',
139
+ uid='NULL',
140
+ use_amp=False,
141
+ use_pretrained_midas=True,
142
+ use_shared_dict=False,
143
+ validate_every=0.25,
144
+ version_name='v1',
145
+ workers=16),
146
+ guided_fusion=dict(
147
+ g2l=True,
148
+ in_channels=[
149
+ 32,
150
+ 128,
151
+ 128,
152
+ 128,
153
+ 128,
154
+ 128,
155
+ ],
156
+ n_channels=5,
157
+ num_patches=[
158
+ 203056,
159
+ 66304,
160
+ 16576,
161
+ 4144,
162
+ 1036,
163
+ 266,
164
+ ],
165
+ patch_process_shape=(
166
+ 392,
167
+ 518,
168
+ ),
169
+ type='GuidedFusionPatchFusion'),
170
+ max_depth=80,
171
+ min_depth=0.001,
172
+ patch_process_shape=(
173
+ 392,
174
+ 518,
175
+ ),
176
+ pretrain_model=[
177
+ './work_dir/depthanything_vitb_u4k/coarse_pretrain/checkpoint_24.pth',
178
+ './work_dir/depthanything_vitb_u4k/fine_pretrain/checkpoint_24.pth',
179
+ ],
180
+ sigloss=dict(type='SILogLoss'),
181
+ type='PatchFusion')
182
+ optim_wrapper = dict(
183
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
184
+ optimizer=dict(lr=0.0001, type='AdamW', weight_decay=0.001),
185
+ paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict()))
186
+ param_scheduler = dict(
187
+ base_momentum=0.85,
188
+ cycle_momentum=True,
189
+ div_factor=10,
190
+ final_div_factor=10000,
191
+ max_momentum=0.95,
192
+ pct_start=0.25,
193
+ three_phase=False)
194
+ project = 'patchfusion'
195
+ resume = False
196
+ tags = [
197
+ 'patchfusion',
198
+ 'da',
199
+ 'vitb',
200
+ ]
201
+ test_in_dataloader = dict(
202
+ batch_size=1,
203
+ dataset=dict(
204
+ data_root='./data/u4k',
205
+ max_depth=80,
206
+ min_depth=0.001,
207
+ mode='infer',
208
+ split='./data/u4k/splits/test.txt',
209
+ transform_cfg=dict(network_process_size=[
210
+ 384,
211
+ 512,
212
+ ]),
213
+ type='UnrealStereo4kDataset'),
214
+ num_workers=2)
215
+ test_out_dataloader = dict(
216
+ batch_size=1,
217
+ dataset=dict(
218
+ data_root='./data/u4k',
219
+ max_depth=80,
220
+ min_depth=0.001,
221
+ mode='infer',
222
+ split='./data/u4k/splits/test_out.txt',
223
+ transform_cfg=dict(network_process_size=[
224
+ 384,
225
+ 512,
226
+ ]),
227
+ type='UnrealStereo4kDataset'),
228
+ num_workers=2)
229
+ train_cfg = dict(
230
+ eval_start=0,
231
+ log_interval=100,
232
+ max_epochs=16,
233
+ save_checkpoint_interval=16,
234
+ train_log_img_interval=500,
235
+ val_interval=2,
236
+ val_log_img_interval=50,
237
+ val_type='epoch_base')
238
+ train_dataloader = dict(
239
+ batch_size=4,
240
+ dataset=dict(
241
+ data_root='./data/u4k',
242
+ max_depth=80,
243
+ min_depth=0.001,
244
+ mode='train',
245
+ resize_mode='depth-anything',
246
+ split='./data/u4k/splits/train.txt',
247
+ transform_cfg=dict(
248
+ degree=1.0,
249
+ network_process_size=[
250
+ 392,
251
+ 518,
252
+ ],
253
+ random_crop=True,
254
+ random_crop_size=(
255
+ 540,
256
+ 960,
257
+ )),
258
+ type='UnrealStereo4kDataset'),
259
+ num_workers=4)
260
+ val_dataloader = dict(
261
+ batch_size=1,
262
+ dataset=dict(
263
+ data_root='./data/u4k',
264
+ max_depth=80,
265
+ min_depth=0.001,
266
+ mode='infer',
267
+ resize_mode='depth-anything',
268
+ split='./data/u4k/splits/val.txt',
269
+ transform_cfg=dict(
270
+ degree=1.0,
271
+ network_process_size=[
272
+ 392,
273
+ 518,
274
+ ],
275
+ random_crop_size=(
276
+ 540,
277
+ 960,
278
+ )),
279
+ type='UnrealStereo4kDataset'),
280
+ num_workers=2)
281
+ work_dir = './work_dir/depthanything_vitb_u4k/patchfusion'
282
+ zoe_depth_config = dict(
283
+ attractor_alpha=1000,
284
+ attractor_gamma=2,
285
+ attractor_kind='mean',
286
+ attractor_type='inv',
287
+ aug=True,
288
+ bin_centers_type='softplus',
289
+ bin_embedding_dim=128,
290
+ clip_grad=0.1,
291
+ dataset='nyu',
292
+ depth_anything=True,
293
+ distributed=True,
294
+ do_resize=False,
295
+ force_keep_ar=True,
296
+ freeze_midas_bn=True,
297
+ gpu='NULL',
298
+ img_size=[
299
+ 392,
300
+ 518,
301
+ ],
302
+ inverse_midas=False,
303
+ log_images_every=0.1,
304
+ max_depth=80,
305
+ max_temp=50.0,
306
+ max_translation=100,
307
+ memory_efficient=True,
308
+ midas_model_type='vitb',
309
+ min_depth=0.001,
310
+ min_temp=0.0212,
311
+ model='zoedepth',
312
+ n_attractors=[
313
+ 16,
314
+ 8,
315
+ 4,
316
+ 1,
317
+ ],
318
+ n_bins=64,
319
+ name='ZoeDepth',
320
+ notes='',
321
+ output_distribution='logbinomial',
322
+ prefetch=False,
323
+ pretrained_resource='local::./work_dir/DepthAnything_vitb.pt',
324
+ print_losses=False,
325
+ project='ZoeDepth',
326
+ random_crop=False,
327
+ random_translate=False,
328
+ root='.',
329
+ save_dir='',
330
+ shared_dict='NULL',
331
+ tags='',
332
+ train_midas=True,
333
+ translate_prob=0.2,
334
+ type='DA-ZoeDepth',
335
+ uid='NULL',
336
+ use_amp=False,
337
+ use_pretrained_midas=True,
338
+ use_shared_dict=False,
339
+ validate_every=0.25,
340
+ version_name='v1',
341
+ workers=16)
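Unlike the two pretraining configs, this PatchFusion config consumes the coarse and fine checkpoints through pretrain_model, and its own checkpoint_16.pth (~418 MB versus ~1.17 GB per pretrained branch) appears to store only what get_save_dict returns, as the "For saving space" log message above suggests. Below is a minimal sketch for inspecting such a checkpoint once it has been downloaded; the top-level key names are an assumption, since they depend on the PatchFusion save format.

import torch

# Inspect the fusion checkpoint after fetching it from the repository.
ckpt = torch.load('depthanything_vitb_u4k/patchfusion/checkpoint_16.pth',
                  map_location='cpu')
print(type(ckpt))
if isinstance(ckpt, dict):
    # Top-level keys (e.g. a 'model_dict'-style entry) depend on the save format.
    print(list(ckpt.keys())[:10])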
depthanything_vitl_u4k/coarse_pretrain/20240315_102957.log ADDED
The diff for this file is too large to render. See raw diff
 
depthanything_vitl_u4k/coarse_pretrain/checkpoint_24.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7347385b649cf4a99cbd1cad579bdcdd51cab915bfd283031e55f7e718178f68
+ size 4020717194
depthanything_vitl_u4k/coarse_pretrain/config.py ADDED
@@ -0,0 +1,310 @@
1
+ collect_input_args = [
2
+ 'image_lr',
3
+ 'crops_image_hr',
4
+ 'depth_gt',
5
+ 'crop_depths',
6
+ 'bboxs',
7
+ 'image_hr',
8
+ ]
9
+ convert_syncbn = True
10
+ debug = False
11
+ env_cfg = dict(
12
+ cudnn_benchmark=True,
13
+ dist_cfg=dict(backend='nccl'),
14
+ mp_cfg=dict(mp_start_method='forkserver'))
15
+ find_unused_parameters = True
16
+ general_dataloader = dict(
17
+ batch_size=1,
18
+ dataset=dict(
19
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
20
+ num_workers=2)
21
+ launcher = 'pytorch'
22
+ log_name = 'coarse_pretrain'
23
+ max_depth = 80
24
+ min_depth = 0.001
25
+ model = dict(
26
+ coarse_branch=dict(
27
+ attractor_alpha=1000,
28
+ attractor_gamma=2,
29
+ attractor_kind='mean',
30
+ attractor_type='inv',
31
+ aug=True,
32
+ bin_centers_type='softplus',
33
+ bin_embedding_dim=128,
34
+ clip_grad=0.1,
35
+ dataset='nyu',
36
+ depth_anything=True,
37
+ distributed=True,
38
+ do_resize=False,
39
+ force_keep_ar=True,
40
+ freeze_midas_bn=True,
41
+ gpu='NULL',
42
+ img_size=[
43
+ 392,
44
+ 518,
45
+ ],
46
+ inverse_midas=False,
47
+ log_images_every=0.1,
48
+ max_depth=80,
49
+ max_temp=50.0,
50
+ max_translation=100,
51
+ memory_efficient=True,
52
+ midas_model_type='vitl',
53
+ min_depth=0.001,
54
+ min_temp=0.0212,
55
+ model='zoedepth',
56
+ n_attractors=[
57
+ 16,
58
+ 8,
59
+ 4,
60
+ 1,
61
+ ],
62
+ n_bins=64,
63
+ name='ZoeDepth',
64
+ notes='',
65
+ output_distribution='logbinomial',
66
+ prefetch=False,
67
+ pretrained_resource='local::./work_dir/DepthAnything_vitl.pt',
68
+ print_losses=False,
69
+ project='ZoeDepth',
70
+ random_crop=False,
71
+ random_translate=False,
72
+ root='.',
73
+ save_dir='',
74
+ shared_dict='NULL',
75
+ tags='',
76
+ train_midas=True,
77
+ translate_prob=0.2,
78
+ type='DA-ZoeDepth',
79
+ uid='NULL',
80
+ use_amp=False,
81
+ use_pretrained_midas=True,
82
+ use_shared_dict=False,
83
+ validate_every=0.25,
84
+ version_name='v1',
85
+ workers=16),
86
+ fine_branch=dict(
87
+ attractor_alpha=1000,
88
+ attractor_gamma=2,
89
+ attractor_kind='mean',
90
+ attractor_type='inv',
91
+ aug=True,
92
+ bin_centers_type='softplus',
93
+ bin_embedding_dim=128,
94
+ clip_grad=0.1,
95
+ dataset='nyu',
96
+ depth_anything=True,
97
+ distributed=True,
98
+ do_resize=False,
99
+ force_keep_ar=True,
100
+ freeze_midas_bn=True,
101
+ gpu='NULL',
102
+ img_size=[
103
+ 392,
104
+ 518,
105
+ ],
106
+ inverse_midas=False,
107
+ log_images_every=0.1,
108
+ max_depth=80,
109
+ max_temp=50.0,
110
+ max_translation=100,
111
+ memory_efficient=True,
112
+ midas_model_type='vitl',
113
+ min_depth=0.001,
114
+ min_temp=0.0212,
115
+ model='zoedepth',
116
+ n_attractors=[
117
+ 16,
118
+ 8,
119
+ 4,
120
+ 1,
121
+ ],
122
+ n_bins=64,
123
+ name='ZoeDepth',
124
+ notes='',
125
+ output_distribution='logbinomial',
126
+ prefetch=False,
127
+ pretrained_resource='local::./work_dir/DepthAnything_vitl.pt',
128
+ print_losses=False,
129
+ project='ZoeDepth',
130
+ random_crop=False,
131
+ random_translate=False,
132
+ root='.',
133
+ save_dir='',
134
+ shared_dict='NULL',
135
+ tags='',
136
+ train_midas=True,
137
+ translate_prob=0.2,
138
+ type='DA-ZoeDepth',
139
+ uid='NULL',
140
+ use_amp=False,
141
+ use_pretrained_midas=True,
142
+ use_shared_dict=False,
143
+ validate_every=0.25,
144
+ version_name='v1',
145
+ workers=16),
146
+ max_depth=80,
147
+ min_depth=0.001,
148
+ sigloss=dict(type='SILogLoss'),
149
+ target='coarse',
150
+ type='BaselinePretrain')
151
+ optim_wrapper = dict(
152
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
153
+ optimizer=dict(lr=4e-06, type='AdamW', weight_decay=0.01),
154
+ paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict()))
155
+ param_scheduler = dict(
156
+ base_momentum=0.85,
157
+ cycle_momentum=True,
158
+ div_factor=1,
159
+ final_div_factor=10000,
160
+ max_momentum=0.95,
161
+ pct_start=0.5,
162
+ three_phase=False)
163
+ project = 'patchfusion'
164
+ resume = False
165
+ tags = [
166
+ 'coarse',
167
+ 'da',
168
+ 'vitl',
169
+ ]
170
+ test_in_dataloader = dict(
171
+ batch_size=1,
172
+ dataset=dict(
173
+ data_root='./data/u4k',
174
+ max_depth=80,
175
+ min_depth=0.001,
176
+ mode='infer',
177
+ split='./data/u4k/splits/test.txt',
178
+ transform_cfg=dict(network_process_size=[
179
+ 384,
180
+ 512,
181
+ ]),
182
+ type='UnrealStereo4kDataset'),
183
+ num_workers=2)
184
+ test_out_dataloader = dict(
185
+ batch_size=1,
186
+ dataset=dict(
187
+ data_root='./data/u4k',
188
+ max_depth=80,
189
+ min_depth=0.001,
190
+ mode='infer',
191
+ split='./data/u4k/splits/test_out.txt',
192
+ transform_cfg=dict(network_process_size=[
193
+ 384,
194
+ 512,
195
+ ]),
196
+ type='UnrealStereo4kDataset'),
197
+ num_workers=2)
198
+ train_cfg = dict(
199
+ eval_start=0,
200
+ log_interval=100,
201
+ max_epochs=24,
202
+ save_checkpoint_interval=24,
203
+ train_log_img_interval=500,
204
+ val_interval=2,
205
+ val_log_img_interval=50,
206
+ val_type='epoch_base')
207
+ train_dataloader = dict(
208
+ batch_size=4,
209
+ dataset=dict(
210
+ data_root='./data/u4k',
211
+ max_depth=80,
212
+ min_depth=0.001,
213
+ mode='train',
214
+ resize_mode='depth-anything',
215
+ split='./data/u4k/splits/train.txt',
216
+ transform_cfg=dict(
217
+ degree=1.0,
218
+ network_process_size=[
219
+ 392,
220
+ 518,
221
+ ],
222
+ random_crop=True,
223
+ random_crop_size=(
224
+ 540,
225
+ 960,
226
+ )),
227
+ type='UnrealStereo4kDataset'),
228
+ num_workers=4)
229
+ val_dataloader = dict(
230
+ batch_size=1,
231
+ dataset=dict(
232
+ data_root='./data/u4k',
233
+ max_depth=80,
234
+ min_depth=0.001,
235
+ mode='infer',
236
+ resize_mode='depth-anything',
237
+ split='./data/u4k/splits/val.txt',
238
+ transform_cfg=dict(
239
+ degree=1.0,
240
+ network_process_size=[
241
+ 392,
242
+ 518,
243
+ ],
244
+ random_crop_size=(
245
+ 540,
246
+ 960,
247
+ )),
248
+ type='UnrealStereo4kDataset'),
249
+ num_workers=2)
250
+ work_dir = './work_dir/depthanything_vitl_u4k/coarse_pretrain'
251
+ zoe_depth_config = dict(
252
+ attractor_alpha=1000,
253
+ attractor_gamma=2,
254
+ attractor_kind='mean',
255
+ attractor_type='inv',
256
+ aug=True,
257
+ bin_centers_type='softplus',
258
+ bin_embedding_dim=128,
259
+ clip_grad=0.1,
260
+ dataset='nyu',
261
+ depth_anything=True,
262
+ distributed=True,
263
+ do_resize=False,
264
+ force_keep_ar=True,
265
+ freeze_midas_bn=True,
266
+ gpu='NULL',
267
+ img_size=[
268
+ 392,
269
+ 518,
270
+ ],
271
+ inverse_midas=False,
272
+ log_images_every=0.1,
273
+ max_depth=80,
274
+ max_temp=50.0,
275
+ max_translation=100,
276
+ memory_efficient=True,
277
+ midas_model_type='vitl',
278
+ min_depth=0.001,
279
+ min_temp=0.0212,
280
+ model='zoedepth',
281
+ n_attractors=[
282
+ 16,
283
+ 8,
284
+ 4,
285
+ 1,
286
+ ],
287
+ n_bins=64,
288
+ name='ZoeDepth',
289
+ notes='',
290
+ output_distribution='logbinomial',
291
+ prefetch=False,
292
+ pretrained_resource='local::./work_dir/DepthAnything_vitl.pt',
293
+ print_losses=False,
294
+ project='ZoeDepth',
295
+ random_crop=False,
296
+ random_translate=False,
297
+ root='.',
298
+ save_dir='',
299
+ shared_dict='NULL',
300
+ tags='',
301
+ train_midas=True,
302
+ translate_prob=0.2,
303
+ type='DA-ZoeDepth',
304
+ uid='NULL',
305
+ use_amp=False,
306
+ use_pretrained_midas=True,
307
+ use_shared_dict=False,
308
+ validate_every=0.25,
309
+ version_name='v1',
310
+ workers=16)
depthanything_vitl_u4k/fine_pretrain/20240315_140837.log ADDED
The diff for this file is too large to render. See raw diff
 
depthanything_vitl_u4k/fine_pretrain/checkpoint_24.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:37181232060bc2b0fd663cf3fc008dda37b262f680a915689f6e55f072648fc7
+ size 4020717194
depthanything_vitl_u4k/fine_pretrain/config.py ADDED
@@ -0,0 +1,314 @@
1
+ collect_input_args = [
2
+ 'image_lr',
3
+ 'crops_image_hr',
4
+ 'depth_gt',
5
+ 'crop_depths',
6
+ 'bboxs',
7
+ 'image_hr',
8
+ ]
9
+ convert_syncbn = True
10
+ debug = False
11
+ env_cfg = dict(
12
+ cudnn_benchmark=True,
13
+ dist_cfg=dict(backend='nccl'),
14
+ mp_cfg=dict(mp_start_method='forkserver'))
15
+ find_unused_parameters = True
16
+ general_dataloader = dict(
17
+ batch_size=1,
18
+ dataset=dict(
19
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
20
+ num_workers=2)
21
+ launcher = 'pytorch'
22
+ log_name = 'fine_pretrain'
23
+ max_depth = 80
24
+ min_depth = 0.001
25
+ model = dict(
26
+ coarse_branch=dict(
27
+ attractor_alpha=1000,
28
+ attractor_gamma=2,
29
+ attractor_kind='mean',
30
+ attractor_type='inv',
31
+ aug=True,
32
+ bin_centers_type='softplus',
33
+ bin_embedding_dim=128,
34
+ clip_grad=0.1,
35
+ dataset='nyu',
36
+ depth_anything=True,
37
+ distributed=True,
38
+ do_resize=False,
39
+ force_keep_ar=True,
40
+ freeze_midas_bn=True,
41
+ gpu='NULL',
42
+ img_size=[
43
+ 392,
44
+ 518,
45
+ ],
46
+ inverse_midas=False,
47
+ log_images_every=0.1,
48
+ max_depth=80,
49
+ max_temp=50.0,
50
+ max_translation=100,
51
+ memory_efficient=True,
52
+ midas_model_type='vitl',
53
+ min_depth=0.001,
54
+ min_temp=0.0212,
55
+ model='zoedepth',
56
+ n_attractors=[
57
+ 16,
58
+ 8,
59
+ 4,
60
+ 1,
61
+ ],
62
+ n_bins=64,
63
+ name='ZoeDepth',
64
+ notes='',
65
+ output_distribution='logbinomial',
66
+ prefetch=False,
67
+ pretrained_resource='local::./work_dir/DepthAnything_vitl.pt',
68
+ print_losses=False,
69
+ project='ZoeDepth',
70
+ random_crop=False,
71
+ random_translate=False,
72
+ root='.',
73
+ save_dir='',
74
+ shared_dict='NULL',
75
+ tags='',
76
+ train_midas=True,
77
+ translate_prob=0.2,
78
+ type='DA-ZoeDepth',
79
+ uid='NULL',
80
+ use_amp=False,
81
+ use_pretrained_midas=True,
82
+ use_shared_dict=False,
83
+ validate_every=0.25,
84
+ version_name='v1',
85
+ workers=16),
86
+ fine_branch=dict(
87
+ attractor_alpha=1000,
88
+ attractor_gamma=2,
89
+ attractor_kind='mean',
90
+ attractor_type='inv',
91
+ aug=True,
92
+ bin_centers_type='softplus',
93
+ bin_embedding_dim=128,
94
+ clip_grad=0.1,
95
+ dataset='nyu',
96
+ depth_anything=True,
97
+ distributed=True,
98
+ do_resize=False,
99
+ force_keep_ar=True,
100
+ freeze_midas_bn=True,
101
+ gpu='NULL',
102
+ img_size=[
103
+ 392,
104
+ 518,
105
+ ],
106
+ inverse_midas=False,
107
+ log_images_every=0.1,
108
+ max_depth=80,
109
+ max_temp=50.0,
110
+ max_translation=100,
111
+ memory_efficient=True,
112
+ midas_model_type='vitl',
113
+ min_depth=0.001,
114
+ min_temp=0.0212,
115
+ model='zoedepth',
116
+ n_attractors=[
117
+ 16,
118
+ 8,
119
+ 4,
120
+ 1,
121
+ ],
122
+ n_bins=64,
123
+ name='ZoeDepth',
124
+ notes='',
125
+ output_distribution='logbinomial',
126
+ prefetch=False,
127
+ pretrained_resource='local::./work_dir/DepthAnything_vitl.pt',
128
+ print_losses=False,
129
+ project='ZoeDepth',
130
+ random_crop=False,
131
+ random_translate=False,
132
+ root='.',
133
+ save_dir='',
134
+ shared_dict='NULL',
135
+ tags='',
136
+ train_midas=True,
137
+ translate_prob=0.2,
138
+ type='DA-ZoeDepth',
139
+ uid='NULL',
140
+ use_amp=False,
141
+ use_pretrained_midas=True,
142
+ use_shared_dict=False,
143
+ validate_every=0.25,
144
+ version_name='v1',
145
+ workers=16),
146
+ max_depth=80,
147
+ min_depth=0.001,
148
+ patch_process_shape=(
149
+ 392,
150
+ 518,
151
+ ),
152
+ sigloss=dict(type='SILogLoss'),
153
+ target='fine',
154
+ type='BaselinePretrain')
155
+ optim_wrapper = dict(
156
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
157
+ optimizer=dict(lr=4e-06, type='AdamW', weight_decay=0.01),
158
+ paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict()))
159
+ param_scheduler = dict(
160
+ base_momentum=0.85,
161
+ cycle_momentum=True,
162
+ div_factor=1,
163
+ final_div_factor=10000,
164
+ max_momentum=0.95,
165
+ pct_start=0.5,
166
+ three_phase=False)
167
+ project = 'patchfusion'
168
+ resume = False
169
+ tags = [
170
+ 'fine',
171
+ 'da',
172
+ 'vitl',
173
+ ]
174
+ test_in_dataloader = dict(
175
+ batch_size=1,
176
+ dataset=dict(
177
+ data_root='./data/u4k',
178
+ max_depth=80,
179
+ min_depth=0.001,
180
+ mode='infer',
181
+ split='./data/u4k/splits/test.txt',
182
+ transform_cfg=dict(network_process_size=[
183
+ 384,
184
+ 512,
185
+ ]),
186
+ type='UnrealStereo4kDataset'),
187
+ num_workers=2)
188
+ test_out_dataloader = dict(
189
+ batch_size=1,
190
+ dataset=dict(
191
+ data_root='./data/u4k',
192
+ max_depth=80,
193
+ min_depth=0.001,
194
+ mode='infer',
195
+ split='./data/u4k/splits/test_out.txt',
196
+ transform_cfg=dict(network_process_size=[
197
+ 384,
198
+ 512,
199
+ ]),
200
+ type='UnrealStereo4kDataset'),
201
+ num_workers=2)
202
+ train_cfg = dict(
203
+ eval_start=0,
204
+ log_interval=100,
205
+ max_epochs=24,
206
+ save_checkpoint_interval=24,
207
+ train_log_img_interval=500,
208
+ val_interval=2,
209
+ val_log_img_interval=50,
210
+ val_type='epoch_base')
211
+ train_dataloader = dict(
212
+ batch_size=4,
213
+ dataset=dict(
214
+ data_root='./data/u4k',
215
+ max_depth=80,
216
+ min_depth=0.001,
217
+ mode='train',
218
+ resize_mode='depth-anything',
219
+ split='./data/u4k/splits/train.txt',
220
+ transform_cfg=dict(
221
+ degree=1.0,
222
+ network_process_size=[
223
+ 392,
224
+ 518,
225
+ ],
226
+ random_crop=True,
227
+ random_crop_size=(
228
+ 540,
229
+ 960,
230
+ )),
231
+ type='UnrealStereo4kDataset'),
232
+ num_workers=4)
233
+ val_dataloader = dict(
234
+ batch_size=1,
235
+ dataset=dict(
236
+ data_root='./data/u4k',
237
+ max_depth=80,
238
+ min_depth=0.001,
239
+ mode='infer',
240
+ resize_mode='depth-anything',
241
+ split='./data/u4k/splits/val.txt',
242
+ transform_cfg=dict(
243
+ degree=1.0,
244
+ network_process_size=[
245
+ 392,
246
+ 518,
247
+ ],
248
+ random_crop_size=(
249
+ 540,
250
+ 960,
251
+ )),
252
+ type='UnrealStereo4kDataset'),
253
+ num_workers=2)
254
+ work_dir = './work_dir/depthanything_vitl_u4k/fine_pretrain'
255
+ zoe_depth_config = dict(
256
+ attractor_alpha=1000,
257
+ attractor_gamma=2,
258
+ attractor_kind='mean',
259
+ attractor_type='inv',
260
+ aug=True,
261
+ bin_centers_type='softplus',
262
+ bin_embedding_dim=128,
263
+ clip_grad=0.1,
264
+ dataset='nyu',
265
+ depth_anything=True,
266
+ distributed=True,
267
+ do_resize=False,
268
+ force_keep_ar=True,
269
+ freeze_midas_bn=True,
270
+ gpu='NULL',
271
+ img_size=[
272
+ 392,
273
+ 518,
274
+ ],
275
+ inverse_midas=False,
276
+ log_images_every=0.1,
277
+ max_depth=80,
278
+ max_temp=50.0,
279
+ max_translation=100,
280
+ memory_efficient=True,
281
+ midas_model_type='vitl',
282
+ min_depth=0.001,
283
+ min_temp=0.0212,
284
+ model='zoedepth',
285
+ n_attractors=[
286
+ 16,
287
+ 8,
288
+ 4,
289
+ 1,
290
+ ],
291
+ n_bins=64,
292
+ name='ZoeDepth',
293
+ notes='',
294
+ output_distribution='logbinomial',
295
+ prefetch=False,
296
+ pretrained_resource='local::./work_dir/DepthAnything_vitl.pt',
297
+ print_losses=False,
298
+ project='ZoeDepth',
299
+ random_crop=False,
300
+ random_translate=False,
301
+ root='.',
302
+ save_dir='',
303
+ shared_dict='NULL',
304
+ tags='',
305
+ train_midas=True,
306
+ translate_prob=0.2,
307
+ type='DA-ZoeDepth',
308
+ uid='NULL',
309
+ use_amp=False,
310
+ use_pretrained_midas=True,
311
+ use_shared_dict=False,
312
+ validate_every=0.25,
313
+ version_name='v1',
314
+ workers=16)
depthanything_vitl_u4k/patchfusion/20240315_175237.log ADDED
The diff for this file is too large to render. See raw diff
 
depthanything_vitl_u4k/patchfusion/checkpoint_16.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:79e530dd2ad7587b21b2b778b60ba0c459621969ed8b015b96987124c0747e10
+ size 1128275629
depthanything_vitl_u4k/patchfusion/config.py ADDED
@@ -0,0 +1,347 @@
1
+ collect_input_args = [
2
+ 'image_lr',
3
+ 'crops_image_hr',
4
+ 'depth_gt',
5
+ 'crop_depths',
6
+ 'bboxs',
7
+ 'image_hr',
8
+ ]
9
+ convert_syncbn = True
10
+ debug = True
11
+ env_cfg = dict(
12
+ cudnn_benchmark=True,
13
+ dist_cfg=dict(backend='nccl'),
14
+ mp_cfg=dict(mp_start_method='forkserver'))
15
+ find_unused_parameters = True
16
+ general_dataloader = dict(
17
+ batch_size=1,
18
+ dataset=dict(
19
+ dataset_name='',
20
+ gt_dir=None,
21
+ network_process_size=(
22
+ 392,
23
+ 518,
24
+ ),
25
+ resize_mode='depth-anything',
26
+ rgb_image_dir='',
27
+ type='ImageDataset'),
28
+ num_workers=2)
29
+ launcher = 'pytorch'
30
+ log_name = 'patchfusion'
31
+ max_depth = 80
32
+ min_depth = 0.001
33
+ model = dict(
34
+ coarse_branch=dict(
35
+ attractor_alpha=1000,
36
+ attractor_gamma=2,
37
+ attractor_kind='mean',
38
+ attractor_type='inv',
39
+ aug=True,
40
+ bin_centers_type='softplus',
41
+ bin_embedding_dim=128,
42
+ clip_grad=0.1,
43
+ dataset='nyu',
44
+ depth_anything=True,
45
+ distributed=True,
46
+ do_resize=False,
47
+ force_keep_ar=True,
48
+ freeze_midas_bn=True,
49
+ gpu='NULL',
50
+ img_size=[
51
+ 392,
52
+ 518,
53
+ ],
54
+ inverse_midas=False,
55
+ log_images_every=0.1,
56
+ max_depth=80,
57
+ max_temp=50.0,
58
+ max_translation=100,
59
+ memory_efficient=True,
60
+ midas_model_type='vitl',
61
+ min_depth=0.001,
62
+ min_temp=0.0212,
63
+ model='zoedepth',
64
+ n_attractors=[
65
+ 16,
66
+ 8,
67
+ 4,
68
+ 1,
69
+ ],
70
+ n_bins=64,
71
+ name='ZoeDepth',
72
+ notes='',
73
+ output_distribution='logbinomial',
74
+ prefetch=False,
75
+ pretrained_resource='local::./work_dir/DepthAnything_vitl.pt',
76
+ print_losses=False,
77
+ project='ZoeDepth',
78
+ random_crop=False,
79
+ random_translate=False,
80
+ root='.',
81
+ save_dir='',
82
+ shared_dict='NULL',
83
+ tags='',
84
+ train_midas=True,
85
+ translate_prob=0.2,
86
+ type='DA-ZoeDepth',
87
+ uid='NULL',
88
+ use_amp=False,
89
+ use_pretrained_midas=True,
90
+ use_shared_dict=False,
91
+ validate_every=0.25,
92
+ version_name='v1',
93
+ workers=16),
94
+ fine_branch=dict(
95
+ attractor_alpha=1000,
96
+ attractor_gamma=2,
97
+ attractor_kind='mean',
98
+ attractor_type='inv',
99
+ aug=True,
100
+ bin_centers_type='softplus',
101
+ bin_embedding_dim=128,
102
+ clip_grad=0.1,
103
+ dataset='nyu',
104
+ depth_anything=True,
105
+ distributed=True,
106
+ do_resize=False,
107
+ force_keep_ar=True,
108
+ freeze_midas_bn=True,
109
+ gpu='NULL',
110
+ img_size=[
111
+ 392,
112
+ 518,
113
+ ],
114
+ inverse_midas=False,
115
+ log_images_every=0.1,
116
+ max_depth=80,
117
+ max_temp=50.0,
118
+ max_translation=100,
119
+ memory_efficient=True,
120
+ midas_model_type='vitl',
121
+ min_depth=0.001,
122
+ min_temp=0.0212,
123
+ model='zoedepth',
124
+ n_attractors=[
125
+ 16,
126
+ 8,
127
+ 4,
128
+ 1,
129
+ ],
130
+ n_bins=64,
131
+ name='ZoeDepth',
132
+ notes='',
133
+ output_distribution='logbinomial',
134
+ prefetch=False,
135
+ pretrained_resource='local::./work_dir/DepthAnything_vitl.pt',
136
+ print_losses=False,
137
+ project='ZoeDepth',
138
+ random_crop=False,
139
+ random_translate=False,
140
+ root='.',
141
+ save_dir='',
142
+ shared_dict='NULL',
143
+ tags='',
144
+ train_midas=True,
145
+ translate_prob=0.2,
146
+ type='DA-ZoeDepth',
147
+ uid='NULL',
148
+ use_amp=False,
149
+ use_pretrained_midas=True,
150
+ use_shared_dict=False,
151
+ validate_every=0.25,
152
+ version_name='v1',
153
+ workers=16),
154
+ guided_fusion=dict(
155
+ g2l=True,
156
+ in_channels=[
157
+ 32,
158
+ 256,
159
+ 256,
160
+ 256,
161
+ 256,
162
+ 256,
163
+ ],
164
+ n_channels=5,
165
+ num_patches=[
166
+ 203056,
167
+ 66304,
168
+ 16576,
169
+ 4144,
170
+ 1036,
171
+ 266,
172
+ ],
173
+ patch_process_shape=(
174
+ 392,
175
+ 518,
176
+ ),
177
+ type='GuidedFusionPatchFusion'),
178
+ max_depth=80,
179
+ min_depth=0.001,
180
+ patch_process_shape=(
181
+ 392,
182
+ 518,
183
+ ),
184
+ pretrain_model=[
185
+ './work_dir/depthanything_vitl_u4k/coarse_pretrain/checkpoint_24.pth',
186
+ './work_dir/depthanything_vitl_u4k/fine_pretrain/checkpoint_24.pth',
187
+ ],
188
+ sigloss=dict(type='SILogLoss'),
189
+ type='PatchFusion')
190
+ optim_wrapper = dict(
191
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
192
+ optimizer=dict(lr=0.0001, type='AdamW', weight_decay=0.001),
193
+ paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict()))
194
+ param_scheduler = dict(
195
+ base_momentum=0.85,
196
+ cycle_momentum=True,
197
+ div_factor=10,
198
+ final_div_factor=10000,
199
+ max_momentum=0.95,
200
+ pct_start=0.25,
201
+ three_phase=False)
202
+ project = 'patchfusion'
203
+ resume = False
204
+ tags = [
205
+ 'patchfusion',
206
+ 'da',
207
+ 'vitl',
208
+ ]
209
+ test_in_dataloader = dict(
210
+ batch_size=1,
211
+ dataset=dict(
212
+ data_root='./data/u4k',
213
+ max_depth=80,
214
+ min_depth=0.001,
215
+ mode='infer',
216
+ split='./data/u4k/splits/test.txt',
217
+ transform_cfg=dict(network_process_size=[
218
+ 384,
219
+ 512,
220
+ ]),
221
+ type='UnrealStereo4kDataset'),
222
+ num_workers=2)
223
+ test_out_dataloader = dict(
224
+ batch_size=1,
225
+ dataset=dict(
226
+ data_root='./data/u4k',
227
+ max_depth=80,
228
+ min_depth=0.001,
229
+ mode='infer',
230
+ split='./data/u4k/splits/test_out.txt',
231
+ transform_cfg=dict(network_process_size=[
232
+ 384,
233
+ 512,
234
+ ]),
235
+ type='UnrealStereo4kDataset'),
236
+ num_workers=2)
237
+ train_cfg = dict(
238
+ eval_start=0,
239
+ log_interval=100,
240
+ max_epochs=16,
241
+ save_checkpoint_interval=16,
242
+ train_log_img_interval=500,
243
+ val_interval=2,
244
+ val_log_img_interval=50,
245
+ val_type='epoch_base')
246
+ train_dataloader = dict(
247
+ batch_size=4,
248
+ dataset=dict(
249
+ data_root='./data/u4k',
250
+ max_depth=80,
251
+ min_depth=0.001,
252
+ mode='train',
253
+ resize_mode='depth-anything',
254
+ split='./data/u4k/splits/train.txt',
255
+ transform_cfg=dict(
256
+ degree=1.0,
257
+ network_process_size=[
258
+ 392,
259
+ 518,
260
+ ],
261
+ random_crop=True,
262
+ random_crop_size=(
263
+ 540,
264
+ 960,
265
+ )),
266
+ type='UnrealStereo4kDataset'),
267
+ num_workers=4)
268
+ val_dataloader = dict(
269
+ batch_size=1,
270
+ dataset=dict(
271
+ data_root='./data/u4k',
272
+ max_depth=80,
273
+ min_depth=0.001,
274
+ mode='infer',
275
+ resize_mode='depth-anything',
276
+ split='./data/u4k/splits/val.txt',
277
+ transform_cfg=dict(
278
+ network_process_size=[
279
+ 392,
280
+ 518,
281
+ ], random_crop_size=(
282
+ 540,
283
+ 960,
284
+ )),
285
+ type='UnrealStereo4kDataset'),
286
+ num_workers=2)
287
+ work_dir = './work_dir/depthanything_vitl_u4k/patchfusion'
288
+ zoe_depth_config = dict(
289
+ attractor_alpha=1000,
290
+ attractor_gamma=2,
291
+ attractor_kind='mean',
292
+ attractor_type='inv',
293
+ aug=True,
294
+ bin_centers_type='softplus',
295
+ bin_embedding_dim=128,
296
+ clip_grad=0.1,
297
+ dataset='nyu',
298
+ depth_anything=True,
299
+ distributed=True,
300
+ do_resize=False,
301
+ force_keep_ar=True,
302
+ freeze_midas_bn=True,
303
+ gpu='NULL',
304
+ img_size=[
305
+ 392,
306
+ 518,
307
+ ],
308
+ inverse_midas=False,
309
+ log_images_every=0.1,
310
+ max_depth=80,
311
+ max_temp=50.0,
312
+ max_translation=100,
313
+ memory_efficient=True,
314
+ midas_model_type='vitl',
315
+ min_depth=0.001,
316
+ min_temp=0.0212,
317
+ model='zoedepth',
318
+ n_attractors=[
319
+ 16,
320
+ 8,
321
+ 4,
322
+ 1,
323
+ ],
324
+ n_bins=64,
325
+ name='ZoeDepth',
326
+ notes='',
327
+ output_distribution='logbinomial',
328
+ prefetch=False,
329
+ pretrained_resource='local::./work_dir/DepthAnything_vitl.pt',
330
+ print_losses=False,
331
+ project='ZoeDepth',
332
+ random_crop=False,
333
+ random_translate=False,
334
+ root='.',
335
+ save_dir='',
336
+ shared_dict='NULL',
337
+ tags='',
338
+ train_midas=True,
339
+ translate_prob=0.2,
340
+ type='DA-ZoeDepth',
341
+ uid='NULL',
342
+ use_amp=False,
343
+ use_pretrained_midas=True,
344
+ use_shared_dict=False,
345
+ validate_every=0.25,
346
+ version_name='v1',
347
+ workers=16)
depthanything_vits_u4k/coarse_pretrain/20240315_002030.log ADDED
@@ -0,0 +1,1024 @@
1
+ 2024/03/15 00:20:41 - patchstitcher - INFO -
2
+ ------------------------------------------------------------
3
+ System environment:
4
+ sys.platform: linux
5
+ Python: 3.8.18 | packaged by conda-forge | (default, Oct 10 2023, 15:44:36) [GCC 12.3.0]
6
+ CUDA available: True
7
+ numpy_random_seed: 621
8
+ GPU 0,1,2,3: NVIDIA A100-SXM4-80GB
9
+ CUDA_HOME: /sw/rl9g/cuda/11.8/rl9_binary
10
+ NVCC: Cuda compilation tools, release 11.8, V11.8.89
11
+ GCC: gcc (GCC) 11.3.1 20220421 (Red Hat 11.3.1-2)
12
+ PyTorch: 2.1.2
13
+ PyTorch compiling details: PyTorch built with:
14
+ - GCC 9.3
15
+ - C++ Version: 201703
16
+ - Intel(R) oneAPI Math Kernel Library Version 2022.1-Product Build 20220311 for Intel(R) 64 architecture applications
17
+ - Intel(R) MKL-DNN v3.1.1 (Git Hash 64f6bcbcbab628e96f33a62c3e975f8535a7bde4)
18
+ - OpenMP 201511 (a.k.a. OpenMP 4.5)
19
+ - LAPACK is enabled (usually provided by MKL)
20
+ - NNPACK is enabled
21
+ - CPU capability usage: AVX2
22
+ - CUDA Runtime 11.8
23
+ - NVCC architecture flags: -gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_61,code=sm_61;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86;-gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_90,code=sm_90;-gencode;arch=compute_37,code=compute_37
24
+ - CuDNN 8.7
25
+ - Magma 2.6.1
26
+ - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=11.8, CUDNN_VERSION=8.7.0, CXX_COMPILER=/opt/rh/devtoolset-9/root/usr/bin/c++, CXX_FLAGS= -D_GLIBCXX_USE_CXX11_ABI=0 -fabi-version=11 -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -DNDEBUG -DUSE_KINETO -DLIBKINETO_NOROCTRACER -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wall -Wextra -Werror=return-type -Werror=non-virtual-dtor -Werror=bool-operation -Wnarrowing -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-unused-parameter -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-stringop-overflow -Wno-psabi -Wno-error=pedantic -Wno-error=old-style-cast -Wno-invalid-partial-specialization -Wno-unused-private-field -Wno-aligned-allocation-unavailable -Wno-missing-braces -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Werror=cast-function-type -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_DISABLE_GPU_ASSERTS=ON, TORCH_VERSION=2.1.2, USE_CUDA=ON, USE_CUDNN=ON, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF,
27
+
28
+ TorchVision: 0.16.2
29
+ OpenCV: 4.8.1
30
+ MMEngine: 0.10.2
31
+
32
+ Runtime environment:
33
+ cudnn_benchmark: True
34
+ mp_cfg: {'mp_start_method': 'forkserver'}
35
+ dist_cfg: {'backend': 'nccl'}
36
+ seed: 621
37
+ Distributed launcher: pytorch
38
+ Distributed training: True
39
+ GPU number: 4
40
+ ------------------------------------------------------------
41
+
42
+ 2024/03/15 00:20:41 - patchstitcher - INFO - Config:
43
+ collect_input_args = [
44
+ 'image_lr',
45
+ 'crops_image_hr',
46
+ 'depth_gt',
47
+ 'crop_depths',
48
+ 'bboxs',
49
+ 'image_hr',
50
+ ]
51
+ convert_syncbn = True
52
+ debug = False
53
+ env_cfg = dict(
54
+ cudnn_benchmark=True,
55
+ dist_cfg=dict(backend='nccl'),
56
+ mp_cfg=dict(mp_start_method='forkserver'))
57
+ find_unused_parameters = True
58
+ general_dataloader = dict(
59
+ batch_size=1,
60
+ dataset=dict(
61
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
62
+ num_workers=2)
63
+ launcher = 'pytorch'
64
+ log_name = 'coarse_pretrain'
65
+ max_depth = 80
66
+ min_depth = 0.001
67
+ model = dict(
68
+ coarse_branch=dict(
69
+ attractor_alpha=1000,
70
+ attractor_gamma=2,
71
+ attractor_kind='mean',
72
+ attractor_type='inv',
73
+ aug=True,
74
+ bin_centers_type='softplus',
75
+ bin_embedding_dim=128,
76
+ clip_grad=0.1,
77
+ dataset='nyu',
78
+ depth_anything=True,
79
+ distributed=True,
80
+ do_resize=False,
81
+ force_keep_ar=True,
82
+ freeze_midas_bn=True,
83
+ gpu='NULL',
84
+ img_size=[
85
+ 392,
86
+ 518,
87
+ ],
88
+ inverse_midas=False,
89
+ log_images_every=0.1,
90
+ max_depth=80,
91
+ max_temp=50.0,
92
+ max_translation=100,
93
+ memory_efficient=True,
94
+ midas_model_type='vits',
95
+ min_depth=0.001,
96
+ min_temp=0.0212,
97
+ model='zoedepth',
98
+ n_attractors=[
99
+ 16,
100
+ 8,
101
+ 4,
102
+ 1,
103
+ ],
104
+ n_bins=64,
105
+ name='ZoeDepth',
106
+ notes='',
107
+ output_distribution='logbinomial',
108
+ prefetch=False,
109
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
110
+ print_losses=False,
111
+ project='ZoeDepth',
112
+ random_crop=False,
113
+ random_translate=False,
114
+ root='.',
115
+ save_dir='',
116
+ shared_dict='NULL',
117
+ tags='',
118
+ train_midas=True,
119
+ translate_prob=0.2,
120
+ type='DA-ZoeDepth',
121
+ uid='NULL',
122
+ use_amp=False,
123
+ use_pretrained_midas=True,
124
+ use_shared_dict=False,
125
+ validate_every=0.25,
126
+ version_name='v1',
127
+ workers=16),
128
+ fine_branch=dict(
129
+ attractor_alpha=1000,
130
+ attractor_gamma=2,
131
+ attractor_kind='mean',
132
+ attractor_type='inv',
133
+ aug=True,
134
+ bin_centers_type='softplus',
135
+ bin_embedding_dim=128,
136
+ clip_grad=0.1,
137
+ dataset='nyu',
138
+ depth_anything=True,
139
+ distributed=True,
140
+ do_resize=False,
141
+ force_keep_ar=True,
142
+ freeze_midas_bn=True,
143
+ gpu='NULL',
144
+ img_size=[
145
+ 392,
146
+ 518,
147
+ ],
148
+ inverse_midas=False,
149
+ log_images_every=0.1,
150
+ max_depth=80,
151
+ max_temp=50.0,
152
+ max_translation=100,
153
+ memory_efficient=True,
154
+ midas_model_type='vits',
155
+ min_depth=0.001,
156
+ min_temp=0.0212,
157
+ model='zoedepth',
158
+ n_attractors=[
159
+ 16,
160
+ 8,
161
+ 4,
162
+ 1,
163
+ ],
164
+ n_bins=64,
165
+ name='ZoeDepth',
166
+ notes='',
167
+ output_distribution='logbinomial',
168
+ prefetch=False,
169
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
170
+ print_losses=False,
171
+ project='ZoeDepth',
172
+ random_crop=False,
173
+ random_translate=False,
174
+ root='.',
175
+ save_dir='',
176
+ shared_dict='NULL',
177
+ tags='',
178
+ train_midas=True,
179
+ translate_prob=0.2,
180
+ type='DA-ZoeDepth',
181
+ uid='NULL',
182
+ use_amp=False,
183
+ use_pretrained_midas=True,
184
+ use_shared_dict=False,
185
+ validate_every=0.25,
186
+ version_name='v1',
187
+ workers=16),
188
+ max_depth=80,
189
+ min_depth=0.001,
190
+ sigloss=dict(type='SILogLoss'),
191
+ target='coarse',
192
+ type='BaselinePretrain')
193
+ optim_wrapper = dict(
194
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
195
+ optimizer=dict(lr=4e-06, type='AdamW', weight_decay=0.01),
196
+ paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict()))
197
+ param_scheduler = dict(
198
+ base_momentum=0.85,
199
+ cycle_momentum=True,
200
+ div_factor=1,
201
+ final_div_factor=10000,
202
+ max_momentum=0.95,
203
+ pct_start=0.5,
204
+ three_phase=False)
205
+ project = 'patchfusion'
206
+ tags = [
207
+ 'coarse',
208
+ 'da',
209
+ 'vits',
210
+ ]
211
+ test_in_dataloader = dict(
212
+ batch_size=1,
213
+ dataset=dict(
214
+ data_root='./data/u4k',
215
+ max_depth=80,
216
+ min_depth=0.001,
217
+ mode='infer',
218
+ split='./data/u4k/splits/test.txt',
219
+ transform_cfg=dict(network_process_size=[
220
+ 384,
221
+ 512,
222
+ ]),
223
+ type='UnrealStereo4kDataset'),
224
+ num_workers=2)
225
+ test_out_dataloader = dict(
226
+ batch_size=1,
227
+ dataset=dict(
228
+ data_root='./data/u4k',
229
+ max_depth=80,
230
+ min_depth=0.001,
231
+ mode='infer',
232
+ split='./data/u4k/splits/test_out.txt',
233
+ transform_cfg=dict(network_process_size=[
234
+ 384,
235
+ 512,
236
+ ]),
237
+ type='UnrealStereo4kDataset'),
238
+ num_workers=2)
239
+ train_cfg = dict(
240
+ eval_start=0,
241
+ log_interval=100,
242
+ max_epochs=24,
243
+ save_checkpoint_interval=24,
244
+ train_log_img_interval=100,
245
+ val_interval=2,
246
+ val_log_img_interval=50,
247
+ val_type='epoch_base')
248
+ train_dataloader = dict(
249
+ batch_size=4,
250
+ dataset=dict(
251
+ data_root='./data/u4k',
252
+ max_depth=80,
253
+ min_depth=0.001,
254
+ mode='train',
255
+ resize_mode='depth-anything',
256
+ split='./data/u4k/splits/train.txt',
257
+ transform_cfg=dict(
258
+ degree=1.0, network_process_size=[
259
+ 392,
260
+ 518,
261
+ ], random_crop=True),
262
+ type='UnrealStereo4kDataset'),
263
+ num_workers=4)
264
+ val_dataloader = dict(
265
+ batch_size=1,
266
+ dataset=dict(
267
+ data_root='./data/u4k',
268
+ max_depth=80,
269
+ min_depth=0.001,
270
+ mode='infer',
271
+ resize_mode='depth-anything',
272
+ split='./data/u4k/splits/val.txt',
273
+ transform_cfg=dict(degree=1.0, network_process_size=[
274
+ 392,
275
+ 518,
276
+ ]),
277
+ type='UnrealStereo4kDataset'),
278
+ num_workers=2)
279
+ work_dir = './work_dir/depthanything_vits_u4k/coarse_pretrain'
280
+ zoe_depth_config = dict(
281
+ attractor_alpha=1000,
282
+ attractor_gamma=2,
283
+ attractor_kind='mean',
284
+ attractor_type='inv',
285
+ aug=True,
286
+ bin_centers_type='softplus',
287
+ bin_embedding_dim=128,
288
+ clip_grad=0.1,
289
+ dataset='nyu',
290
+ depth_anything=True,
291
+ distributed=True,
292
+ do_resize=False,
293
+ force_keep_ar=True,
294
+ freeze_midas_bn=True,
295
+ gpu='NULL',
296
+ img_size=[
297
+ 392,
298
+ 518,
299
+ ],
300
+ inverse_midas=False,
301
+ log_images_every=0.1,
302
+ max_depth=80,
303
+ max_temp=50.0,
304
+ max_translation=100,
305
+ memory_efficient=True,
306
+ midas_model_type='vits',
307
+ min_depth=0.001,
308
+ min_temp=0.0212,
309
+ model='zoedepth',
310
+ n_attractors=[
311
+ 16,
312
+ 8,
313
+ 4,
314
+ 1,
315
+ ],
316
+ n_bins=64,
317
+ name='ZoeDepth',
318
+ notes='',
319
+ output_distribution='logbinomial',
320
+ prefetch=False,
321
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
322
+ print_losses=False,
323
+ project='ZoeDepth',
324
+ random_crop=False,
325
+ random_translate=False,
326
+ root='.',
327
+ save_dir='',
328
+ shared_dict='NULL',
329
+ tags='',
330
+ train_midas=True,
331
+ translate_prob=0.2,
332
+ type='DA-ZoeDepth',
333
+ uid='NULL',
334
+ use_amp=False,
335
+ use_pretrained_midas=True,
336
+ use_shared_dict=False,
337
+ validate_every=0.25,
338
+ version_name='v1',
339
+ workers=16)
340
+
341
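The dump above is the full training configuration in MMEngine's Python-config style (MMEngine 0.10.2 is listed in the environment report): the coarse ZoeDepth branch on a Depth Anything ViT-S backbone, an AdamW optimizer wrapper with gradient clipping, one-cycle-style scheduler fields, and the UnrealStereo4K dataloaders. Note that the 392x518 network_process_size is an exact multiple of the ViT patch size 14 (28x37 patches), matching the 14x14 patch embedding printed further down. A minimal sketch of loading and inspecting such a config, assuming an MMEngine `Config` file whose path below is only a placeholder:

```python
# Hypothetical sketch: load an MMEngine-style Python config like the dump above.
# The file path is a placeholder; only the keys and values shown come from this log.
from mmengine.config import Config

cfg = Config.fromfile('configs/coarse_pretrain.py')  # placeholder path, not recorded in the log
print(cfg.model.type)               # 'BaselinePretrain'
print(cfg.optim_wrapper.optimizer)  # lr=4e-06, type='AdamW', weight_decay=0.01
print(cfg.train_dataloader.dataset.transform_cfg.network_process_size)  # [392, 518]
```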
+ 2024/03/15 00:20:41 - patchstitcher - INFO - Loading deepnet from local::./work_dir/DepthAnything_vits.pt
342
+ 2024/03/15 00:20:41 - patchstitcher - INFO - Current zoedepth.core.prep.resizer is <class 'torch.nn.modules.linear.Identity'>
343
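The `pretrained_resource='local::./work_dir/DepthAnything_vits.pt'` entry and the "Loading deepnet from local::..." line above use a `local::<path>` convention for on-disk checkpoints. A minimal sketch of that idea, under the assumption (not verifiable from this log alone) that the prefix is simply stripped before the checkpoint is read:

```python
import torch

# Assumed behaviour of a "local::<path>" pretrained_resource: strip the prefix and
# load the checkpoint from disk. The actual loader in this codebase is not shown here.
resource = 'local::./work_dir/DepthAnything_vits.pt'
checkpoint = torch.load(resource.removeprefix('local::'), map_location='cpu')
```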
+ 2024/03/15 00:20:42 - patchstitcher - INFO - DistributedDataParallel(
344
+ (module): BaselinePretrain(
345
+ (coarse_branch): ZoeDepth(
346
+ (core): DepthAnythingCore(
347
+ (core): DPT_DINOv2(
348
+ (pretrained): DinoVisionTransformer(
349
+ (patch_embed): PatchEmbed(
350
+ (proj): Conv2d(3, 384, kernel_size=(14, 14), stride=(14, 14))
351
+ (norm): Identity()
352
+ )
353
+ (blocks): ModuleList(
354
+ (0-11): 12 x NestedTensorBlock(
355
+ (norm1): LayerNorm((384,), eps=1e-06, elementwise_affine=True)
356
+ (attn): MemEffAttention(
357
+ (qkv): Linear(in_features=384, out_features=1152, bias=True)
358
+ (attn_drop): Dropout(p=0.0, inplace=False)
359
+ (proj): Linear(in_features=384, out_features=384, bias=True)
360
+ (proj_drop): Dropout(p=0.0, inplace=False)
361
+ )
362
+ (ls1): LayerScale()
363
+ (drop_path1): Identity()
364
+ (norm2): LayerNorm((384,), eps=1e-06, elementwise_affine=True)
365
+ (mlp): Mlp(
366
+ (fc1): Linear(in_features=384, out_features=1536, bias=True)
367
+ (act): GELU(approximate='none')
368
+ (fc2): Linear(in_features=1536, out_features=384, bias=True)
369
+ (drop): Dropout(p=0.0, inplace=False)
370
+ )
371
+ (ls2): LayerScale()
372
+ (drop_path2): Identity()
373
+ )
374
+ )
375
+ (norm): LayerNorm((384,), eps=1e-06, elementwise_affine=True)
376
+ (head): Identity()
377
+ )
378
+ (depth_head): DPTHead(
379
+ (projects): ModuleList(
380
+ (0): Conv2d(384, 48, kernel_size=(1, 1), stride=(1, 1))
381
+ (1): Conv2d(384, 96, kernel_size=(1, 1), stride=(1, 1))
382
+ (2): Conv2d(384, 192, kernel_size=(1, 1), stride=(1, 1))
383
+ (3): Conv2d(384, 384, kernel_size=(1, 1), stride=(1, 1))
384
+ )
385
+ (resize_layers): ModuleList(
386
+ (0): ConvTranspose2d(48, 48, kernel_size=(4, 4), stride=(4, 4))
387
+ (1): ConvTranspose2d(96, 96, kernel_size=(2, 2), stride=(2, 2))
388
+ (2): Identity()
389
+ (3): Conv2d(384, 384, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
390
+ )
391
+ (scratch): Module(
392
+ (layer1_rn): Conv2d(48, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
393
+ (layer2_rn): Conv2d(96, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
394
+ (layer3_rn): Conv2d(192, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
395
+ (layer4_rn): Conv2d(384, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
396
+ (refinenet1): FeatureFusionBlock(
397
+ (out_conv): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1))
398
+ (resConfUnit1): ResidualConvUnit(
399
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
400
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
401
+ (activation): ReLU()
402
+ (skip_add): FloatFunctional(
403
+ (activation_post_process): Identity()
404
+ )
405
+ )
406
+ (resConfUnit2): ResidualConvUnit(
407
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
408
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
409
+ (activation): ReLU()
410
+ (skip_add): FloatFunctional(
411
+ (activation_post_process): Identity()
412
+ )
413
+ )
414
+ (skip_add): FloatFunctional(
415
+ (activation_post_process): Identity()
416
+ )
417
+ )
418
+ (refinenet2): FeatureFusionBlock(
419
+ (out_conv): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1))
420
+ (resConfUnit1): ResidualConvUnit(
421
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
422
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
423
+ (activation): ReLU()
424
+ (skip_add): FloatFunctional(
425
+ (activation_post_process): Identity()
426
+ )
427
+ )
428
+ (resConfUnit2): ResidualConvUnit(
429
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
430
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
431
+ (activation): ReLU()
432
+ (skip_add): FloatFunctional(
433
+ (activation_post_process): Identity()
434
+ )
435
+ )
436
+ (skip_add): FloatFunctional(
437
+ (activation_post_process): Identity()
438
+ )
439
+ )
440
+ (refinenet3): FeatureFusionBlock(
441
+ (out_conv): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1))
442
+ (resConfUnit1): ResidualConvUnit(
443
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
444
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
445
+ (activation): ReLU()
446
+ (skip_add): FloatFunctional(
447
+ (activation_post_process): Identity()
448
+ )
449
+ )
450
+ (resConfUnit2): ResidualConvUnit(
451
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
452
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
453
+ (activation): ReLU()
454
+ (skip_add): FloatFunctional(
455
+ (activation_post_process): Identity()
456
+ )
457
+ )
458
+ (skip_add): FloatFunctional(
459
+ (activation_post_process): Identity()
460
+ )
461
+ )
462
+ (refinenet4): FeatureFusionBlock(
463
+ (out_conv): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1))
464
+ (resConfUnit1): ResidualConvUnit(
465
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
466
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
467
+ (activation): ReLU()
468
+ (skip_add): FloatFunctional(
469
+ (activation_post_process): Identity()
470
+ )
471
+ )
472
+ (resConfUnit2): ResidualConvUnit(
473
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
474
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
475
+ (activation): ReLU()
476
+ (skip_add): FloatFunctional(
477
+ (activation_post_process): Identity()
478
+ )
479
+ )
480
+ (skip_add): FloatFunctional(
481
+ (activation_post_process): Identity()
482
+ )
483
+ )
484
+ (output_conv1): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
485
+ (output_conv2): Sequential(
486
+ (0): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
487
+ (1): ReLU(inplace=True)
488
+ (2): Conv2d(32, 1, kernel_size=(1, 1), stride=(1, 1))
489
+ (3): ReLU(inplace=True)
490
+ (4): Identity()
491
+ )
492
+ )
493
+ )
494
+ )
495
+ )
496
+ (conv2): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1))
497
+ (seed_bin_regressor): SeedBinRegressorUnnormed(
498
+ (_net): Sequential(
499
+ (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1))
500
+ (1): ReLU(inplace=True)
501
+ (2): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1))
502
+ (3): Softplus(beta=1, threshold=20)
503
+ )
504
+ )
505
+ (seed_projector): Projector(
506
+ (_net): Sequential(
507
+ (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(1, 1))
508
+ (1): ReLU(inplace=True)
509
+ (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
510
+ )
511
+ )
512
+ (projectors): ModuleList(
513
+ (0-3): 4 x Projector(
514
+ (_net): Sequential(
515
+ (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(1, 1))
516
+ (1): ReLU(inplace=True)
517
+ (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
518
+ )
519
+ )
520
+ )
521
+ (attractors): ModuleList(
522
+ (0): AttractorLayerUnnormed(
523
+ (_net): Sequential(
524
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
525
+ (1): ReLU(inplace=True)
526
+ (2): Conv2d(128, 16, kernel_size=(1, 1), stride=(1, 1))
527
+ (3): Softplus(beta=1, threshold=20)
528
+ )
529
+ )
530
+ (1): AttractorLayerUnnormed(
531
+ (_net): Sequential(
532
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
533
+ (1): ReLU(inplace=True)
534
+ (2): Conv2d(128, 8, kernel_size=(1, 1), stride=(1, 1))
535
+ (3): Softplus(beta=1, threshold=20)
536
+ )
537
+ )
538
+ (2): AttractorLayerUnnormed(
539
+ (_net): Sequential(
540
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
541
+ (1): ReLU(inplace=True)
542
+ (2): Conv2d(128, 4, kernel_size=(1, 1), stride=(1, 1))
543
+ (3): Softplus(beta=1, threshold=20)
544
+ )
545
+ )
546
+ (3): AttractorLayerUnnormed(
547
+ (_net): Sequential(
548
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
549
+ (1): ReLU(inplace=True)
550
+ (2): Conv2d(128, 1, kernel_size=(1, 1), stride=(1, 1))
551
+ (3): Softplus(beta=1, threshold=20)
552
+ )
553
+ )
554
+ )
555
+ (conditional_log_binomial): ConditionalLogBinomial(
556
+ (log_binomial_transform): LogBinomial()
557
+ (mlp): Sequential(
558
+ (0): Conv2d(161, 80, kernel_size=(1, 1), stride=(1, 1))
559
+ (1): GELU(approximate='none')
560
+ (2): Conv2d(80, 4, kernel_size=(1, 1), stride=(1, 1))
561
+ (3): Softplus(beta=1, threshold=20)
562
+ )
563
+ )
564
+ )
565
+ (sigloss): SILogLoss()
566
+ )
567
+ )
568
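The model printout ends with a `(sigloss): SILogLoss()` module, matching `sigloss=dict(type='SILogLoss')` in the config. Below is a minimal sketch of a scale-invariant log loss in that spirit; the valid-depth mask uses the configured min_depth/max_depth, while the beta=0.15 weighting and the factor of 10 follow the common ZoeDepth-style formulation and are assumptions rather than values read from this log:

```python
import torch

def silog_loss(pred, target, min_depth=0.001, max_depth=80.0, beta=0.15, scale=10.0):
    """Sketch of a scale-invariant log loss: scale * sqrt(Var(g) + beta * E[g]^2)."""
    mask = (target > min_depth) & (target < max_depth)   # ignore invalid depth values
    g = torch.log(pred[mask]) - torch.log(target[mask])  # log-depth residual
    d = (g ** 2).mean() - (1.0 - beta) * g.mean() ** 2   # = Var(g) + beta * E[g]^2
    return scale * torch.sqrt(d)
```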
+ 2024/03/15 00:20:47 - patchstitcher - INFO - successfully init trainer
569
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.cls_token
570
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.pos_embed
571
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.mask_token
572
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.patch_embed.proj.weight
573
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.patch_embed.proj.bias
574
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.norm1.weight
575
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.norm1.bias
576
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.attn.qkv.weight
577
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.attn.qkv.bias
578
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.attn.proj.weight
579
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.attn.proj.bias
580
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.ls1.gamma
581
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.norm2.weight
582
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.norm2.bias
583
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.mlp.fc1.weight
584
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.mlp.fc1.bias
585
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.mlp.fc2.weight
586
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.mlp.fc2.bias
587
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.0.ls2.gamma
588
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.norm1.weight
589
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.norm1.bias
590
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.attn.qkv.weight
591
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.attn.qkv.bias
592
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.attn.proj.weight
593
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.attn.proj.bias
594
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.ls1.gamma
595
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.norm2.weight
596
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.norm2.bias
597
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.mlp.fc1.weight
598
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.mlp.fc1.bias
599
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.mlp.fc2.weight
600
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.mlp.fc2.bias
601
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.1.ls2.gamma
602
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.norm1.weight
603
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.norm1.bias
604
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.attn.qkv.weight
605
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.attn.qkv.bias
606
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.attn.proj.weight
607
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.attn.proj.bias
608
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.ls1.gamma
609
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.norm2.weight
610
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.norm2.bias
611
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.mlp.fc1.weight
612
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.mlp.fc1.bias
613
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.mlp.fc2.weight
614
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.mlp.fc2.bias
615
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.2.ls2.gamma
616
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.norm1.weight
617
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.norm1.bias
618
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.attn.qkv.weight
619
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.attn.qkv.bias
620
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.attn.proj.weight
621
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.attn.proj.bias
622
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.ls1.gamma
623
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.norm2.weight
624
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.norm2.bias
625
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.mlp.fc1.weight
626
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.mlp.fc1.bias
627
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.mlp.fc2.weight
628
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.mlp.fc2.bias
629
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.3.ls2.gamma
630
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.norm1.weight
631
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.norm1.bias
632
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.attn.qkv.weight
633
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.attn.qkv.bias
634
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.attn.proj.weight
635
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.attn.proj.bias
636
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.ls1.gamma
637
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.norm2.weight
638
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.norm2.bias
639
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.mlp.fc1.weight
640
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.mlp.fc1.bias
641
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.mlp.fc2.weight
642
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.mlp.fc2.bias
643
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.4.ls2.gamma
644
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.norm1.weight
645
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.norm1.bias
646
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.attn.qkv.weight
647
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.attn.qkv.bias
648
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.attn.proj.weight
649
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.attn.proj.bias
650
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.ls1.gamma
651
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.norm2.weight
652
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.norm2.bias
653
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.mlp.fc1.weight
654
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.mlp.fc1.bias
655
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.mlp.fc2.weight
656
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.mlp.fc2.bias
657
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.5.ls2.gamma
658
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.norm1.weight
659
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.norm1.bias
660
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.attn.qkv.weight
661
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.attn.qkv.bias
662
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.attn.proj.weight
663
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.attn.proj.bias
664
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.ls1.gamma
665
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.norm2.weight
666
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.norm2.bias
667
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.mlp.fc1.weight
668
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.mlp.fc1.bias
669
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.mlp.fc2.weight
670
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.mlp.fc2.bias
671
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.6.ls2.gamma
672
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.norm1.weight
673
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.norm1.bias
674
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.attn.qkv.weight
675
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.attn.qkv.bias
676
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.attn.proj.weight
677
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.attn.proj.bias
678
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.ls1.gamma
679
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.norm2.weight
680
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.norm2.bias
681
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.mlp.fc1.weight
682
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.mlp.fc1.bias
683
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.mlp.fc2.weight
684
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.mlp.fc2.bias
685
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.7.ls2.gamma
686
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.norm1.weight
687
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.norm1.bias
688
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.attn.qkv.weight
689
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.attn.qkv.bias
690
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.attn.proj.weight
691
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.attn.proj.bias
692
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.ls1.gamma
693
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.norm2.weight
694
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.norm2.bias
695
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.mlp.fc1.weight
696
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.mlp.fc1.bias
697
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.mlp.fc2.weight
698
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.mlp.fc2.bias
699
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.8.ls2.gamma
700
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.norm1.weight
701
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.norm1.bias
702
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.attn.qkv.weight
703
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.attn.qkv.bias
704
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.attn.proj.weight
705
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.attn.proj.bias
706
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.ls1.gamma
707
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.norm2.weight
708
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.norm2.bias
709
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.mlp.fc1.weight
710
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.mlp.fc1.bias
711
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.mlp.fc2.weight
712
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.mlp.fc2.bias
713
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.9.ls2.gamma
714
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.norm1.weight
715
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.norm1.bias
716
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.attn.qkv.weight
717
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.attn.qkv.bias
718
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.attn.proj.weight
719
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.attn.proj.bias
720
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.ls1.gamma
721
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.norm2.weight
722
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.norm2.bias
723
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.mlp.fc1.weight
724
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.mlp.fc1.bias
725
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.mlp.fc2.weight
726
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.mlp.fc2.bias
727
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.10.ls2.gamma
728
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.norm1.weight
729
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.norm1.bias
730
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.attn.qkv.weight
731
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.attn.qkv.bias
732
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.attn.proj.weight
733
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.attn.proj.bias
734
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.ls1.gamma
735
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.norm2.weight
736
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.norm2.bias
737
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.mlp.fc1.weight
738
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.mlp.fc1.bias
739
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.mlp.fc2.weight
740
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.mlp.fc2.bias
741
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.blocks.11.ls2.gamma
742
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.norm.weight
743
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.pretrained.norm.bias
744
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.0.weight
745
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.0.bias
746
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.1.weight
747
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.1.bias
748
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.2.weight
749
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.2.bias
750
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.3.weight
751
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.projects.3.bias
752
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.resize_layers.0.weight
753
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.resize_layers.0.bias
754
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.resize_layers.1.weight
755
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.resize_layers.1.bias
756
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.resize_layers.3.weight
757
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.resize_layers.3.bias
758
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.layer1_rn.weight
759
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.layer2_rn.weight
760
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.layer3_rn.weight
761
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.layer4_rn.weight
762
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.out_conv.weight
763
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.out_conv.bias
764
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv1.weight
765
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv1.bias
766
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv2.weight
767
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv2.bias
768
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv1.weight
769
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv1.bias
770
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv2.weight
771
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv2.bias
772
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.out_conv.weight
773
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.out_conv.bias
774
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv1.weight
775
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv1.bias
776
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv2.weight
777
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv2.bias
778
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv1.weight
779
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv1.bias
780
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv2.weight
781
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv2.bias
782
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.out_conv.weight
783
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.out_conv.bias
784
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv1.weight
785
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv1.bias
786
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv2.weight
787
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv2.bias
788
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv1.weight
789
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv1.bias
790
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv2.weight
791
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv2.bias
792
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.out_conv.weight
793
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.out_conv.bias
794
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv1.weight
795
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv1.bias
796
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv2.weight
797
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv2.bias
798
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv1.weight
799
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv1.bias
800
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv2.weight
801
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv2.bias
802
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.output_conv1.weight
803
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.output_conv1.bias
804
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.output_conv2.0.weight
805
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.output_conv2.0.bias
806
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.output_conv2.2.weight
807
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.core.core.depth_head.scratch.output_conv2.2.bias
808
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.conv2.weight
809
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.conv2.bias
810
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.seed_bin_regressor._net.0.weight
811
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.seed_bin_regressor._net.0.bias
812
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.seed_bin_regressor._net.2.weight
813
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.seed_bin_regressor._net.2.bias
814
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.seed_projector._net.0.weight
815
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.seed_projector._net.0.bias
816
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.seed_projector._net.2.weight
817
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.seed_projector._net.2.bias
818
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.0._net.0.weight
819
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.0._net.0.bias
820
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.0._net.2.weight
821
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.0._net.2.bias
822
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.1._net.0.weight
823
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.1._net.0.bias
824
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.1._net.2.weight
825
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.1._net.2.bias
826
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.2._net.0.weight
827
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.2._net.0.bias
828
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.2._net.2.weight
829
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.2._net.2.bias
830
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.3._net.0.weight
831
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.3._net.0.bias
832
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.3._net.2.weight
833
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.projectors.3._net.2.bias
834
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.0._net.0.weight
835
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.0._net.0.bias
836
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.0._net.2.weight
837
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.0._net.2.bias
838
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.1._net.0.weight
839
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.1._net.0.bias
840
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.1._net.2.weight
841
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.1._net.2.bias
842
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.2._net.0.weight
843
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.2._net.0.bias
844
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.2._net.2.weight
845
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.2._net.2.bias
846
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.3._net.0.weight
847
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.3._net.0.bias
848
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.3._net.2.weight
849
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.attractors.3._net.2.bias
850
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.conditional_log_binomial.mlp.0.weight
851
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.conditional_log_binomial.mlp.0.bias
852
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.conditional_log_binomial.mlp.2.weight
853
+ 2024/03/15 00:20:47 - patchstitcher - INFO - training param: module.coarse_branch.conditional_log_binomial.mlp.2.bias
854
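Every parameter listed above belongs to the coarse branch (target='coarse'), and all of them are trainable. The config's `optim_wrapper` and `param_scheduler` blocks describe AdamW at lr=4e-6 with weight decay 0.01, gradient-norm clipping at 0.1, and one-cycle momentum/LR fields that line up with `torch.optim.lr_scheduler.OneCycleLR`. A minimal sketch of that setup, assuming the schedule is stepped once per iteration over the 475 steps/epoch x 24 epochs seen in this run and that max_lr equals the configured lr; the wiring inside the actual trainer is an assumption, and the `Linear` module is only a stand-in for the DDP-wrapped model printed earlier:

```python
import torch

model = torch.nn.Linear(8, 1)            # stand-in for the real DDP-wrapped model
steps_per_epoch, max_epochs = 475, 24    # values visible in this log

optimizer = torch.optim.AdamW(model.parameters(), lr=4e-6, weight_decay=0.01)
scheduler = torch.optim.lr_scheduler.OneCycleLR(
    optimizer,
    max_lr=4e-6,                                   # assumed: div_factor=1 keeps start lr == max lr
    total_steps=steps_per_epoch * max_epochs,
    pct_start=0.5, div_factor=1, final_div_factor=10000,
    cycle_momentum=True, base_momentum=0.85, max_momentum=0.95,
    three_phase=False,
)

# Per training step, after loss.backward():
torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=0.1, norm_type=2)
optimizer.step()
scheduler.step()
optimizer.zero_grad()
```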
+ 2024/03/15 00:23:05 - patchstitcher - INFO - Epoch: [01/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 2.218886375427246 - coarse_loss: 2.218886375427246
855
+ 2024/03/15 00:24:52 - patchstitcher - INFO - Epoch: [01/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 2.0132031440734863 - coarse_loss: 2.0132031440734863
856
+ 2024/03/15 00:26:41 - patchstitcher - INFO - Epoch: [01/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 2.1340489387512207 - coarse_loss: 2.1340489387512207
857
+ 2024/03/15 00:28:31 - patchstitcher - INFO - Epoch: [01/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.68356192111969 - coarse_loss: 1.68356192111969
858
+ 2024/03/15 00:31:46 - patchstitcher - INFO - Epoch: [02/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.1240144968032837 - coarse_loss: 1.1240144968032837
859
+ 2024/03/15 00:33:37 - patchstitcher - INFO - Epoch: [02/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.2552540302276611 - coarse_loss: 1.2552540302276611
860
+ 2024/03/15 00:35:27 - patchstitcher - INFO - Epoch: [02/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.3931670188903809 - coarse_loss: 1.3931670188903809
861
+ 2024/03/15 00:37:17 - patchstitcher - INFO - Epoch: [02/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.4315416812896729 - coarse_loss: 1.4315416812896729
862
+ 2024/03/15 00:38:56 - patchstitcher - INFO - Evaluation Summary:
863
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+-----------+-----------+
864
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
865
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+-----------+-----------+
866
+ | 0.9222131 | 0.9841732 | 0.9937032 | 0.0942684 | 1.901311 | 0.0392215 | 0.1319014 | 11.5870857 | 0.3169146 | 1.4523976 |
867
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+-----------+-----------+
868
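The evaluation summaries report the standard monocular-depth metrics: a1/a2/a3 are the threshold accuracies delta < 1.25, 1.25^2, 1.25^3, followed by absolute relative error, RMSE, mean log10 error, RMSE in log space, the scale-invariant log error (silog), squared relative error, and see (soft edge error, an edge-accuracy measure used for high-resolution depth). A minimal sketch of the headline columns computed over the valid-depth mask; these are the standard definitions, not code lifted from this repository:

```python
import torch

def depth_metrics(pred, gt, min_depth=0.001, max_depth=80.0):
    # Standard metrics matching the a1/a2/a3, abs_rel, rmse and log_10 columns above
    # (silog, sq_rel and see are omitted to keep the sketch short).
    m = (gt > min_depth) & (gt < max_depth)
    pred, gt = pred[m], gt[m]
    thresh = torch.max(pred / gt, gt / pred)
    return {
        'a1': (thresh < 1.25).float().mean().item(),
        'a2': (thresh < 1.25 ** 2).float().mean().item(),
        'a3': (thresh < 1.25 ** 3).float().mean().item(),
        'abs_rel': ((pred - gt).abs() / gt).mean().item(),
        'rmse': ((pred - gt) ** 2).mean().sqrt().item(),
        'log_10': (torch.log10(pred) - torch.log10(gt)).abs().mean().item(),
    }
```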
+ 2024/03/15 00:40:53 - patchstitcher - INFO - Epoch: [03/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.3891286849975586 - coarse_loss: 1.3891286849975586
869
+ 2024/03/15 00:42:45 - patchstitcher - INFO - Epoch: [03/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.3853542804718018 - coarse_loss: 1.3853542804718018
870
+ 2024/03/15 00:44:31 - patchstitcher - INFO - Epoch: [03/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.6085820198059082 - coarse_loss: 1.6085820198059082
871
+ 2024/03/15 00:46:24 - patchstitcher - INFO - Epoch: [03/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.2743269205093384 - coarse_loss: 1.2743269205093384
872
+ 2024/03/15 00:49:33 - patchstitcher - INFO - Epoch: [04/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.4644969701766968 - coarse_loss: 1.4644969701766968
873
+ 2024/03/15 00:51:20 - patchstitcher - INFO - Epoch: [04/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.040415644645691 - coarse_loss: 1.040415644645691
874
+ 2024/03/15 00:53:07 - patchstitcher - INFO - Epoch: [04/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.2523736953735352 - coarse_loss: 1.2523736953735352
875
+ 2024/03/15 00:54:57 - patchstitcher - INFO - Epoch: [04/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.7893640995025635 - coarse_loss: 0.7893640995025635
876
+ 2024/03/15 00:56:31 - patchstitcher - INFO - Evaluation Summary:
877
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+-----------+-----------+
878
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
879
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+-----------+-----------+
880
+ | 0.9466366 | 0.9857079 | 0.9944696 | 0.0784504 | 1.723246 | 0.0331783 | 0.1166779 | 10.4672395 | 0.2658952 | 1.2480133 |
881
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+-----------+-----------+
882
+ 2024/03/15 00:58:25 - patchstitcher - INFO - Epoch: [05/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.8934182524681091 - coarse_loss: 0.8934182524681091
883
+ 2024/03/15 01:00:10 - patchstitcher - INFO - Epoch: [05/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.0365135669708252 - coarse_loss: 1.0365135669708252
884
+ 2024/03/15 01:02:00 - patchstitcher - INFO - Epoch: [05/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.0158889293670654 - coarse_loss: 1.0158889293670654
885
+ 2024/03/15 01:03:50 - patchstitcher - INFO - Epoch: [05/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.7366129159927368 - coarse_loss: 0.7366129159927368
886
+ 2024/03/15 01:07:04 - patchstitcher - INFO - Epoch: [06/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.4556183815002441 - coarse_loss: 1.4556183815002441
887
+ 2024/03/15 01:08:51 - patchstitcher - INFO - Epoch: [06/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.093213677406311 - coarse_loss: 1.093213677406311
888
+ 2024/03/15 01:10:45 - patchstitcher - INFO - Epoch: [06/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8329901099205017 - coarse_loss: 0.8329901099205017
889
+ 2024/03/15 01:12:32 - patchstitcher - INFO - Epoch: [06/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.8255199193954468 - coarse_loss: 0.8255199193954468
890
+ 2024/03/15 01:14:05 - patchstitcher - INFO - Evaluation Summary:
891
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
892
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
893
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
894
+ | 0.9492006 | 0.9876058 | 0.9947174 | 0.0765434 | 1.6623389 | 0.0336977 | 0.1157899 | 10.168448 | 0.2274059 | 1.1601292 |
895
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
896
+ 2024/03/15 01:16:01 - patchstitcher - INFO - Epoch: [07/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.9320656061172485 - coarse_loss: 0.9320656061172485
897
+ 2024/03/15 01:17:44 - patchstitcher - INFO - Epoch: [07/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.3558683395385742 - coarse_loss: 1.3558683395385742
898
+ 2024/03/15 01:19:36 - patchstitcher - INFO - Epoch: [07/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.1851251125335693 - coarse_loss: 1.1851251125335693
899
+ 2024/03/15 01:21:28 - patchstitcher - INFO - Epoch: [07/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.7694360613822937 - coarse_loss: 0.7694360613822937
900
+ 2024/03/15 01:24:39 - patchstitcher - INFO - Epoch: [08/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.9268642067909241 - coarse_loss: 0.9268642067909241
901
+ 2024/03/15 01:26:28 - patchstitcher - INFO - Epoch: [08/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.0070387125015259 - coarse_loss: 1.0070387125015259
902
+ 2024/03/15 01:28:17 - patchstitcher - INFO - Epoch: [08/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.3363308906555176 - coarse_loss: 1.3363308906555176
903
+ 2024/03/15 01:30:03 - patchstitcher - INFO - Epoch: [08/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.549015998840332 - coarse_loss: 1.549015998840332
904
+ 2024/03/15 01:31:38 - patchstitcher - INFO - Evaluation Summary:
905
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+
906
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
907
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+
908
+ | 0.9580896 | 0.9882235 | 0.9949475 | 0.0697348 | 1.6023046 | 0.0295156 | 0.1067427 | 9.6755001 | 0.224005 | 1.1545794 |
909
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+
910
+ 2024/03/15 01:33:33 - patchstitcher - INFO - Epoch: [09/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.2725939750671387 - coarse_loss: 1.2725939750671387
911
+ 2024/03/15 01:35:25 - patchstitcher - INFO - Epoch: [09/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.8319188356399536 - coarse_loss: 1.8319188356399536
912
+ 2024/03/15 01:37:16 - patchstitcher - INFO - Epoch: [09/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.9146164655685425 - coarse_loss: 0.9146164655685425
913
+ 2024/03/15 01:39:08 - patchstitcher - INFO - Epoch: [09/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.9933633208274841 - coarse_loss: 0.9933633208274841
914
+ 2024/03/15 01:42:21 - patchstitcher - INFO - Epoch: [10/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.531670331954956 - coarse_loss: 0.531670331954956
915
+ 2024/03/15 01:44:13 - patchstitcher - INFO - Epoch: [10/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.98005211353302 - coarse_loss: 0.98005211353302
916
+ 2024/03/15 01:46:08 - patchstitcher - INFO - Epoch: [10/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.562972068786621 - coarse_loss: 1.562972068786621
917
+ 2024/03/15 01:48:00 - patchstitcher - INFO - Epoch: [10/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.0578055381774902 - coarse_loss: 1.0578055381774902
918
+ 2024/03/15 01:49:39 - patchstitcher - INFO - Evaluation Summary:
919
+ +-----------+-----------+-----------+-----------+-----------+----------+-----------+--------+-----------+-----------+
920
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
921
+ +-----------+-----------+-----------+-----------+-----------+----------+-----------+--------+-----------+-----------+
922
+ | 0.9573399 | 0.9884624 | 0.9949526 | 0.0727779 | 1.5619678 | 0.030998 | 0.1089102 | 9.5524 | 0.2075647 | 1.1259904 |
923
+ +-----------+-----------+-----------+-----------+-----------+----------+-----------+--------+-----------+-----------+
924
+ 2024/03/15 01:51:35 - patchstitcher - INFO - Epoch: [11/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.9536416530609131 - coarse_loss: 0.9536416530609131
925
+ 2024/03/15 01:53:33 - patchstitcher - INFO - Epoch: [11/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.061272382736206 - coarse_loss: 1.061272382736206
926
+ 2024/03/15 01:55:27 - patchstitcher - INFO - Epoch: [11/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.403846263885498 - coarse_loss: 1.403846263885498
927
+ 2024/03/15 01:57:19 - patchstitcher - INFO - Epoch: [11/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.6634379625320435 - coarse_loss: 0.6634379625320435
928
+ 2024/03/15 02:00:39 - patchstitcher - INFO - Epoch: [12/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.7105982303619385 - coarse_loss: 0.7105982303619385
929
+ 2024/03/15 02:02:34 - patchstitcher - INFO - Epoch: [12/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8706010580062866 - coarse_loss: 0.8706010580062866
930
+ 2024/03/15 02:04:29 - patchstitcher - INFO - Epoch: [12/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.013525366783142 - coarse_loss: 1.013525366783142
931
+ 2024/03/15 02:06:17 - patchstitcher - INFO - Epoch: [12/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.9357657432556152 - coarse_loss: 0.9357657432556152
932
+ 2024/03/15 02:07:50 - patchstitcher - INFO - Evaluation Summary:
933
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
934
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
935
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
936
+ | 0.9572283 | 0.9886936 | 0.9950871 | 0.0731142 | 1.5668887 | 0.0308406 | 0.1077591 | 9.5263897 | 0.2176418 | 1.1755943 |
937
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
938
+ 2024/03/15 02:09:47 - patchstitcher - INFO - Epoch: [13/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.4000838994979858 - coarse_loss: 1.4000838994979858
939
+ 2024/03/15 02:11:41 - patchstitcher - INFO - Epoch: [13/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.147301435470581 - coarse_loss: 1.147301435470581
940
+ 2024/03/15 02:13:39 - patchstitcher - INFO - Epoch: [13/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.9417784214019775 - coarse_loss: 0.9417784214019775
941
+ 2024/03/15 02:15:36 - patchstitcher - INFO - Epoch: [13/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.898872971534729 - coarse_loss: 0.898872971534729
942
+ 2024/03/15 02:18:53 - patchstitcher - INFO - Epoch: [14/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.6218137741088867 - coarse_loss: 0.6218137741088867
943
+ 2024/03/15 02:20:49 - patchstitcher - INFO - Epoch: [14/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.9591147303581238 - coarse_loss: 0.9591147303581238
944
+ 2024/03/15 02:22:42 - patchstitcher - INFO - Epoch: [14/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.7330798506736755 - coarse_loss: 0.7330798506736755
945
+ 2024/03/15 02:24:37 - patchstitcher - INFO - Epoch: [14/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.671249508857727 - coarse_loss: 0.671249508857727
946
+ 2024/03/15 02:26:12 - patchstitcher - INFO - Evaluation Summary:
947
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
948
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
949
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
950
+ | 0.9656246 | 0.9891472 | 0.9951051 | 0.0614303 | 1.5103214 | 0.0264747 | 0.1001097 | 9.4011921 | 0.1946697 | 1.0991172 |
951
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
952
+ 2024/03/15 02:28:10 - patchstitcher - INFO - Epoch: [15/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.7798411846160889 - coarse_loss: 0.7798411846160889
953
+ 2024/03/15 02:30:02 - patchstitcher - INFO - Epoch: [15/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.9757544994354248 - coarse_loss: 0.9757544994354248
954
+ 2024/03/15 02:31:49 - patchstitcher - INFO - Epoch: [15/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.1485944986343384 - coarse_loss: 1.1485944986343384
955
+ 2024/03/15 02:33:42 - patchstitcher - INFO - Epoch: [15/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.8730670809745789 - coarse_loss: 0.8730670809745789
956
+ 2024/03/15 02:36:57 - patchstitcher - INFO - Epoch: [16/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.0859500169754028 - coarse_loss: 1.0859500169754028
957
+ 2024/03/15 02:38:46 - patchstitcher - INFO - Epoch: [16/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.9123729467391968 - coarse_loss: 0.9123729467391968
958
+ 2024/03/15 02:40:36 - patchstitcher - INFO - Epoch: [16/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.0700657367706299 - coarse_loss: 1.0700657367706299
959
+ 2024/03/15 02:42:24 - patchstitcher - INFO - Epoch: [16/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.8393980264663696 - coarse_loss: 1.8393980264663696
960
+ 2024/03/15 02:43:58 - patchstitcher - INFO - Evaluation Summary:
961
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+-----------+
962
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
963
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+-----------+
964
+ | 0.9678091 | 0.9892931 | 0.9952321 | 0.0607629 | 1.488932 | 0.0257705 | 0.0981663 | 9.0934609 | 0.1966839 | 1.0878515 |
965
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+-----------+
966
+ 2024/03/15 02:45:51 - patchstitcher - INFO - Epoch: [17/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.7053128480911255 - coarse_loss: 0.7053128480911255
967
+ 2024/03/15 02:47:43 - patchstitcher - INFO - Epoch: [17/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.9886703491210938 - coarse_loss: 0.9886703491210938
968
+ 2024/03/15 02:49:32 - patchstitcher - INFO - Epoch: [17/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.180053949356079 - coarse_loss: 1.180053949356079
969
+ 2024/03/15 02:51:22 - patchstitcher - INFO - Epoch: [17/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.316230297088623 - coarse_loss: 1.316230297088623
970
+ 2024/03/15 02:54:39 - patchstitcher - INFO - Epoch: [18/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.7665231227874756 - coarse_loss: 0.7665231227874756
971
+ 2024/03/15 02:56:30 - patchstitcher - INFO - Epoch: [18/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.6590834856033325 - coarse_loss: 0.6590834856033325
972
+ 2024/03/15 02:58:17 - patchstitcher - INFO - Epoch: [18/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.9268083572387695 - coarse_loss: 0.9268083572387695
973
+ 2024/03/15 03:00:07 - patchstitcher - INFO - Epoch: [18/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.255874752998352 - coarse_loss: 1.255874752998352
974
+ 2024/03/15 03:01:42 - patchstitcher - INFO - Evaluation Summary:
975
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
976
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
977
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
978
+ | 0.9691702 | 0.9894969 | 0.9952754 | 0.0559551 | 1.4743834 | 0.0240017 | 0.0943829 | 8.8561864 | 0.1819411 | 1.0395958 |
979
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
980
+ 2024/03/15 03:03:35 - patchstitcher - INFO - Epoch: [19/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.6205350756645203 - coarse_loss: 0.6205350756645203
981
+ 2024/03/15 03:05:29 - patchstitcher - INFO - Epoch: [19/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.6529569625854492 - coarse_loss: 0.6529569625854492
982
+ 2024/03/15 03:07:18 - patchstitcher - INFO - Epoch: [19/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8907508850097656 - coarse_loss: 0.8907508850097656
983
+ 2024/03/15 03:09:13 - patchstitcher - INFO - Epoch: [19/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.5823774337768555 - coarse_loss: 0.5823774337768555
984
+ 2024/03/15 03:12:22 - patchstitcher - INFO - Epoch: [20/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.3379265069961548 - coarse_loss: 1.3379265069961548
985
+ 2024/03/15 03:14:13 - patchstitcher - INFO - Epoch: [20/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.615516185760498 - coarse_loss: 0.615516185760498
986
+ 2024/03/15 03:16:04 - patchstitcher - INFO - Epoch: [20/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.5864847302436829 - coarse_loss: 0.5864847302436829
987
+ 2024/03/15 03:17:54 - patchstitcher - INFO - Epoch: [20/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.9669459462165833 - coarse_loss: 0.9669459462165833
988
+ 2024/03/15 03:19:29 - patchstitcher - INFO - Evaluation Summary:
989
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
990
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
991
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
992
+ | 0.9697102 | 0.9895742 | 0.9953449 | 0.0539071 | 1.4497501 | 0.0229091 | 0.0925752 | 8.7784555 | 0.1802817 | 1.0580258 |
993
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
994
+ 2024/03/15 03:21:25 - patchstitcher - INFO - Epoch: [21/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.9557666778564453 - coarse_loss: 0.9557666778564453
995
+ 2024/03/15 03:23:15 - patchstitcher - INFO - Epoch: [21/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.6958411931991577 - coarse_loss: 0.6958411931991577
996
+ 2024/03/15 03:25:01 - patchstitcher - INFO - Epoch: [21/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.5607629418373108 - coarse_loss: 0.5607629418373108
997
+ 2024/03/15 03:26:54 - patchstitcher - INFO - Epoch: [21/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.8118071556091309 - coarse_loss: 1.8118071556091309
998
+ 2024/03/15 03:30:05 - patchstitcher - INFO - Epoch: [22/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.0183720588684082 - coarse_loss: 1.0183720588684082
999
+ 2024/03/15 03:31:53 - patchstitcher - INFO - Epoch: [22/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.0083253383636475 - coarse_loss: 1.0083253383636475
1000
+ 2024/03/15 03:33:45 - patchstitcher - INFO - Epoch: [22/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.5852430462837219 - coarse_loss: 0.5852430462837219
1001
+ 2024/03/15 03:35:35 - patchstitcher - INFO - Epoch: [22/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.8135958909988403 - coarse_loss: 0.8135958909988403
1002
+ 2024/03/15 03:37:10 - patchstitcher - INFO - Evaluation Summary:
1003
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
1004
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
1005
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
1006
+ | 0.9699838 | 0.9896694 | 0.9953757 | 0.0526183 | 1.4463599 | 0.0224501 | 0.0915034 | 8.7251583 | 0.1771403 | 1.0479052 |
1007
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
1008
+ 2024/03/15 03:39:04 - patchstitcher - INFO - Epoch: [23/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.6198912262916565 - coarse_loss: 0.6198912262916565
1009
+ 2024/03/15 03:40:57 - patchstitcher - INFO - Epoch: [23/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.1995759010314941 - coarse_loss: 1.1995759010314941
1010
+ 2024/03/15 03:42:47 - patchstitcher - INFO - Epoch: [23/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.7696393728256226 - coarse_loss: 1.7696393728256226
1011
+ 2024/03/15 03:44:34 - patchstitcher - INFO - Epoch: [23/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.4660639762878418 - coarse_loss: 1.4660639762878418
1012
+ 2024/03/15 03:47:48 - patchstitcher - INFO - Epoch: [24/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.9167467355728149 - coarse_loss: 0.9167467355728149
1013
+ 2024/03/15 03:49:40 - patchstitcher - INFO - Epoch: [24/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.6638955473899841 - coarse_loss: 0.6638955473899841
1014
+ 2024/03/15 03:51:31 - patchstitcher - INFO - Epoch: [24/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.4969237446784973 - coarse_loss: 0.4969237446784973
1015
+ 2024/03/15 03:53:18 - patchstitcher - INFO - Epoch: [24/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.5059656500816345 - coarse_loss: 0.5059656500816345
1016
+ 2024/03/15 03:54:52 - patchstitcher - INFO - Evaluation Summary:
1017
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+
1018
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
1019
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+
1020
+ | 0.9701259 | 0.9896677 | 0.9953767 | 0.0521426 | 1.4457442 | 0.0222779 | 0.0914182 | 8.7319618 | 0.1776046 | 1.046505 |
1021
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+
1022
+ 2024/03/15 03:54:52 - patchstitcher - INFO - Saving ckp, but use the inner get_save_dict fuction to get model_dict
1023
+ 2024/03/15 03:54:52 - patchstitcher - INFO - For saving space. Would you like to save base model several times? :>
1024
+ 2024/03/15 03:54:52 - patchstitcher - INFO - save checkpoint_24.pth at ./work_dir/depthanything_vits_u4k/coarse_pretrain
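A note for readers scanning the evaluation summaries in this log: the column names follow the usual monocular-depth conventions (a1/a2/a3 are threshold accuracies at 1.25, 1.25^2, 1.25^3; the rest are absolute/squared relative error, RMSE, log10 error, log-RMSE and the scale-invariant log error). The sketch below restates these standard definitions in NumPy for reference only; the repository's own evaluation code is authoritative, the exact silog scaling varies between codebases, and the PatchFusion-specific `see` column is not reproduced.

```python
import numpy as np

def compute_depth_metrics(gt, pred, min_depth=0.001, max_depth=80.0):
    """Standard monocular-depth metrics matching the log's column names
    (except `see`, which is PatchFusion-specific and omitted here)."""
    pred = np.clip(pred, min_depth, max_depth)   # common convention, avoids log(0)
    mask = (gt > min_depth) & (gt < max_depth)   # evaluate on valid ground truth only
    gt, pred = gt[mask], pred[mask]

    thresh = np.maximum(gt / pred, pred / gt)
    a1 = (thresh < 1.25).mean()
    a2 = (thresh < 1.25 ** 2).mean()
    a3 = (thresh < 1.25 ** 3).mean()

    abs_rel = np.mean(np.abs(gt - pred) / gt)
    sq_rel = np.mean((gt - pred) ** 2 / gt)
    rmse = np.sqrt(np.mean((gt - pred) ** 2))
    rmse_log = np.sqrt(np.mean((np.log(gt) - np.log(pred)) ** 2))
    log_10 = np.mean(np.abs(np.log10(gt) - np.log10(pred)))

    err = np.log(pred) - np.log(gt)
    silog = np.sqrt(np.mean(err ** 2) - np.mean(err) ** 2) * 100  # scale-invariant log RMSE

    return dict(a1=a1, a2=a2, a3=a3, abs_rel=abs_rel, rmse=rmse,
                log_10=log_10, rmse_log=rmse_log, silog=silog, sq_rel=sq_rel)
```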
depthanything_vits_u4k/coarse_pretrain/checkpoint_24.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b1eaf830589ea843ca421c175967b31c33609616f04af50b18b40d1abb3cb1e3
3
+ size 300162730
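checkpoint_24.pth is committed as a Git LFS pointer, so the ~300 MB weights only materialize after `git lfs pull`. Below is a minimal inspection sketch, assuming the checkpoint is a pickled dict as produced by the trainer's `get_save_dict` mentioned in the log above; the key names in the comment are guesses, not guaranteed by the repo.

```python
import torch

# Minimal inspection sketch. It assumes `git lfs pull` has replaced the pointer
# file with the real ~300 MB checkpoint, and that the checkpoint is a pickled
# dict (the log above mentions an inner get_save_dict used for saving).
ckpt = torch.load(
    "./work_dir/depthanything_vits_u4k/coarse_pretrain/checkpoint_24.pth",
    map_location="cpu",
)

# The exact layout depends on get_save_dict(); it is typically either a raw
# state_dict or a wrapper such as {"model_state_dict": ...}. Key names here
# are assumptions, so just list whatever is present.
if isinstance(ckpt, dict):
    print(list(ckpt.keys())[:10])
```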
depthanything_vits_u4k/coarse_pretrain/config.py ADDED
@@ -0,0 +1,310 @@
1
+ collect_input_args = [
2
+ 'image_lr',
3
+ 'crops_image_hr',
4
+ 'depth_gt',
5
+ 'crop_depths',
6
+ 'bboxs',
7
+ 'image_hr',
8
+ ]
9
+ convert_syncbn = True
10
+ debug = False
11
+ env_cfg = dict(
12
+ cudnn_benchmark=True,
13
+ dist_cfg=dict(backend='nccl'),
14
+ mp_cfg=dict(mp_start_method='forkserver'))
15
+ find_unused_parameters = True
16
+ general_dataloader = dict(
17
+ batch_size=1,
18
+ dataset=dict(
19
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
20
+ num_workers=2)
21
+ launcher = 'pytorch'
22
+ log_name = 'coarse_pretrain'
23
+ max_depth = 80
24
+ min_depth = 0.001
25
+ model = dict(
26
+ coarse_branch=dict(
27
+ attractor_alpha=1000,
28
+ attractor_gamma=2,
29
+ attractor_kind='mean',
30
+ attractor_type='inv',
31
+ aug=True,
32
+ bin_centers_type='softplus',
33
+ bin_embedding_dim=128,
34
+ clip_grad=0.1,
35
+ dataset='nyu',
36
+ depth_anything=True,
37
+ distributed=True,
38
+ do_resize=False,
39
+ force_keep_ar=True,
40
+ freeze_midas_bn=True,
41
+ gpu='NULL',
42
+ img_size=[
43
+ 392,
44
+ 518,
45
+ ],
46
+ inverse_midas=False,
47
+ log_images_every=0.1,
48
+ max_depth=80,
49
+ max_temp=50.0,
50
+ max_translation=100,
51
+ memory_efficient=True,
52
+ midas_model_type='vits',
53
+ min_depth=0.001,
54
+ min_temp=0.0212,
55
+ model='zoedepth',
56
+ n_attractors=[
57
+ 16,
58
+ 8,
59
+ 4,
60
+ 1,
61
+ ],
62
+ n_bins=64,
63
+ name='ZoeDepth',
64
+ notes='',
65
+ output_distribution='logbinomial',
66
+ prefetch=False,
67
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
68
+ print_losses=False,
69
+ project='ZoeDepth',
70
+ random_crop=False,
71
+ random_translate=False,
72
+ root='.',
73
+ save_dir='',
74
+ shared_dict='NULL',
75
+ tags='',
76
+ train_midas=True,
77
+ translate_prob=0.2,
78
+ type='DA-ZoeDepth',
79
+ uid='NULL',
80
+ use_amp=False,
81
+ use_pretrained_midas=True,
82
+ use_shared_dict=False,
83
+ validate_every=0.25,
84
+ version_name='v1',
85
+ workers=16),
86
+ fine_branch=dict(
87
+ attractor_alpha=1000,
88
+ attractor_gamma=2,
89
+ attractor_kind='mean',
90
+ attractor_type='inv',
91
+ aug=True,
92
+ bin_centers_type='softplus',
93
+ bin_embedding_dim=128,
94
+ clip_grad=0.1,
95
+ dataset='nyu',
96
+ depth_anything=True,
97
+ distributed=True,
98
+ do_resize=False,
99
+ force_keep_ar=True,
100
+ freeze_midas_bn=True,
101
+ gpu='NULL',
102
+ img_size=[
103
+ 392,
104
+ 518,
105
+ ],
106
+ inverse_midas=False,
107
+ log_images_every=0.1,
108
+ max_depth=80,
109
+ max_temp=50.0,
110
+ max_translation=100,
111
+ memory_efficient=True,
112
+ midas_model_type='vits',
113
+ min_depth=0.001,
114
+ min_temp=0.0212,
115
+ model='zoedepth',
116
+ n_attractors=[
117
+ 16,
118
+ 8,
119
+ 4,
120
+ 1,
121
+ ],
122
+ n_bins=64,
123
+ name='ZoeDepth',
124
+ notes='',
125
+ output_distribution='logbinomial',
126
+ prefetch=False,
127
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
128
+ print_losses=False,
129
+ project='ZoeDepth',
130
+ random_crop=False,
131
+ random_translate=False,
132
+ root='.',
133
+ save_dir='',
134
+ shared_dict='NULL',
135
+ tags='',
136
+ train_midas=True,
137
+ translate_prob=0.2,
138
+ type='DA-ZoeDepth',
139
+ uid='NULL',
140
+ use_amp=False,
141
+ use_pretrained_midas=True,
142
+ use_shared_dict=False,
143
+ validate_every=0.25,
144
+ version_name='v1',
145
+ workers=16),
146
+ max_depth=80,
147
+ min_depth=0.001,
148
+ sigloss=dict(type='SILogLoss'),
149
+ target='coarse',
150
+ type='BaselinePretrain')
151
+ optim_wrapper = dict(
152
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
153
+ optimizer=dict(lr=4e-06, type='AdamW', weight_decay=0.01),
154
+ paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict()))
155
+ param_scheduler = dict(
156
+ base_momentum=0.85,
157
+ cycle_momentum=True,
158
+ div_factor=1,
159
+ final_div_factor=10000,
160
+ max_momentum=0.95,
161
+ pct_start=0.5,
162
+ three_phase=False)
163
+ project = 'patchfusion'
164
+ resume = False
165
+ tags = [
166
+ 'coarse',
167
+ 'da',
168
+ 'vits',
169
+ ]
170
+ test_in_dataloader = dict(
171
+ batch_size=1,
172
+ dataset=dict(
173
+ data_root='./data/u4k',
174
+ max_depth=80,
175
+ min_depth=0.001,
176
+ mode='infer',
177
+ split='./data/u4k/splits/test.txt',
178
+ transform_cfg=dict(network_process_size=[
179
+ 384,
180
+ 512,
181
+ ]),
182
+ type='UnrealStereo4kDataset'),
183
+ num_workers=2)
184
+ test_out_dataloader = dict(
185
+ batch_size=1,
186
+ dataset=dict(
187
+ data_root='./data/u4k',
188
+ max_depth=80,
189
+ min_depth=0.001,
190
+ mode='infer',
191
+ split='./data/u4k/splits/test_out.txt',
192
+ transform_cfg=dict(network_process_size=[
193
+ 384,
194
+ 512,
195
+ ]),
196
+ type='UnrealStereo4kDataset'),
197
+ num_workers=2)
198
+ train_cfg = dict(
199
+ eval_start=0,
200
+ log_interval=100,
201
+ max_epochs=24,
202
+ save_checkpoint_interval=24,
203
+ train_log_img_interval=100,
204
+ val_interval=2,
205
+ val_log_img_interval=50,
206
+ val_type='epoch_base')
207
+ train_dataloader = dict(
208
+ batch_size=4,
209
+ dataset=dict(
210
+ data_root='./data/u4k',
211
+ max_depth=80,
212
+ min_depth=0.001,
213
+ mode='train',
214
+ resize_mode='depth-anything',
215
+ split='./data/u4k/splits/train.txt',
216
+ transform_cfg=dict(
217
+ degree=1.0,
218
+ network_process_size=[
219
+ 392,
220
+ 518,
221
+ ],
222
+ random_crop=True,
223
+ random_crop_size=(
224
+ 540,
225
+ 960,
226
+ )),
227
+ type='UnrealStereo4kDataset'),
228
+ num_workers=4)
229
+ val_dataloader = dict(
230
+ batch_size=1,
231
+ dataset=dict(
232
+ data_root='./data/u4k',
233
+ max_depth=80,
234
+ min_depth=0.001,
235
+ mode='infer',
236
+ resize_mode='depth-anything',
237
+ split='./data/u4k/splits/val.txt',
238
+ transform_cfg=dict(
239
+ degree=1.0,
240
+ network_process_size=[
241
+ 392,
242
+ 518,
243
+ ],
244
+ random_crop_size=(
245
+ 540,
246
+ 960,
247
+ )),
248
+ type='UnrealStereo4kDataset'),
249
+ num_workers=2)
250
+ work_dir = './work_dir/depthanything_vits_u4k/coarse_pretrain'
251
+ zoe_depth_config = dict(
252
+ attractor_alpha=1000,
253
+ attractor_gamma=2,
254
+ attractor_kind='mean',
255
+ attractor_type='inv',
256
+ aug=True,
257
+ bin_centers_type='softplus',
258
+ bin_embedding_dim=128,
259
+ clip_grad=0.1,
260
+ dataset='nyu',
261
+ depth_anything=True,
262
+ distributed=True,
263
+ do_resize=False,
264
+ force_keep_ar=True,
265
+ freeze_midas_bn=True,
266
+ gpu='NULL',
267
+ img_size=[
268
+ 392,
269
+ 518,
270
+ ],
271
+ inverse_midas=False,
272
+ log_images_every=0.1,
273
+ max_depth=80,
274
+ max_temp=50.0,
275
+ max_translation=100,
276
+ memory_efficient=True,
277
+ midas_model_type='vits',
278
+ min_depth=0.001,
279
+ min_temp=0.0212,
280
+ model='zoedepth',
281
+ n_attractors=[
282
+ 16,
283
+ 8,
284
+ 4,
285
+ 1,
286
+ ],
287
+ n_bins=64,
288
+ name='ZoeDepth',
289
+ notes='',
290
+ output_distribution='logbinomial',
291
+ prefetch=False,
292
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
293
+ print_losses=False,
294
+ project='ZoeDepth',
295
+ random_crop=False,
296
+ random_translate=False,
297
+ root='.',
298
+ save_dir='',
299
+ shared_dict='NULL',
300
+ tags='',
301
+ train_midas=True,
302
+ translate_prob=0.2,
303
+ type='DA-ZoeDepth',
304
+ uid='NULL',
305
+ use_amp=False,
306
+ use_pretrained_midas=True,
307
+ use_shared_dict=False,
308
+ validate_every=0.25,
309
+ version_name='v1',
310
+ workers=16)
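A note on this config: the `optim_wrapper` and `param_scheduler` blocks carry the same field names as `torch.optim.AdamW` and `torch.optim.lr_scheduler.OneCycleLR` (pct_start, div_factor, final_div_factor, momentum cycling, three_phase). The sketch below, which is not the repository's trainer code, shows how those fields could be wired onto standard PyTorch objects; the stand-in model and the hard-coded 475 steps per epoch (taken from the log above) are assumptions for illustration.

```python
import torch
from mmengine.config import Config

# Minimal sketch (not the repo's trainer): map the optimizer / scheduler fields
# of this config onto standard PyTorch objects.
cfg = Config.fromfile(
    "./work_dir/depthanything_vits_u4k/coarse_pretrain/config.py")

model = torch.nn.Linear(8, 1)          # stand-in for the real BaselinePretrain model
opt_cfg = cfg.optim_wrapper.optimizer  # AdamW, lr=4e-06, weight_decay=0.01
optimizer = torch.optim.AdamW(
    model.parameters(), lr=opt_cfg.lr, weight_decay=opt_cfg.weight_decay)

steps_per_epoch = 475                  # matches "Step: [...../00475]" in the log
sch_cfg = cfg.param_scheduler
scheduler = torch.optim.lr_scheduler.OneCycleLR(
    optimizer,
    max_lr=opt_cfg.lr,
    epochs=cfg.train_cfg.max_epochs,   # 24
    steps_per_epoch=steps_per_epoch,
    pct_start=sch_cfg.pct_start,
    cycle_momentum=sch_cfg.cycle_momentum,
    base_momentum=sch_cfg.base_momentum,
    max_momentum=sch_cfg.max_momentum,
    div_factor=sch_cfg.div_factor,
    final_div_factor=sch_cfg.final_div_factor,
    three_phase=sch_cfg.three_phase,
)

# One scheduler step per optimizer step, 24 * 475 steps in total.
for _ in range(cfg.train_cfg.max_epochs * steps_per_epoch):
    optimizer.step()
    scheduler.step()
```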
depthanything_vits_u4k/fine_pretrain/20240315_035516.log ADDED
@@ -0,0 +1,1028 @@
1
+ 2024/03/15 03:55:26 - patchstitcher - INFO -
2
+ ------------------------------------------------------------
3
+ System environment:
4
+ sys.platform: linux
5
+ Python: 3.8.18 | packaged by conda-forge | (default, Oct 10 2023, 15:44:36) [GCC 12.3.0]
6
+ CUDA available: True
7
+ numpy_random_seed: 621
8
+ GPU 0,1,2,3: NVIDIA A100-SXM4-80GB
9
+ CUDA_HOME: /sw/rl9g/cuda/11.8/rl9_binary
10
+ NVCC: Cuda compilation tools, release 11.8, V11.8.89
11
+ GCC: gcc (GCC) 11.3.1 20220421 (Red Hat 11.3.1-2)
12
+ PyTorch: 2.1.2
13
+ PyTorch compiling details: PyTorch built with:
14
+ - GCC 9.3
15
+ - C++ Version: 201703
16
+ - Intel(R) oneAPI Math Kernel Library Version 2022.1-Product Build 20220311 for Intel(R) 64 architecture applications
17
+ - Intel(R) MKL-DNN v3.1.1 (Git Hash 64f6bcbcbab628e96f33a62c3e975f8535a7bde4)
18
+ - OpenMP 201511 (a.k.a. OpenMP 4.5)
19
+ - LAPACK is enabled (usually provided by MKL)
20
+ - NNPACK is enabled
21
+ - CPU capability usage: AVX2
22
+ - CUDA Runtime 11.8
23
+ - NVCC architecture flags: -gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_61,code=sm_61;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86;-gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_90,code=sm_90;-gencode;arch=compute_37,code=compute_37
24
+ - CuDNN 8.7
25
+ - Magma 2.6.1
26
+ - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=11.8, CUDNN_VERSION=8.7.0, CXX_COMPILER=/opt/rh/devtoolset-9/root/usr/bin/c++, CXX_FLAGS= -D_GLIBCXX_USE_CXX11_ABI=0 -fabi-version=11 -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -DNDEBUG -DUSE_KINETO -DLIBKINETO_NOROCTRACER -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wall -Wextra -Werror=return-type -Werror=non-virtual-dtor -Werror=bool-operation -Wnarrowing -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-unused-parameter -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-stringop-overflow -Wno-psabi -Wno-error=pedantic -Wno-error=old-style-cast -Wno-invalid-partial-specialization -Wno-unused-private-field -Wno-aligned-allocation-unavailable -Wno-missing-braces -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Werror=cast-function-type -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_DISABLE_GPU_ASSERTS=ON, TORCH_VERSION=2.1.2, USE_CUDA=ON, USE_CUDNN=ON, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF,
27
+
28
+ TorchVision: 0.16.2
29
+ OpenCV: 4.8.1
30
+ MMEngine: 0.10.2
31
+
32
+ Runtime environment:
33
+ cudnn_benchmark: True
34
+ mp_cfg: {'mp_start_method': 'forkserver'}
35
+ dist_cfg: {'backend': 'nccl'}
36
+ seed: 621
37
+ Distributed launcher: pytorch
38
+ Distributed training: True
39
+ GPU number: 4
40
+ ------------------------------------------------------------
41
+
42
+ 2024/03/15 03:55:26 - patchstitcher - INFO - Config:
43
+ collect_input_args = [
44
+ 'image_lr',
45
+ 'crops_image_hr',
46
+ 'depth_gt',
47
+ 'crop_depths',
48
+ 'bboxs',
49
+ 'image_hr',
50
+ ]
51
+ convert_syncbn = True
52
+ debug = False
53
+ env_cfg = dict(
54
+ cudnn_benchmark=True,
55
+ dist_cfg=dict(backend='nccl'),
56
+ mp_cfg=dict(mp_start_method='forkserver'))
57
+ find_unused_parameters = True
58
+ general_dataloader = dict(
59
+ batch_size=1,
60
+ dataset=dict(
61
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
62
+ num_workers=2)
63
+ launcher = 'pytorch'
64
+ log_name = 'fine_pretrain'
65
+ max_depth = 80
66
+ min_depth = 0.001
67
+ model = dict(
68
+ coarse_branch=dict(
69
+ attractor_alpha=1000,
70
+ attractor_gamma=2,
71
+ attractor_kind='mean',
72
+ attractor_type='inv',
73
+ aug=True,
74
+ bin_centers_type='softplus',
75
+ bin_embedding_dim=128,
76
+ clip_grad=0.1,
77
+ dataset='nyu',
78
+ depth_anything=True,
79
+ distributed=True,
80
+ do_resize=False,
81
+ force_keep_ar=True,
82
+ freeze_midas_bn=True,
83
+ gpu='NULL',
84
+ img_size=[
85
+ 392,
86
+ 518,
87
+ ],
88
+ inverse_midas=False,
89
+ log_images_every=0.1,
90
+ max_depth=80,
91
+ max_temp=50.0,
92
+ max_translation=100,
93
+ memory_efficient=True,
94
+ midas_model_type='vits',
95
+ min_depth=0.001,
96
+ min_temp=0.0212,
97
+ model='zoedepth',
98
+ n_attractors=[
99
+ 16,
100
+ 8,
101
+ 4,
102
+ 1,
103
+ ],
104
+ n_bins=64,
105
+ name='ZoeDepth',
106
+ notes='',
107
+ output_distribution='logbinomial',
108
+ prefetch=False,
109
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
110
+ print_losses=False,
111
+ project='ZoeDepth',
112
+ random_crop=False,
113
+ random_translate=False,
114
+ root='.',
115
+ save_dir='',
116
+ shared_dict='NULL',
117
+ tags='',
118
+ train_midas=True,
119
+ translate_prob=0.2,
120
+ type='DA-ZoeDepth',
121
+ uid='NULL',
122
+ use_amp=False,
123
+ use_pretrained_midas=True,
124
+ use_shared_dict=False,
125
+ validate_every=0.25,
126
+ version_name='v1',
127
+ workers=16),
128
+ fine_branch=dict(
129
+ attractor_alpha=1000,
130
+ attractor_gamma=2,
131
+ attractor_kind='mean',
132
+ attractor_type='inv',
133
+ aug=True,
134
+ bin_centers_type='softplus',
135
+ bin_embedding_dim=128,
136
+ clip_grad=0.1,
137
+ dataset='nyu',
138
+ depth_anything=True,
139
+ distributed=True,
140
+ do_resize=False,
141
+ force_keep_ar=True,
142
+ freeze_midas_bn=True,
143
+ gpu='NULL',
144
+ img_size=[
145
+ 392,
146
+ 518,
147
+ ],
148
+ inverse_midas=False,
149
+ log_images_every=0.1,
150
+ max_depth=80,
151
+ max_temp=50.0,
152
+ max_translation=100,
153
+ memory_efficient=True,
154
+ midas_model_type='vits',
155
+ min_depth=0.001,
156
+ min_temp=0.0212,
157
+ model='zoedepth',
158
+ n_attractors=[
159
+ 16,
160
+ 8,
161
+ 4,
162
+ 1,
163
+ ],
164
+ n_bins=64,
165
+ name='ZoeDepth',
166
+ notes='',
167
+ output_distribution='logbinomial',
168
+ prefetch=False,
169
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
170
+ print_losses=False,
171
+ project='ZoeDepth',
172
+ random_crop=False,
173
+ random_translate=False,
174
+ root='.',
175
+ save_dir='',
176
+ shared_dict='NULL',
177
+ tags='',
178
+ train_midas=True,
179
+ translate_prob=0.2,
180
+ type='DA-ZoeDepth',
181
+ uid='NULL',
182
+ use_amp=False,
183
+ use_pretrained_midas=True,
184
+ use_shared_dict=False,
185
+ validate_every=0.25,
186
+ version_name='v1',
187
+ workers=16),
188
+ max_depth=80,
189
+ min_depth=0.001,
190
+ patch_process_shape=(
191
+ 392,
192
+ 518,
193
+ ),
194
+ sigloss=dict(type='SILogLoss'),
195
+ target='fine',
196
+ type='BaselinePretrain')
197
+ optim_wrapper = dict(
198
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
199
+ optimizer=dict(lr=4e-06, type='AdamW', weight_decay=0.01),
200
+ paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict()))
201
+ param_scheduler = dict(
202
+ base_momentum=0.85,
203
+ cycle_momentum=True,
204
+ div_factor=1,
205
+ final_div_factor=10000,
206
+ max_momentum=0.95,
207
+ pct_start=0.5,
208
+ three_phase=False)
209
+ project = 'patchfusion'
210
+ tags = [
211
+ 'fine',
212
+ 'da',
213
+ 'vits',
214
+ ]
215
+ test_in_dataloader = dict(
216
+ batch_size=1,
217
+ dataset=dict(
218
+ data_root='./data/u4k',
219
+ max_depth=80,
220
+ min_depth=0.001,
221
+ mode='infer',
222
+ split='./data/u4k/splits/test.txt',
223
+ transform_cfg=dict(network_process_size=[
224
+ 384,
225
+ 512,
226
+ ]),
227
+ type='UnrealStereo4kDataset'),
228
+ num_workers=2)
229
+ test_out_dataloader = dict(
230
+ batch_size=1,
231
+ dataset=dict(
232
+ data_root='./data/u4k',
233
+ max_depth=80,
234
+ min_depth=0.001,
235
+ mode='infer',
236
+ split='./data/u4k/splits/test_out.txt',
237
+ transform_cfg=dict(network_process_size=[
238
+ 384,
239
+ 512,
240
+ ]),
241
+ type='UnrealStereo4kDataset'),
242
+ num_workers=2)
243
+ train_cfg = dict(
244
+ eval_start=0,
245
+ log_interval=100,
246
+ max_epochs=24,
247
+ save_checkpoint_interval=24,
248
+ train_log_img_interval=100,
249
+ val_interval=2,
250
+ val_log_img_interval=50,
251
+ val_type='epoch_base')
252
+ train_dataloader = dict(
253
+ batch_size=4,
254
+ dataset=dict(
255
+ data_root='./data/u4k',
256
+ max_depth=80,
257
+ min_depth=0.001,
258
+ mode='train',
259
+ resize_mode='depth-anything',
260
+ split='./data/u4k/splits/train.txt',
261
+ transform_cfg=dict(
262
+ degree=1.0, network_process_size=[
263
+ 392,
264
+ 518,
265
+ ], random_crop=True),
266
+ type='UnrealStereo4kDataset'),
267
+ num_workers=4)
268
+ val_dataloader = dict(
269
+ batch_size=1,
270
+ dataset=dict(
271
+ data_root='./data/u4k',
272
+ max_depth=80,
273
+ min_depth=0.001,
274
+ mode='infer',
275
+ resize_mode='depth-anything',
276
+ split='./data/u4k/splits/val.txt',
277
+ transform_cfg=dict(degree=1.0, network_process_size=[
278
+ 392,
279
+ 518,
280
+ ]),
281
+ type='UnrealStereo4kDataset'),
282
+ num_workers=2)
283
+ work_dir = './work_dir/depthanything_vits_u4k/fine_pretrain'
284
+ zoe_depth_config = dict(
285
+ attractor_alpha=1000,
286
+ attractor_gamma=2,
287
+ attractor_kind='mean',
288
+ attractor_type='inv',
289
+ aug=True,
290
+ bin_centers_type='softplus',
291
+ bin_embedding_dim=128,
292
+ clip_grad=0.1,
293
+ dataset='nyu',
294
+ depth_anything=True,
295
+ distributed=True,
296
+ do_resize=False,
297
+ force_keep_ar=True,
298
+ freeze_midas_bn=True,
299
+ gpu='NULL',
300
+ img_size=[
301
+ 392,
302
+ 518,
303
+ ],
304
+ inverse_midas=False,
305
+ log_images_every=0.1,
306
+ max_depth=80,
307
+ max_temp=50.0,
308
+ max_translation=100,
309
+ memory_efficient=True,
310
+ midas_model_type='vits',
311
+ min_depth=0.001,
312
+ min_temp=0.0212,
313
+ model='zoedepth',
314
+ n_attractors=[
315
+ 16,
316
+ 8,
317
+ 4,
318
+ 1,
319
+ ],
320
+ n_bins=64,
321
+ name='ZoeDepth',
322
+ notes='',
323
+ output_distribution='logbinomial',
324
+ prefetch=False,
325
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
326
+ print_losses=False,
327
+ project='ZoeDepth',
328
+ random_crop=False,
329
+ random_translate=False,
330
+ root='.',
331
+ save_dir='',
332
+ shared_dict='NULL',
333
+ tags='',
334
+ train_midas=True,
335
+ translate_prob=0.2,
336
+ type='DA-ZoeDepth',
337
+ uid='NULL',
338
+ use_amp=False,
339
+ use_pretrained_midas=True,
340
+ use_shared_dict=False,
341
+ validate_every=0.25,
342
+ version_name='v1',
343
+ workers=16)
344
+
345
+ 2024/03/15 03:55:27 - patchstitcher - INFO - Loading deepnet from local::./work_dir/DepthAnything_vits.pt
346
+ 2024/03/15 03:55:27 - patchstitcher - INFO - Current zoedepth.core.prep.resizer is <class 'torch.nn.modules.linear.Identity'>
347
+ 2024/03/15 03:55:27 - patchstitcher - INFO - DistributedDataParallel(
348
+ (module): BaselinePretrain(
349
+ (fine_branch): ZoeDepth(
350
+ (core): DepthAnythingCore(
351
+ (core): DPT_DINOv2(
352
+ (pretrained): DinoVisionTransformer(
353
+ (patch_embed): PatchEmbed(
354
+ (proj): Conv2d(3, 384, kernel_size=(14, 14), stride=(14, 14))
355
+ (norm): Identity()
356
+ )
357
+ (blocks): ModuleList(
358
+ (0-11): 12 x NestedTensorBlock(
359
+ (norm1): LayerNorm((384,), eps=1e-06, elementwise_affine=True)
360
+ (attn): MemEffAttention(
361
+ (qkv): Linear(in_features=384, out_features=1152, bias=True)
362
+ (attn_drop): Dropout(p=0.0, inplace=False)
363
+ (proj): Linear(in_features=384, out_features=384, bias=True)
364
+ (proj_drop): Dropout(p=0.0, inplace=False)
365
+ )
366
+ (ls1): LayerScale()
367
+ (drop_path1): Identity()
368
+ (norm2): LayerNorm((384,), eps=1e-06, elementwise_affine=True)
369
+ (mlp): Mlp(
370
+ (fc1): Linear(in_features=384, out_features=1536, bias=True)
371
+ (act): GELU(approximate='none')
372
+ (fc2): Linear(in_features=1536, out_features=384, bias=True)
373
+ (drop): Dropout(p=0.0, inplace=False)
374
+ )
375
+ (ls2): LayerScale()
376
+ (drop_path2): Identity()
377
+ )
378
+ )
379
+ (norm): LayerNorm((384,), eps=1e-06, elementwise_affine=True)
380
+ (head): Identity()
381
+ )
382
+ (depth_head): DPTHead(
383
+ (projects): ModuleList(
384
+ (0): Conv2d(384, 48, kernel_size=(1, 1), stride=(1, 1))
385
+ (1): Conv2d(384, 96, kernel_size=(1, 1), stride=(1, 1))
386
+ (2): Conv2d(384, 192, kernel_size=(1, 1), stride=(1, 1))
387
+ (3): Conv2d(384, 384, kernel_size=(1, 1), stride=(1, 1))
388
+ )
389
+ (resize_layers): ModuleList(
390
+ (0): ConvTranspose2d(48, 48, kernel_size=(4, 4), stride=(4, 4))
391
+ (1): ConvTranspose2d(96, 96, kernel_size=(2, 2), stride=(2, 2))
392
+ (2): Identity()
393
+ (3): Conv2d(384, 384, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))
394
+ )
395
+ (scratch): Module(
396
+ (layer1_rn): Conv2d(48, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
397
+ (layer2_rn): Conv2d(96, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
398
+ (layer3_rn): Conv2d(192, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
399
+ (layer4_rn): Conv2d(384, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
400
+ (refinenet1): FeatureFusionBlock(
401
+ (out_conv): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1))
402
+ (resConfUnit1): ResidualConvUnit(
403
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
404
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
405
+ (activation): ReLU()
406
+ (skip_add): FloatFunctional(
407
+ (activation_post_process): Identity()
408
+ )
409
+ )
410
+ (resConfUnit2): ResidualConvUnit(
411
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
412
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
413
+ (activation): ReLU()
414
+ (skip_add): FloatFunctional(
415
+ (activation_post_process): Identity()
416
+ )
417
+ )
418
+ (skip_add): FloatFunctional(
419
+ (activation_post_process): Identity()
420
+ )
421
+ )
422
+ (refinenet2): FeatureFusionBlock(
423
+ (out_conv): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1))
424
+ (resConfUnit1): ResidualConvUnit(
425
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
426
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
427
+ (activation): ReLU()
428
+ (skip_add): FloatFunctional(
429
+ (activation_post_process): Identity()
430
+ )
431
+ )
432
+ (resConfUnit2): ResidualConvUnit(
433
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
434
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
435
+ (activation): ReLU()
436
+ (skip_add): FloatFunctional(
437
+ (activation_post_process): Identity()
438
+ )
439
+ )
440
+ (skip_add): FloatFunctional(
441
+ (activation_post_process): Identity()
442
+ )
443
+ )
444
+ (refinenet3): FeatureFusionBlock(
445
+ (out_conv): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1))
446
+ (resConfUnit1): ResidualConvUnit(
447
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
448
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
449
+ (activation): ReLU()
450
+ (skip_add): FloatFunctional(
451
+ (activation_post_process): Identity()
452
+ )
453
+ )
454
+ (resConfUnit2): ResidualConvUnit(
455
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
456
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
457
+ (activation): ReLU()
458
+ (skip_add): FloatFunctional(
459
+ (activation_post_process): Identity()
460
+ )
461
+ )
462
+ (skip_add): FloatFunctional(
463
+ (activation_post_process): Identity()
464
+ )
465
+ )
466
+ (refinenet4): FeatureFusionBlock(
467
+ (out_conv): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1))
468
+ (resConfUnit1): ResidualConvUnit(
469
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
470
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
471
+ (activation): ReLU()
472
+ (skip_add): FloatFunctional(
473
+ (activation_post_process): Identity()
474
+ )
475
+ )
476
+ (resConfUnit2): ResidualConvUnit(
477
+ (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
478
+ (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
479
+ (activation): ReLU()
480
+ (skip_add): FloatFunctional(
481
+ (activation_post_process): Identity()
482
+ )
483
+ )
484
+ (skip_add): FloatFunctional(
485
+ (activation_post_process): Identity()
486
+ )
487
+ )
488
+ (output_conv1): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
489
+ (output_conv2): Sequential(
490
+ (0): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
491
+ (1): ReLU(inplace=True)
492
+ (2): Conv2d(32, 1, kernel_size=(1, 1), stride=(1, 1))
493
+ (3): ReLU(inplace=True)
494
+ (4): Identity()
495
+ )
496
+ )
497
+ )
498
+ )
499
+ )
500
+ (conv2): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1))
501
+ (seed_bin_regressor): SeedBinRegressorUnnormed(
502
+ (_net): Sequential(
503
+ (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1))
504
+ (1): ReLU(inplace=True)
505
+ (2): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1))
506
+ (3): Softplus(beta=1, threshold=20)
507
+ )
508
+ )
509
+ (seed_projector): Projector(
510
+ (_net): Sequential(
511
+ (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(1, 1))
512
+ (1): ReLU(inplace=True)
513
+ (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
514
+ )
515
+ )
516
+ (projectors): ModuleList(
517
+ (0-3): 4 x Projector(
518
+ (_net): Sequential(
519
+ (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(1, 1))
520
+ (1): ReLU(inplace=True)
521
+ (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
522
+ )
523
+ )
524
+ )
525
+ (attractors): ModuleList(
526
+ (0): AttractorLayerUnnormed(
527
+ (_net): Sequential(
528
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
529
+ (1): ReLU(inplace=True)
530
+ (2): Conv2d(128, 16, kernel_size=(1, 1), stride=(1, 1))
531
+ (3): Softplus(beta=1, threshold=20)
532
+ )
533
+ )
534
+ (1): AttractorLayerUnnormed(
535
+ (_net): Sequential(
536
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
537
+ (1): ReLU(inplace=True)
538
+ (2): Conv2d(128, 8, kernel_size=(1, 1), stride=(1, 1))
539
+ (3): Softplus(beta=1, threshold=20)
540
+ )
541
+ )
542
+ (2): AttractorLayerUnnormed(
543
+ (_net): Sequential(
544
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
545
+ (1): ReLU(inplace=True)
546
+ (2): Conv2d(128, 4, kernel_size=(1, 1), stride=(1, 1))
547
+ (3): Softplus(beta=1, threshold=20)
548
+ )
549
+ )
550
+ (3): AttractorLayerUnnormed(
551
+ (_net): Sequential(
552
+ (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1))
553
+ (1): ReLU(inplace=True)
554
+ (2): Conv2d(128, 1, kernel_size=(1, 1), stride=(1, 1))
555
+ (3): Softplus(beta=1, threshold=20)
556
+ )
557
+ )
558
+ )
559
+ (conditional_log_binomial): ConditionalLogBinomial(
560
+ (log_binomial_transform): LogBinomial()
561
+ (mlp): Sequential(
562
+ (0): Conv2d(161, 80, kernel_size=(1, 1), stride=(1, 1))
563
+ (1): GELU(approximate='none')
564
+ (2): Conv2d(80, 4, kernel_size=(1, 1), stride=(1, 1))
565
+ (3): Softplus(beta=1, threshold=20)
566
+ )
567
+ )
568
+ )
569
+ (sigloss): SILogLoss()
570
+ )
571
+ )
572
+ 2024/03/15 03:55:33 - patchstitcher - INFO - successfully init trainer
573
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.cls_token
574
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.pos_embed
575
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.mask_token
576
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.patch_embed.proj.weight
577
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.patch_embed.proj.bias
578
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.norm1.weight
579
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.norm1.bias
580
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.attn.qkv.weight
581
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.attn.qkv.bias
582
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.attn.proj.weight
583
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.attn.proj.bias
584
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.ls1.gamma
585
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.norm2.weight
586
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.norm2.bias
587
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.mlp.fc1.weight
588
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.mlp.fc1.bias
589
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.mlp.fc2.weight
590
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.mlp.fc2.bias
591
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.ls2.gamma
592
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.norm1.weight
593
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.norm1.bias
594
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.attn.qkv.weight
595
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.attn.qkv.bias
596
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.attn.proj.weight
597
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.attn.proj.bias
598
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.ls1.gamma
599
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.norm2.weight
600
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.norm2.bias
601
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.mlp.fc1.weight
602
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.mlp.fc1.bias
603
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.mlp.fc2.weight
604
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.mlp.fc2.bias
605
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.ls2.gamma
606
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.norm1.weight
607
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.norm1.bias
608
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.attn.qkv.weight
609
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.attn.qkv.bias
610
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.attn.proj.weight
611
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.attn.proj.bias
612
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.ls1.gamma
613
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.norm2.weight
614
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.norm2.bias
615
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.mlp.fc1.weight
616
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.mlp.fc1.bias
617
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.mlp.fc2.weight
618
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.mlp.fc2.bias
619
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.ls2.gamma
620
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.norm1.weight
621
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.norm1.bias
622
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.attn.qkv.weight
623
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.attn.qkv.bias
624
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.attn.proj.weight
625
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.attn.proj.bias
626
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.ls1.gamma
627
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.norm2.weight
628
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.norm2.bias
629
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.mlp.fc1.weight
630
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.mlp.fc1.bias
631
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.mlp.fc2.weight
632
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.mlp.fc2.bias
633
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.ls2.gamma
634
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.norm1.weight
635
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.norm1.bias
636
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.attn.qkv.weight
637
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.attn.qkv.bias
638
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.attn.proj.weight
639
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.attn.proj.bias
640
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.ls1.gamma
641
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.norm2.weight
642
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.norm2.bias
643
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.mlp.fc1.weight
644
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.mlp.fc1.bias
645
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.mlp.fc2.weight
646
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.mlp.fc2.bias
647
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.ls2.gamma
648
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.norm1.weight
649
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.norm1.bias
650
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.attn.qkv.weight
651
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.attn.qkv.bias
652
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.attn.proj.weight
653
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.attn.proj.bias
654
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.ls1.gamma
655
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.norm2.weight
656
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.norm2.bias
657
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.mlp.fc1.weight
658
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.mlp.fc1.bias
659
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.mlp.fc2.weight
660
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.mlp.fc2.bias
661
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.ls2.gamma
662
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.norm1.weight
663
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.norm1.bias
664
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.attn.qkv.weight
665
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.attn.qkv.bias
666
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.attn.proj.weight
667
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.attn.proj.bias
668
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.ls1.gamma
669
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.norm2.weight
670
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.norm2.bias
671
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.mlp.fc1.weight
672
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.mlp.fc1.bias
673
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.mlp.fc2.weight
674
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.mlp.fc2.bias
675
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.ls2.gamma
676
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.norm1.weight
677
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.norm1.bias
678
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.attn.qkv.weight
679
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.attn.qkv.bias
680
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.attn.proj.weight
681
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.attn.proj.bias
682
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.ls1.gamma
683
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.norm2.weight
684
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.norm2.bias
685
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.mlp.fc1.weight
686
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.mlp.fc1.bias
687
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.mlp.fc2.weight
688
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.mlp.fc2.bias
689
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.ls2.gamma
690
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.norm1.weight
691
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.norm1.bias
692
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.attn.qkv.weight
693
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.attn.qkv.bias
694
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.attn.proj.weight
695
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.attn.proj.bias
696
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.ls1.gamma
697
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.norm2.weight
698
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.norm2.bias
699
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.mlp.fc1.weight
700
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.mlp.fc1.bias
701
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.mlp.fc2.weight
702
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.mlp.fc2.bias
703
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.ls2.gamma
704
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.norm1.weight
705
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.norm1.bias
706
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.attn.qkv.weight
707
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.attn.qkv.bias
708
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.attn.proj.weight
709
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.attn.proj.bias
710
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.ls1.gamma
711
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.norm2.weight
712
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.norm2.bias
713
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.mlp.fc1.weight
714
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.mlp.fc1.bias
715
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.mlp.fc2.weight
716
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.mlp.fc2.bias
717
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.ls2.gamma
718
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.norm1.weight
719
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.norm1.bias
720
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.attn.qkv.weight
721
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.attn.qkv.bias
722
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.attn.proj.weight
723
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.attn.proj.bias
724
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.ls1.gamma
725
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.norm2.weight
726
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.norm2.bias
727
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.mlp.fc1.weight
728
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.mlp.fc1.bias
729
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.mlp.fc2.weight
730
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.mlp.fc2.bias
731
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.ls2.gamma
732
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.norm1.weight
733
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.norm1.bias
734
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.attn.qkv.weight
735
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.attn.qkv.bias
736
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.attn.proj.weight
737
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.attn.proj.bias
738
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.ls1.gamma
739
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.norm2.weight
740
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.norm2.bias
741
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.mlp.fc1.weight
742
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.mlp.fc1.bias
743
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.mlp.fc2.weight
744
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.mlp.fc2.bias
745
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.ls2.gamma
746
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.norm.weight
747
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.norm.bias
748
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.0.weight
749
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.0.bias
750
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.1.weight
751
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.1.bias
752
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.2.weight
753
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.2.bias
754
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.3.weight
755
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.3.bias
756
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.0.weight
757
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.0.bias
758
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.1.weight
759
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.1.bias
760
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.3.weight
761
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.3.bias
762
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.layer1_rn.weight
763
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.layer2_rn.weight
764
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.layer3_rn.weight
765
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.layer4_rn.weight
766
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.out_conv.weight
767
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.out_conv.bias
768
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv1.weight
769
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv1.bias
770
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv2.weight
771
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv2.bias
772
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv1.weight
773
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv1.bias
774
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv2.weight
775
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv2.bias
776
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.out_conv.weight
777
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.out_conv.bias
778
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv1.weight
779
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv1.bias
780
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv2.weight
781
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv2.bias
782
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv1.weight
783
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv1.bias
784
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv2.weight
785
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv2.bias
786
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.out_conv.weight
787
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.out_conv.bias
788
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv1.weight
789
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv1.bias
790
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv2.weight
791
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv2.bias
792
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv1.weight
793
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv1.bias
794
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv2.weight
795
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv2.bias
796
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.out_conv.weight
797
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.out_conv.bias
798
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv1.weight
799
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv1.bias
800
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv2.weight
801
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv2.bias
802
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv1.weight
803
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv1.bias
804
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv2.weight
805
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv2.bias
806
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv1.weight
807
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv1.bias
808
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv2.0.weight
809
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv2.0.bias
810
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv2.2.weight
811
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv2.2.bias
812
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.conv2.weight
813
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.conv2.bias
814
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.seed_bin_regressor._net.0.weight
815
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.seed_bin_regressor._net.0.bias
816
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.seed_bin_regressor._net.2.weight
817
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.seed_bin_regressor._net.2.bias
818
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.seed_projector._net.0.weight
819
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.seed_projector._net.0.bias
820
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.seed_projector._net.2.weight
821
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.seed_projector._net.2.bias
822
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.0._net.0.weight
823
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.0._net.0.bias
824
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.0._net.2.weight
825
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.0._net.2.bias
826
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.1._net.0.weight
827
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.1._net.0.bias
828
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.1._net.2.weight
829
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.1._net.2.bias
830
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.2._net.0.weight
831
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.2._net.0.bias
832
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.2._net.2.weight
833
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.2._net.2.bias
834
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.3._net.0.weight
835
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.3._net.0.bias
836
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.3._net.2.weight
837
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.projectors.3._net.2.bias
838
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.0._net.0.weight
839
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.0._net.0.bias
840
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.0._net.2.weight
841
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.0._net.2.bias
842
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.1._net.0.weight
843
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.1._net.0.bias
844
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.1._net.2.weight
845
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.1._net.2.bias
846
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.2._net.0.weight
847
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.2._net.0.bias
848
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.2._net.2.weight
849
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.2._net.2.bias
850
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.3._net.0.weight
851
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.3._net.0.bias
852
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.3._net.2.weight
853
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.attractors.3._net.2.bias
854
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.conditional_log_binomial.mlp.0.weight
855
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.conditional_log_binomial.mlp.0.bias
856
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.conditional_log_binomial.mlp.2.weight
857
+ 2024/03/15 03:55:33 - patchstitcher - INFO - training param: module.fine_branch.conditional_log_binomial.mlp.2.bias
858
+ 2024/03/15 03:57:49 - patchstitcher - INFO - Epoch: [01/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 2.039879322052002 - fine_loss: 2.039879322052002
859
+ 2024/03/15 03:59:40 - patchstitcher - INFO - Epoch: [01/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 3.776620626449585 - fine_loss: 3.776620626449585
860
+ 2024/03/15 04:01:30 - patchstitcher - INFO - Epoch: [01/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 2.1612205505371094 - fine_loss: 2.1612205505371094
861
+ 2024/03/15 04:03:20 - patchstitcher - INFO - Epoch: [01/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.3563077449798584 - fine_loss: 1.3563077449798584
862
+ 2024/03/15 04:06:31 - patchstitcher - INFO - Epoch: [02/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 2.1678900718688965 - fine_loss: 2.1678900718688965
863
+ 2024/03/15 04:08:25 - patchstitcher - INFO - Epoch: [02/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.8825774192810059 - fine_loss: 1.8825774192810059
864
+ 2024/03/15 04:10:14 - patchstitcher - INFO - Epoch: [02/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 2.350590467453003 - fine_loss: 2.350590467453003
865
+ 2024/03/15 04:12:06 - patchstitcher - INFO - Epoch: [02/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 2.691840648651123 - fine_loss: 2.691840648651123
866
+ 2024/03/15 04:13:51 - patchstitcher - INFO - Evaluation Summary:
867
+ +----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
868
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
869
+ +----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
870
+ | 0.707044 | 0.9293698 | 0.9801447 | 0.1927294 | 2.3443637 | 0.0782506 | 0.2331481 | 20.0879481 | 0.4492522 | 1.7012854 |
871
+ +----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
872
+ 2024/03/15 04:15:48 - patchstitcher - INFO - Epoch: [03/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.2447803020477295 - fine_loss: 1.2447803020477295
873
+ 2024/03/15 04:17:37 - patchstitcher - INFO - Epoch: [03/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.6822900772094727 - fine_loss: 1.6822900772094727
874
+ 2024/03/15 04:19:22 - patchstitcher - INFO - Epoch: [03/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 2.7436625957489014 - fine_loss: 2.7436625957489014
875
+ 2024/03/15 04:21:15 - patchstitcher - INFO - Epoch: [03/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.9489283561706543 - fine_loss: 1.9489283561706543
876
+ 2024/03/15 04:24:21 - patchstitcher - INFO - Epoch: [04/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.5366265773773193 - fine_loss: 1.5366265773773193
877
+ 2024/03/15 04:26:10 - patchstitcher - INFO - Epoch: [04/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 2.0812580585479736 - fine_loss: 2.0812580585479736
878
+ 2024/03/15 04:28:00 - patchstitcher - INFO - Epoch: [04/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 2.318430185317993 - fine_loss: 2.318430185317993
879
+ 2024/03/15 04:29:48 - patchstitcher - INFO - Epoch: [04/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.638041615486145 - fine_loss: 1.638041615486145
880
+ 2024/03/15 04:31:27 - patchstitcher - INFO - Evaluation Summary:
881
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+-----------+----------+
882
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
883
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+-----------+----------+
884
+ | 0.7926732 | 0.9574633 | 0.9874803 | 0.1658911 | 2.033809 | 0.0653377 | 0.1971613 | 17.6386279 | 0.3764188 | 1.566062 |
885
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+-----------+----------+
886
+ 2024/03/15 04:33:22 - patchstitcher - INFO - Epoch: [05/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.071550726890564 - fine_loss: 1.071550726890564
887
+ 2024/03/15 04:35:11 - patchstitcher - INFO - Epoch: [05/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.159848928451538 - fine_loss: 1.159848928451538
888
+ 2024/03/15 04:36:58 - patchstitcher - INFO - Epoch: [05/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.2986273765563965 - fine_loss: 1.2986273765563965
889
+ 2024/03/15 04:38:48 - patchstitcher - INFO - Epoch: [05/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.5721113681793213 - fine_loss: 1.5721113681793213
890
+ 2024/03/15 04:42:00 - patchstitcher - INFO - Epoch: [06/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.7645320892333984 - fine_loss: 1.7645320892333984
891
+ 2024/03/15 04:43:48 - patchstitcher - INFO - Epoch: [06/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.2818663120269775 - fine_loss: 1.2818663120269775
892
+ 2024/03/15 04:45:40 - patchstitcher - INFO - Epoch: [06/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.2445242404937744 - fine_loss: 1.2445242404937744
893
+ 2024/03/15 04:47:30 - patchstitcher - INFO - Epoch: [06/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.5368983745574951 - fine_loss: 1.5368983745574951
894
+ 2024/03/15 04:49:02 - patchstitcher - INFO - Evaluation Summary:
895
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
896
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
897
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
898
+ | 0.8194143 | 0.9697637 | 0.9905658 | 0.1490125 | 1.8480574 | 0.0592408 | 0.1810736 | 15.8342003 | 0.3005681 | 1.3977808 |
899
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
900
+ 2024/03/15 04:50:58 - patchstitcher - INFO - Epoch: [07/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.4791369438171387 - fine_loss: 1.4791369438171387
901
+ 2024/03/15 04:52:44 - patchstitcher - INFO - Epoch: [07/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 2.1252331733703613 - fine_loss: 2.1252331733703613
902
+ 2024/03/15 04:54:32 - patchstitcher - INFO - Epoch: [07/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.84209406375885 - fine_loss: 1.84209406375885
903
+ 2024/03/15 04:56:25 - patchstitcher - INFO - Epoch: [07/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.1359673738479614 - fine_loss: 1.1359673738479614
904
+ 2024/03/15 04:59:38 - patchstitcher - INFO - Epoch: [08/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.5866280794143677 - fine_loss: 1.5866280794143677
905
+ 2024/03/15 05:01:29 - patchstitcher - INFO - Epoch: [08/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.3199617862701416 - fine_loss: 1.3199617862701416
906
+ 2024/03/15 05:03:15 - patchstitcher - INFO - Epoch: [08/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.6660882234573364 - fine_loss: 1.6660882234573364
907
+ 2024/03/15 05:05:05 - patchstitcher - INFO - Epoch: [08/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.0399880409240723 - fine_loss: 1.0399880409240723
908
+ 2024/03/15 05:06:40 - patchstitcher - INFO - Evaluation Summary:
909
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
910
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
911
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
912
+ | 0.8804187 | 0.9831836 | 0.9948749 | 0.1118127 | 1.7537212 | 0.0498078 | 0.1550232 | 14.4210851 | 0.2352216 | 1.2980962 |
913
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
914
+ 2024/03/15 05:08:36 - patchstitcher - INFO - Epoch: [09/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.7554281949996948 - fine_loss: 1.7554281949996948
915
+ 2024/03/15 05:10:27 - patchstitcher - INFO - Epoch: [09/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 2.8572347164154053 - fine_loss: 2.8572347164154053
916
+ 2024/03/15 05:12:16 - patchstitcher - INFO - Epoch: [09/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.3657317161560059 - fine_loss: 1.3657317161560059
917
+ 2024/03/15 05:14:08 - patchstitcher - INFO - Epoch: [09/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.3460898399353027 - fine_loss: 1.3460898399353027
918
+ 2024/03/15 05:17:20 - patchstitcher - INFO - Epoch: [10/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.0736647844314575 - fine_loss: 1.0736647844314575
919
+ 2024/03/15 05:19:11 - patchstitcher - INFO - Epoch: [10/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.179059624671936 - fine_loss: 1.179059624671936
920
+ 2024/03/15 05:21:00 - patchstitcher - INFO - Epoch: [10/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.0112545490264893 - fine_loss: 1.0112545490264893
921
+ 2024/03/15 05:22:47 - patchstitcher - INFO - Epoch: [10/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.2453086376190186 - fine_loss: 1.2453086376190186
922
+ 2024/03/15 05:24:25 - patchstitcher - INFO - Evaluation Summary:
923
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
924
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
925
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
926
+ | 0.8772274 | 0.9823961 | 0.9948375 | 0.1173125 | 1.7241426 | 0.0501591 | 0.1553792 | 14.1530364 | 0.2422748 | 1.3415729 |
927
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
928
+ 2024/03/15 05:26:18 - patchstitcher - INFO - Epoch: [11/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.306344747543335 - fine_loss: 1.306344747543335
929
+ 2024/03/15 05:28:16 - patchstitcher - INFO - Epoch: [11/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.348771572113037 - fine_loss: 1.348771572113037
930
+ 2024/03/15 05:30:06 - patchstitcher - INFO - Epoch: [11/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.549656629562378 - fine_loss: 1.549656629562378
931
+ 2024/03/15 05:31:57 - patchstitcher - INFO - Epoch: [11/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.4452790021896362 - fine_loss: 1.4452790021896362
932
+ 2024/03/15 05:35:10 - patchstitcher - INFO - Epoch: [12/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.1077752113342285 - fine_loss: 1.1077752113342285
933
+ 2024/03/15 05:37:01 - patchstitcher - INFO - Epoch: [12/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8956596255302429 - fine_loss: 0.8956596255302429
934
+ 2024/03/15 05:38:52 - patchstitcher - INFO - Epoch: [12/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.9720367789268494 - fine_loss: 0.9720367789268494
935
+ 2024/03/15 05:40:41 - patchstitcher - INFO - Epoch: [12/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.4826208353042603 - fine_loss: 1.4826208353042603
936
+ 2024/03/15 05:42:16 - patchstitcher - INFO - Evaluation Summary:
937
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+----------+-----------+
938
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
939
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+----------+-----------+
940
+ | 0.8740682 | 0.9844725 | 0.9957269 | 0.1142447 | 1.696142 | 0.0509766 | 0.1547242 | 13.9800131 | 0.237403 | 1.2716073 |
941
+ +-----------+-----------+-----------+-----------+----------+-----------+-----------+------------+----------+-----------+
942
+ 2024/03/15 05:44:13 - patchstitcher - INFO - Epoch: [13/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.7906665802001953 - fine_loss: 1.7906665802001953
943
+ 2024/03/15 05:46:06 - patchstitcher - INFO - Epoch: [13/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.7277212142944336 - fine_loss: 1.7277212142944336
944
+ 2024/03/15 05:48:00 - patchstitcher - INFO - Epoch: [13/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.1345900297164917 - fine_loss: 1.1345900297164917
945
+ 2024/03/15 05:49:53 - patchstitcher - INFO - Epoch: [13/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.680286169052124 - fine_loss: 0.680286169052124
946
+ 2024/03/15 05:53:05 - patchstitcher - INFO - Epoch: [14/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.0135771036148071 - fine_loss: 1.0135771036148071
947
+ 2024/03/15 05:54:56 - patchstitcher - INFO - Epoch: [14/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.1816802024841309 - fine_loss: 1.1816802024841309
948
+ 2024/03/15 05:56:44 - patchstitcher - INFO - Epoch: [14/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.3476241827011108 - fine_loss: 1.3476241827011108
949
+ 2024/03/15 05:58:33 - patchstitcher - INFO - Epoch: [14/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.6280028223991394 - fine_loss: 0.6280028223991394
950
+ 2024/03/15 06:00:11 - patchstitcher - INFO - Evaluation Summary:
951
+ +-----------+-----------+-----------+-----------+-----------+-----------+----------+------------+-----------+-----------+
952
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
953
+ +-----------+-----------+-----------+-----------+-----------+-----------+----------+------------+-----------+-----------+
954
+ | 0.9147314 | 0.9859354 | 0.9949076 | 0.1007045 | 1.6106567 | 0.0434999 | 0.138901 | 13.0318626 | 0.2056279 | 1.2140529 |
955
+ +-----------+-----------+-----------+-----------+-----------+-----------+----------+------------+-----------+-----------+
956
+ 2024/03/15 06:02:04 - patchstitcher - INFO - Epoch: [15/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.8376606702804565 - fine_loss: 0.8376606702804565
957
+ 2024/03/15 06:03:57 - patchstitcher - INFO - Epoch: [15/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.03225576877594 - fine_loss: 1.03225576877594
958
+ 2024/03/15 06:05:44 - patchstitcher - INFO - Epoch: [15/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.9883253574371338 - fine_loss: 0.9883253574371338
959
+ 2024/03/15 06:07:36 - patchstitcher - INFO - Epoch: [15/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.262385368347168 - fine_loss: 1.262385368347168
960
+ 2024/03/15 06:10:46 - patchstitcher - INFO - Epoch: [16/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.1695902347564697 - fine_loss: 1.1695902347564697
961
+ 2024/03/15 06:12:36 - patchstitcher - INFO - Epoch: [16/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 2.1688151359558105 - fine_loss: 2.1688151359558105
962
+ 2024/03/15 06:14:24 - patchstitcher - INFO - Epoch: [16/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.3791565895080566 - fine_loss: 1.3791565895080566
963
+ 2024/03/15 06:16:12 - patchstitcher - INFO - Epoch: [16/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.2718651294708252 - fine_loss: 1.2718651294708252
964
+ 2024/03/15 06:17:50 - patchstitcher - INFO - Evaluation Summary:
965
+ +----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
966
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
967
+ +----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
968
+ | 0.917846 | 0.9849823 | 0.9948954 | 0.0979613 | 1.5791011 | 0.0433261 | 0.1380226 | 12.8257169 | 0.1883265 | 1.1684257 |
969
+ +----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
970
+ 2024/03/15 06:19:42 - patchstitcher - INFO - Epoch: [17/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.0557522773742676 - fine_loss: 1.0557522773742676
971
+ 2024/03/15 06:21:33 - patchstitcher - INFO - Epoch: [17/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.6954542398452759 - fine_loss: 0.6954542398452759
972
+ 2024/03/15 06:23:20 - patchstitcher - INFO - Epoch: [17/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.203284740447998 - fine_loss: 1.203284740447998
973
+ 2024/03/15 06:25:09 - patchstitcher - INFO - Epoch: [17/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.2890739440917969 - fine_loss: 1.2890739440917969
974
+ 2024/03/15 06:28:25 - patchstitcher - INFO - Epoch: [18/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.8028295040130615 - fine_loss: 0.8028295040130615
975
+ 2024/03/15 06:30:13 - patchstitcher - INFO - Epoch: [18/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.499220609664917 - fine_loss: 0.499220609664917
976
+ 2024/03/15 06:32:01 - patchstitcher - INFO - Epoch: [18/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8515260219573975 - fine_loss: 0.8515260219573975
977
+ 2024/03/15 06:33:51 - patchstitcher - INFO - Epoch: [18/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.953697919845581 - fine_loss: 0.953697919845581
978
+ 2024/03/15 06:35:27 - patchstitcher - INFO - Evaluation Summary:
979
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
980
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
981
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
982
+ | 0.9318894 | 0.9858092 | 0.9957183 | 0.0923063 | 1.5112557 | 0.0394818 | 0.1269675 | 11.6301649 | 0.1761516 | 1.1147971 |
983
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
984
+ 2024/03/15 06:37:24 - patchstitcher - INFO - Epoch: [19/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.8879871368408203 - fine_loss: 0.8879871368408203
985
+ 2024/03/15 06:39:14 - patchstitcher - INFO - Epoch: [19/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.4138840436935425 - fine_loss: 1.4138840436935425
986
+ 2024/03/15 06:41:05 - patchstitcher - INFO - Epoch: [19/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.3911192417144775 - fine_loss: 1.3911192417144775
987
+ 2024/03/15 06:42:59 - patchstitcher - INFO - Epoch: [19/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.9037826061248779 - fine_loss: 0.9037826061248779
988
+ 2024/03/15 06:46:06 - patchstitcher - INFO - Epoch: [20/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.7059022784233093 - fine_loss: 0.7059022784233093
989
+ 2024/03/15 06:47:58 - patchstitcher - INFO - Epoch: [20/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8616353273391724 - fine_loss: 0.8616353273391724
990
+ 2024/03/15 06:49:51 - patchstitcher - INFO - Epoch: [20/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8395438194274902 - fine_loss: 0.8395438194274902
991
+ 2024/03/15 06:51:43 - patchstitcher - INFO - Epoch: [20/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.6362200379371643 - fine_loss: 0.6362200379371643
992
+ 2024/03/15 06:53:21 - patchstitcher - INFO - Evaluation Summary:
993
+ +-----------+-----------+-----------+-----------+-----------+-----------+----------+------------+-----------+-----------+
994
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
995
+ +-----------+-----------+-----------+-----------+-----------+-----------+----------+------------+-----------+-----------+
996
+ | 0.9486918 | 0.9883879 | 0.9965515 | 0.0802352 | 1.4414517 | 0.0349744 | 0.116316 | 10.9957016 | 0.1575956 | 1.0969994 |
997
+ +-----------+-----------+-----------+-----------+-----------+-----------+----------+------------+-----------+-----------+
998
+ 2024/03/15 06:55:18 - patchstitcher - INFO - Epoch: [21/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.6189630627632141 - fine_loss: 0.6189630627632141
999
+ 2024/03/15 06:57:11 - patchstitcher - INFO - Epoch: [21/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.1719452142715454 - fine_loss: 1.1719452142715454
1000
+ 2024/03/15 06:58:55 - patchstitcher - INFO - Epoch: [21/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.142961025238037 - fine_loss: 1.142961025238037
1001
+ 2024/03/15 07:00:45 - patchstitcher - INFO - Epoch: [21/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.719948649406433 - fine_loss: 1.719948649406433
1002
+ 2024/03/15 07:03:58 - patchstitcher - INFO - Epoch: [22/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.6470488905906677 - fine_loss: 0.6470488905906677
1003
+ 2024/03/15 07:05:49 - patchstitcher - INFO - Epoch: [22/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.5520279407501221 - fine_loss: 0.5520279407501221
1004
+ 2024/03/15 07:07:38 - patchstitcher - INFO - Epoch: [22/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8810967803001404 - fine_loss: 0.8810967803001404
1005
+ 2024/03/15 07:09:32 - patchstitcher - INFO - Epoch: [22/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.6827142238616943 - fine_loss: 0.6827142238616943
1006
+ 2024/03/15 07:11:07 - patchstitcher - INFO - Evaluation Summary:
1007
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+
1008
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
1009
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+
1010
+ | 0.9523656 | 0.9892937 | 0.9967417 | 0.0767006 | 1.4133022 | 0.0333895 | 0.1125023 | 10.666611 | 0.1523504 | 1.061902 |
1011
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+
1012
+ 2024/03/15 07:13:02 - patchstitcher - INFO - Epoch: [23/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.8002086877822876 - fine_loss: 1.8002086877822876
1013
+ 2024/03/15 07:14:51 - patchstitcher - INFO - Epoch: [23/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.5043245553970337 - fine_loss: 0.5043245553970337
1014
+ 2024/03/15 07:16:39 - patchstitcher - INFO - Epoch: [23/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.6025413274765015 - fine_loss: 1.6025413274765015
1015
+ 2024/03/15 07:18:29 - patchstitcher - INFO - Epoch: [23/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.3183393478393555 - fine_loss: 1.3183393478393555
1016
+ 2024/03/15 07:21:41 - patchstitcher - INFO - Epoch: [24/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.6571695804595947 - fine_loss: 1.6571695804595947
1017
+ 2024/03/15 07:23:30 - patchstitcher - INFO - Epoch: [24/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.0306520462036133 - fine_loss: 1.0306520462036133
1018
+ 2024/03/15 07:25:19 - patchstitcher - INFO - Epoch: [24/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8030037879943848 - fine_loss: 0.8030037879943848
1019
+ 2024/03/15 07:27:09 - patchstitcher - INFO - Epoch: [24/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.6139640808105469 - fine_loss: 0.6139640808105469
1020
+ 2024/03/15 07:28:49 - patchstitcher - INFO - Evaluation Summary:
1021
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
1022
+ | a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
1023
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
1024
+ | 0.9531358 | 0.9897053 | 0.9967571 | 0.0759499 | 1.4041272 | 0.0327699 | 0.1107659 | 10.5243982 | 0.1508702 | 1.0635976 |
1025
+ +-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
1026
+ 2024/03/15 07:28:49 - patchstitcher - INFO - Saving ckp, but use the inner get_save_dict fuction to get model_dict
1027
+ 2024/03/15 07:28:49 - patchstitcher - INFO - For saving space. Would you like to save base model several times? :>
1028
+ 2024/03/15 07:28:49 - patchstitcher - INFO - save checkpoint_24.pth at ./work_dir/depthanything_vits_u4k/fine_pretrain
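The evaluation summaries in the log above report the usual monocular depth-estimation metrics (a1/a2/a3 threshold accuracy, abs_rel, rmse, log_10, rmse_log, silog, sq_rel, plus a repo-specific see column). For reference, here is a minimal NumPy sketch of how these standard metrics are commonly computed. It is an illustration only, assuming a valid-depth mask over the configured (0.001, 80) range; it is not the repository's exact evaluation code, and the see column is omitted.

import numpy as np

def depth_metrics(gt: np.ndarray, pred: np.ndarray,
                  min_depth: float = 0.001, max_depth: float = 80.0) -> dict:
    # Evaluate only pixels with valid ground truth inside the depth range.
    mask = (gt > min_depth) & (gt < max_depth)
    gt, pred = gt[mask], pred[mask]

    # Threshold accuracies: fraction of pixels within 1.25, 1.25^2, 1.25^3.
    thresh = np.maximum(gt / pred, pred / gt)
    a1 = (thresh < 1.25).mean()
    a2 = (thresh < 1.25 ** 2).mean()
    a3 = (thresh < 1.25 ** 3).mean()

    # Relative and absolute error terms.
    abs_rel = np.mean(np.abs(gt - pred) / gt)
    sq_rel = np.mean((gt - pred) ** 2 / gt)
    rmse = np.sqrt(np.mean((gt - pred) ** 2))
    rmse_log = np.sqrt(np.mean((np.log(gt) - np.log(pred)) ** 2))
    log_10 = np.mean(np.abs(np.log10(gt) - np.log10(pred)))

    # Scale-invariant log error, reported here in percent.
    err = np.log(pred) - np.log(gt)
    silog = np.sqrt(np.mean(err ** 2) - np.mean(err) ** 2) * 100

    return dict(a1=a1, a2=a2, a3=a3, abs_rel=abs_rel, rmse=rmse,
                log_10=log_10, rmse_log=rmse_log, silog=silog, sq_rel=sq_rel)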
depthanything_vits_u4k/fine_pretrain/checkpoint_24.pth ADDED
@@ -0,0 +1,3 @@
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:83e1263ca84ae622c57b1cdfc1e1e2b548ad13d3e1d2b143193a6d5efcc5fc92
3
+ size 300162730
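checkpoint_24.pth is stored as a Git LFS pointer, so the ~300 MB weights are fetched with `git lfs pull` rather than carried in this diff. A minimal sketch for inspecting the downloaded checkpoint locally follows; the key layout inside the file is produced by the repo's get_save_dict() (mentioned in the log above) and is not shown here, so the sketch only discovers the keys instead of assuming any particular name.

import torch

# Sketch only: run `git lfs pull` first so the LFS pointer resolves to the
# actual weights file. No specific key names are assumed here.
ckpt = torch.load('depthanything_vits_u4k/fine_pretrain/checkpoint_24.pth',
                  map_location='cpu')
print(type(ckpt))
if isinstance(ckpt, dict):
    print(list(ckpt.keys()))  # inspect the real layout before using any key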
depthanything_vits_u4k/fine_pretrain/config.py ADDED
@@ -0,0 +1,314 @@
 
1
+ collect_input_args = [
2
+ 'image_lr',
3
+ 'crops_image_hr',
4
+ 'depth_gt',
5
+ 'crop_depths',
6
+ 'bboxs',
7
+ 'image_hr',
8
+ ]
9
+ convert_syncbn = True
10
+ debug = False
11
+ env_cfg = dict(
12
+ cudnn_benchmark=True,
13
+ dist_cfg=dict(backend='nccl'),
14
+ mp_cfg=dict(mp_start_method='forkserver'))
15
+ find_unused_parameters = True
16
+ general_dataloader = dict(
17
+ batch_size=1,
18
+ dataset=dict(
19
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
20
+ num_workers=2)
21
+ launcher = 'pytorch'
22
+ log_name = 'fine_pretrain'
23
+ max_depth = 80
24
+ min_depth = 0.001
25
+ model = dict(
26
+ coarse_branch=dict(
27
+ attractor_alpha=1000,
28
+ attractor_gamma=2,
29
+ attractor_kind='mean',
30
+ attractor_type='inv',
31
+ aug=True,
32
+ bin_centers_type='softplus',
33
+ bin_embedding_dim=128,
34
+ clip_grad=0.1,
35
+ dataset='nyu',
36
+ depth_anything=True,
37
+ distributed=True,
38
+ do_resize=False,
39
+ force_keep_ar=True,
40
+ freeze_midas_bn=True,
41
+ gpu='NULL',
42
+ img_size=[
43
+ 392,
44
+ 518,
45
+ ],
46
+ inverse_midas=False,
47
+ log_images_every=0.1,
48
+ max_depth=80,
49
+ max_temp=50.0,
50
+ max_translation=100,
51
+ memory_efficient=True,
52
+ midas_model_type='vits',
53
+ min_depth=0.001,
54
+ min_temp=0.0212,
55
+ model='zoedepth',
56
+ n_attractors=[
57
+ 16,
58
+ 8,
59
+ 4,
60
+ 1,
61
+ ],
62
+ n_bins=64,
63
+ name='ZoeDepth',
64
+ notes='',
65
+ output_distribution='logbinomial',
66
+ prefetch=False,
67
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
68
+ print_losses=False,
69
+ project='ZoeDepth',
70
+ random_crop=False,
71
+ random_translate=False,
72
+ root='.',
73
+ save_dir='',
74
+ shared_dict='NULL',
75
+ tags='',
76
+ train_midas=True,
77
+ translate_prob=0.2,
78
+ type='DA-ZoeDepth',
79
+ uid='NULL',
80
+ use_amp=False,
81
+ use_pretrained_midas=True,
82
+ use_shared_dict=False,
83
+ validate_every=0.25,
84
+ version_name='v1',
85
+ workers=16),
86
+ fine_branch=dict(
87
+ attractor_alpha=1000,
88
+ attractor_gamma=2,
89
+ attractor_kind='mean',
90
+ attractor_type='inv',
91
+ aug=True,
92
+ bin_centers_type='softplus',
93
+ bin_embedding_dim=128,
94
+ clip_grad=0.1,
95
+ dataset='nyu',
96
+ depth_anything=True,
97
+ distributed=True,
98
+ do_resize=False,
99
+ force_keep_ar=True,
100
+ freeze_midas_bn=True,
101
+ gpu='NULL',
102
+ img_size=[
103
+ 392,
104
+ 518,
105
+ ],
106
+ inverse_midas=False,
107
+ log_images_every=0.1,
108
+ max_depth=80,
109
+ max_temp=50.0,
110
+ max_translation=100,
111
+ memory_efficient=True,
112
+ midas_model_type='vits',
113
+ min_depth=0.001,
114
+ min_temp=0.0212,
115
+ model='zoedepth',
116
+ n_attractors=[
117
+ 16,
118
+ 8,
119
+ 4,
120
+ 1,
121
+ ],
122
+ n_bins=64,
123
+ name='ZoeDepth',
124
+ notes='',
125
+ output_distribution='logbinomial',
126
+ prefetch=False,
127
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
128
+ print_losses=False,
129
+ project='ZoeDepth',
130
+ random_crop=False,
131
+ random_translate=False,
132
+ root='.',
133
+ save_dir='',
134
+ shared_dict='NULL',
135
+ tags='',
136
+ train_midas=True,
137
+ translate_prob=0.2,
138
+ type='DA-ZoeDepth',
139
+ uid='NULL',
140
+ use_amp=False,
141
+ use_pretrained_midas=True,
142
+ use_shared_dict=False,
143
+ validate_every=0.25,
144
+ version_name='v1',
145
+ workers=16),
146
+ max_depth=80,
147
+ min_depth=0.001,
148
+ patch_process_shape=(
149
+ 392,
150
+ 518,
151
+ ),
152
+ sigloss=dict(type='SILogLoss'),
153
+ target='fine',
154
+ type='BaselinePretrain')
155
+ optim_wrapper = dict(
156
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
157
+ optimizer=dict(lr=4e-06, type='AdamW', weight_decay=0.01),
158
+ paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict()))
159
+ param_scheduler = dict(
160
+ base_momentum=0.85,
161
+ cycle_momentum=True,
162
+ div_factor=1,
163
+ final_div_factor=10000,
164
+ max_momentum=0.95,
165
+ pct_start=0.5,
166
+ three_phase=False)
167
+ project = 'patchfusion'
168
+ resume = False
169
+ tags = [
170
+ 'fine',
171
+ 'da',
172
+ 'vits',
173
+ ]
174
+ test_in_dataloader = dict(
175
+ batch_size=1,
176
+ dataset=dict(
177
+ data_root='./data/u4k',
178
+ max_depth=80,
179
+ min_depth=0.001,
180
+ mode='infer',
181
+ split='./data/u4k/splits/test.txt',
182
+ transform_cfg=dict(network_process_size=[
183
+ 384,
184
+ 512,
185
+ ]),
186
+ type='UnrealStereo4kDataset'),
187
+ num_workers=2)
188
+ test_out_dataloader = dict(
189
+ batch_size=1,
190
+ dataset=dict(
191
+ data_root='./data/u4k',
192
+ max_depth=80,
193
+ min_depth=0.001,
194
+ mode='infer',
195
+ split='./data/u4k/splits/test_out.txt',
196
+ transform_cfg=dict(network_process_size=[
197
+ 384,
198
+ 512,
199
+ ]),
200
+ type='UnrealStereo4kDataset'),
201
+ num_workers=2)
202
+ train_cfg = dict(
203
+ eval_start=0,
204
+ log_interval=100,
205
+ max_epochs=24,
206
+ save_checkpoint_interval=24,
207
+ train_log_img_interval=100,
208
+ val_interval=2,
209
+ val_log_img_interval=50,
210
+ val_type='epoch_base')
211
+ train_dataloader = dict(
212
+ batch_size=4,
213
+ dataset=dict(
214
+ data_root='./data/u4k',
215
+ max_depth=80,
216
+ min_depth=0.001,
217
+ mode='train',
218
+ resize_mode='depth-anything',
219
+ split='./data/u4k/splits/train.txt',
220
+ transform_cfg=dict(
221
+ degree=1.0,
222
+ network_process_size=[
223
+ 392,
224
+ 518,
225
+ ],
226
+ random_crop=True,
227
+ random_crop_size=(
228
+ 540,
229
+ 960,
230
+ )),
231
+ type='UnrealStereo4kDataset'),
232
+ num_workers=4)
233
+ val_dataloader = dict(
234
+ batch_size=1,
235
+ dataset=dict(
236
+ data_root='./data/u4k',
237
+ max_depth=80,
238
+ min_depth=0.001,
239
+ mode='infer',
240
+ resize_mode='depth-anything',
241
+ split='./data/u4k/splits/val.txt',
242
+ transform_cfg=dict(
243
+ degree=1.0,
244
+ network_process_size=[
245
+ 392,
246
+ 518,
247
+ ],
248
+ random_crop_size=(
249
+ 540,
250
+ 960,
251
+ )),
252
+ type='UnrealStereo4kDataset'),
253
+ num_workers=2)
254
+ work_dir = './work_dir/depthanything_vits_u4k/fine_pretrain'
255
+ zoe_depth_config = dict(
256
+ attractor_alpha=1000,
257
+ attractor_gamma=2,
258
+ attractor_kind='mean',
259
+ attractor_type='inv',
260
+ aug=True,
261
+ bin_centers_type='softplus',
262
+ bin_embedding_dim=128,
263
+ clip_grad=0.1,
264
+ dataset='nyu',
265
+ depth_anything=True,
266
+ distributed=True,
267
+ do_resize=False,
268
+ force_keep_ar=True,
269
+ freeze_midas_bn=True,
270
+ gpu='NULL',
271
+ img_size=[
272
+ 392,
273
+ 518,
274
+ ],
275
+ inverse_midas=False,
276
+ log_images_every=0.1,
277
+ max_depth=80,
278
+ max_temp=50.0,
279
+ max_translation=100,
280
+ memory_efficient=True,
281
+ midas_model_type='vits',
282
+ min_depth=0.001,
283
+ min_temp=0.0212,
284
+ model='zoedepth',
285
+ n_attractors=[
286
+ 16,
287
+ 8,
288
+ 4,
289
+ 1,
290
+ ],
291
+ n_bins=64,
292
+ name='ZoeDepth',
293
+ notes='',
294
+ output_distribution='logbinomial',
295
+ prefetch=False,
296
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
297
+ print_losses=False,
298
+ project='ZoeDepth',
299
+ random_crop=False,
300
+ random_translate=False,
301
+ root='.',
302
+ save_dir='',
303
+ shared_dict='NULL',
304
+ tags='',
305
+ train_midas=True,
306
+ translate_prob=0.2,
307
+ type='DA-ZoeDepth',
308
+ uid='NULL',
309
+ use_amp=False,
310
+ use_pretrained_midas=True,
311
+ use_shared_dict=False,
312
+ validate_every=0.25,
313
+ version_name='v1',
314
+ workers=16)
depthanything_vits_u4k/patchfusion/20240315_072915.log ADDED
The diff for this file is too large to render. See raw diff
 
depthanything_vits_u4k/patchfusion/checkpoint_16.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:12c10292f758cc38f45bd2dad11240190d440c8416b5834180642253f0ea93b9
3
+ size 205127853
depthanything_vits_u4k/patchfusion/config.py ADDED
@@ -0,0 +1,341 @@
1
+ collect_input_args = [
2
+ 'image_lr',
3
+ 'crops_image_hr',
4
+ 'depth_gt',
5
+ 'crop_depths',
6
+ 'bboxs',
7
+ 'image_hr',
8
+ ]
9
+ convert_syncbn = True
10
+ debug = False
11
+ env_cfg = dict(
12
+ cudnn_benchmark=True,
13
+ dist_cfg=dict(backend='nccl'),
14
+ mp_cfg=dict(mp_start_method='forkserver'))
15
+ find_unused_parameters = True
16
+ general_dataloader = dict(
17
+ batch_size=1,
18
+ dataset=dict(
19
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
20
+ num_workers=2)
21
+ launcher = 'pytorch'
22
+ log_name = 'patchfusion'
23
+ max_depth = 80
24
+ min_depth = 0.001
25
+ model = dict(
26
+ coarse_branch=dict(
27
+ attractor_alpha=1000,
28
+ attractor_gamma=2,
29
+ attractor_kind='mean',
30
+ attractor_type='inv',
31
+ aug=True,
32
+ bin_centers_type='softplus',
33
+ bin_embedding_dim=128,
34
+ clip_grad=0.1,
35
+ dataset='nyu',
36
+ depth_anything=True,
37
+ distributed=True,
38
+ do_resize=False,
39
+ force_keep_ar=True,
40
+ freeze_midas_bn=True,
41
+ gpu='NULL',
42
+ img_size=[
43
+ 392,
44
+ 518,
45
+ ],
46
+ inverse_midas=False,
47
+ log_images_every=0.1,
48
+ max_depth=80,
49
+ max_temp=50.0,
50
+ max_translation=100,
51
+ memory_efficient=True,
52
+ midas_model_type='vits',
53
+ min_depth=0.001,
54
+ min_temp=0.0212,
55
+ model='zoedepth',
56
+ n_attractors=[
57
+ 16,
58
+ 8,
59
+ 4,
60
+ 1,
61
+ ],
62
+ n_bins=64,
63
+ name='ZoeDepth',
64
+ notes='',
65
+ output_distribution='logbinomial',
66
+ prefetch=False,
67
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
68
+ print_losses=False,
69
+ project='ZoeDepth',
70
+ random_crop=False,
71
+ random_translate=False,
72
+ root='.',
73
+ save_dir='',
74
+ shared_dict='NULL',
75
+ tags='',
76
+ train_midas=True,
77
+ translate_prob=0.2,
78
+ type='DA-ZoeDepth',
79
+ uid='NULL',
80
+ use_amp=False,
81
+ use_pretrained_midas=True,
82
+ use_shared_dict=False,
83
+ validate_every=0.25,
84
+ version_name='v1',
85
+ workers=16),
86
+ fine_branch=dict(
87
+ attractor_alpha=1000,
88
+ attractor_gamma=2,
89
+ attractor_kind='mean',
90
+ attractor_type='inv',
91
+ aug=True,
92
+ bin_centers_type='softplus',
93
+ bin_embedding_dim=128,
94
+ clip_grad=0.1,
95
+ dataset='nyu',
96
+ depth_anything=True,
97
+ distributed=True,
98
+ do_resize=False,
99
+ force_keep_ar=True,
100
+ freeze_midas_bn=True,
101
+ gpu='NULL',
102
+ img_size=[
103
+ 392,
104
+ 518,
105
+ ],
106
+ inverse_midas=False,
107
+ log_images_every=0.1,
108
+ max_depth=80,
109
+ max_temp=50.0,
110
+ max_translation=100,
111
+ memory_efficient=True,
112
+ midas_model_type='vits',
113
+ min_depth=0.001,
114
+ min_temp=0.0212,
115
+ model='zoedepth',
116
+ n_attractors=[
117
+ 16,
118
+ 8,
119
+ 4,
120
+ 1,
121
+ ],
122
+ n_bins=64,
123
+ name='ZoeDepth',
124
+ notes='',
125
+ output_distribution='logbinomial',
126
+ prefetch=False,
127
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
128
+ print_losses=False,
129
+ project='ZoeDepth',
130
+ random_crop=False,
131
+ random_translate=False,
132
+ root='.',
133
+ save_dir='',
134
+ shared_dict='NULL',
135
+ tags='',
136
+ train_midas=True,
137
+ translate_prob=0.2,
138
+ type='DA-ZoeDepth',
139
+ uid='NULL',
140
+ use_amp=False,
141
+ use_pretrained_midas=True,
142
+ use_shared_dict=False,
143
+ validate_every=0.25,
144
+ version_name='v1',
145
+ workers=16),
146
+ guided_fusion=dict(
147
+ g2l=True,
148
+ in_channels=[
149
+ 32,
150
+ 64,
151
+ 64,
152
+ 64,
153
+ 64,
154
+ 64,
155
+ ],
156
+ n_channels=5,
157
+ num_patches=[
158
+ 203056,
159
+ 66304,
160
+ 16576,
161
+ 4144,
162
+ 1036,
163
+ 266,
164
+ ],
165
+ patch_process_shape=(
166
+ 392,
167
+ 518,
168
+ ),
169
+ type='GuidedFusionPatchFusion'),
170
+ max_depth=80,
171
+ min_depth=0.001,
172
+ patch_process_shape=(
173
+ 392,
174
+ 518,
175
+ ),
176
+ pretrain_model=[
177
+ './work_dir/depthanything_vits_u4k/coarse_pretrain/checkpoint_24.pth',
178
+ './work_dir/depthanything_vits_u4k/fine_pretrain/checkpoint_24.pth',
179
+ ],
180
+ sigloss=dict(type='SILogLoss'),
181
+ type='PatchFusion')
182
+ optim_wrapper = dict(
183
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
184
+ optimizer=dict(lr=0.0001, type='AdamW', weight_decay=0.001),
185
+ paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict()))
186
+ param_scheduler = dict(
187
+ base_momentum=0.85,
188
+ cycle_momentum=True,
189
+ div_factor=10,
190
+ final_div_factor=10000,
191
+ max_momentum=0.95,
192
+ pct_start=0.25,
193
+ three_phase=False)
194
+ project = 'patchfusion'
195
+ resume = False
196
+ tags = [
197
+ 'patchfusion',
198
+ 'da',
199
+ 'vits',
200
+ ]
201
+ test_in_dataloader = dict(
202
+ batch_size=1,
203
+ dataset=dict(
204
+ data_root='./data/u4k',
205
+ max_depth=80,
206
+ min_depth=0.001,
207
+ mode='infer',
208
+ split='./data/u4k/splits/test.txt',
209
+ transform_cfg=dict(network_process_size=[
210
+ 384,
211
+ 512,
212
+ ]),
213
+ type='UnrealStereo4kDataset'),
214
+ num_workers=2)
215
+ test_out_dataloader = dict(
216
+ batch_size=1,
217
+ dataset=dict(
218
+ data_root='./data/u4k',
219
+ max_depth=80,
220
+ min_depth=0.001,
221
+ mode='infer',
222
+ split='./data/u4k/splits/test_out.txt',
223
+ transform_cfg=dict(network_process_size=[
224
+ 384,
225
+ 512,
226
+ ]),
227
+ type='UnrealStereo4kDataset'),
228
+ num_workers=2)
229
+ train_cfg = dict(
230
+ eval_start=0,
231
+ log_interval=100,
232
+ max_epochs=16,
233
+ save_checkpoint_interval=16,
234
+ train_log_img_interval=500,
235
+ val_interval=2,
236
+ val_log_img_interval=50,
237
+ val_type='epoch_base')
238
+ train_dataloader = dict(
239
+ batch_size=4,
240
+ dataset=dict(
241
+ data_root='./data/u4k',
242
+ max_depth=80,
243
+ min_depth=0.001,
244
+ mode='train',
245
+ resize_mode='depth-anything',
246
+ split='./data/u4k/splits/train.txt',
247
+ transform_cfg=dict(
248
+ degree=1.0,
249
+ network_process_size=[
250
+ 392,
251
+ 518,
252
+ ],
253
+ random_crop=True,
254
+ random_crop_size=(
255
+ 540,
256
+ 960,
257
+ )),
258
+ type='UnrealStereo4kDataset'),
259
+ num_workers=4)
260
+ val_dataloader = dict(
261
+ batch_size=1,
262
+ dataset=dict(
263
+ data_root='./data/u4k',
264
+ max_depth=80,
265
+ min_depth=0.001,
266
+ mode='infer',
267
+ resize_mode='depth-anything',
268
+ split='./data/u4k/splits/val.txt',
269
+ transform_cfg=dict(
270
+ degree=1.0,
271
+ network_process_size=[
272
+ 392,
273
+ 518,
274
+ ],
275
+ random_crop_size=(
276
+ 540,
277
+ 960,
278
+ )),
279
+ type='UnrealStereo4kDataset'),
280
+ num_workers=2)
281
+ work_dir = './work_dir/depthanything_vits_u4k/patchfusion'
282
+ zoe_depth_config = dict(
283
+ attractor_alpha=1000,
284
+ attractor_gamma=2,
285
+ attractor_kind='mean',
286
+ attractor_type='inv',
287
+ aug=True,
288
+ bin_centers_type='softplus',
289
+ bin_embedding_dim=128,
290
+ clip_grad=0.1,
291
+ dataset='nyu',
292
+ depth_anything=True,
293
+ distributed=True,
294
+ do_resize=False,
295
+ force_keep_ar=True,
296
+ freeze_midas_bn=True,
297
+ gpu='NULL',
298
+ img_size=[
299
+ 392,
300
+ 518,
301
+ ],
302
+ inverse_midas=False,
303
+ log_images_every=0.1,
304
+ max_depth=80,
305
+ max_temp=50.0,
306
+ max_translation=100,
307
+ memory_efficient=True,
308
+ midas_model_type='vits',
309
+ min_depth=0.001,
310
+ min_temp=0.0212,
311
+ model='zoedepth',
312
+ n_attractors=[
313
+ 16,
314
+ 8,
315
+ 4,
316
+ 1,
317
+ ],
318
+ n_bins=64,
319
+ name='ZoeDepth',
320
+ notes='',
321
+ output_distribution='logbinomial',
322
+ prefetch=False,
323
+ pretrained_resource='local::./work_dir/DepthAnything_vits.pt',
324
+ print_losses=False,
325
+ project='ZoeDepth',
326
+ random_crop=False,
327
+ random_translate=False,
328
+ root='.',
329
+ save_dir='',
330
+ shared_dict='NULL',
331
+ tags='',
332
+ train_midas=True,
333
+ translate_prob=0.2,
334
+ type='DA-ZoeDepth',
335
+ uid='NULL',
336
+ use_amp=False,
337
+ use_pretrained_midas=True,
338
+ use_shared_dict=False,
339
+ validate_every=0.25,
340
+ version_name='v1',
341
+ workers=16)
zoedepth_u4k/coarse_pretrain/20240313_154004.log ADDED
The diff for this file is too large to render. See raw diff
 
zoedepth_u4k/coarse_pretrain/checkpoint_24.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b241f5a664d9b8b27f138d677ca56a9b1629838c4529cced25d00043daa85950
3
+ size 4184807605
zoedepth_u4k/coarse_pretrain/config.py ADDED
@@ -0,0 +1,307 @@
1
+ collect_input_args = [
2
+ 'image_lr',
3
+ 'crops_image_hr',
4
+ 'depth_gt',
5
+ 'crop_depths',
6
+ 'bboxs',
7
+ 'image_hr',
8
+ ]
9
+ convert_syncbn = True
10
+ debug = True
11
+ env_cfg = dict(
12
+ cudnn_benchmark=True,
13
+ dist_cfg=dict(backend='nccl'),
14
+ mp_cfg=dict(mp_start_method='forkserver'))
15
+ find_unused_parameters = True
16
+ general_dataloader = dict(
17
+ batch_size=1,
18
+ dataset=dict(
19
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
20
+ num_workers=2)
21
+ launcher = 'pytorch'
22
+ log_name = 'coarse_pretrain'
23
+ max_depth = 80
24
+ min_depth = 0.001
25
+ model = dict(
26
+ coarse_branch=dict(
27
+ attractor_alpha=1000,
28
+ attractor_gamma=2,
29
+ attractor_kind='mean',
30
+ attractor_type='inv',
31
+ aug=True,
32
+ bin_centers_type='softplus',
33
+ bin_embedding_dim=128,
34
+ clip_grad=0.1,
35
+ dataset='nyu',
36
+ distributed=True,
37
+ do_resize=False,
38
+ force_keep_ar=True,
39
+ freeze_midas_bn=True,
40
+ gpu='NULL',
41
+ img_size=[
42
+ 384,
43
+ 512,
44
+ ],
45
+ inverse_midas=False,
46
+ log_images_every=0.1,
47
+ max_depth=80,
48
+ max_temp=50.0,
49
+ max_translation=100,
50
+ memory_efficient=True,
51
+ midas_model_type='DPT_BEiT_L_384',
52
+ min_depth=0.001,
53
+ min_temp=0.0212,
54
+ model='zoedepth',
55
+ n_attractors=[
56
+ 16,
57
+ 8,
58
+ 4,
59
+ 1,
60
+ ],
61
+ n_bins=64,
62
+ name='ZoeDepth',
63
+ notes='',
64
+ output_distribution='logbinomial',
65
+ prefetch=False,
66
+ pretrained_resource=
67
+ 'local::./work_dir/ZoeDepthv1_30-Dec_16-29-4e2bc436e4e1_best.pt',
68
+ print_losses=False,
69
+ project='ZoeDepth',
70
+ random_crop=False,
71
+ random_translate=False,
72
+ root='.',
73
+ save_dir='',
74
+ shared_dict='NULL',
75
+ tags='',
76
+ train_midas=True,
77
+ translate_prob=0.2,
78
+ type='ZoeDepth',
79
+ uid='NULL',
80
+ use_amp=False,
81
+ use_pretrained_midas=True,
82
+ use_shared_dict=False,
83
+ validate_every=0.25,
84
+ version_name='v1',
85
+ workers=16),
86
+ fine_branch=dict(
87
+ attractor_alpha=1000,
88
+ attractor_gamma=2,
89
+ attractor_kind='mean',
90
+ attractor_type='inv',
91
+ aug=True,
92
+ bin_centers_type='softplus',
93
+ bin_embedding_dim=128,
94
+ clip_grad=0.1,
95
+ dataset='nyu',
96
+ distributed=True,
97
+ do_resize=False,
98
+ force_keep_ar=True,
99
+ freeze_midas_bn=True,
100
+ gpu='NULL',
101
+ img_size=[
102
+ 384,
103
+ 512,
104
+ ],
105
+ inverse_midas=False,
106
+ log_images_every=0.1,
107
+ max_depth=80,
108
+ max_temp=50.0,
109
+ max_translation=100,
110
+ memory_efficient=True,
111
+ midas_model_type='DPT_BEiT_L_384',
112
+ min_depth=0.001,
113
+ min_temp=0.0212,
114
+ model='zoedepth',
115
+ n_attractors=[
116
+ 16,
117
+ 8,
118
+ 4,
119
+ 1,
120
+ ],
121
+ n_bins=64,
122
+ name='ZoeDepth',
123
+ notes='',
124
+ output_distribution='logbinomial',
125
+ prefetch=False,
126
+ pretrained_resource=
127
+ 'local::./work_dir/ZoeDepthv1_30-Dec_16-29-4e2bc436e4e1_best.pt',
128
+ print_losses=False,
129
+ project='ZoeDepth',
130
+ random_crop=False,
131
+ random_translate=False,
132
+ root='.',
133
+ save_dir='',
134
+ shared_dict='NULL',
135
+ tags='',
136
+ train_midas=True,
137
+ translate_prob=0.2,
138
+ type='ZoeDepth',
139
+ uid='NULL',
140
+ use_amp=False,
141
+ use_pretrained_midas=True,
142
+ use_shared_dict=False,
143
+ validate_every=0.25,
144
+ version_name='v1',
145
+ workers=16),
146
+ max_depth=80,
147
+ min_depth=0.001,
148
+ sigloss=dict(type='SILogLoss'),
149
+ target='coarse',
150
+ type='BaselinePretrain')
151
+ optim_wrapper = dict(
152
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
153
+ optimizer=dict(lr=0.0002, type='AdamW', weight_decay=0.01),
154
+ paramwise_cfg=dict(
155
+ bypass_duplicate=True,
156
+ custom_keys=dict(
157
+ {'coarse_branch.core': dict(decay_mult=1.0, lr_mult=0.1)})))
158
+ param_scheduler = dict(
159
+ base_momentum=0.85,
160
+ cycle_momentum=True,
161
+ div_factor=1,
162
+ final_div_factor=10000,
163
+ max_momentum=0.95,
164
+ pct_start=0.5,
165
+ three_phase=False)
166
+ project = 'patchfusion'
167
+ resume = False
168
+ tags = [
169
+ 'pcoarse',
170
+ ]
171
+ test_in_dataloader = dict(
172
+ batch_size=1,
173
+ dataset=dict(
174
+ data_root='./data/u4k',
175
+ max_depth=80,
176
+ min_depth=0.001,
177
+ mode='infer',
178
+ split='./data/u4k/splits/test.txt',
179
+ transform_cfg=dict(network_process_size=[
180
+ 384,
181
+ 512,
182
+ ]),
183
+ type='UnrealStereo4kDataset'),
184
+ num_workers=2)
185
+ test_out_dataloader = dict(
186
+ batch_size=1,
187
+ dataset=dict(
188
+ data_root='./data/u4k',
189
+ max_depth=80,
190
+ min_depth=0.001,
191
+ mode='infer',
192
+ split='./data/u4k/splits/test_out.txt',
193
+ transform_cfg=dict(network_process_size=[
194
+ 384,
195
+ 512,
196
+ ]),
197
+ type='UnrealStereo4kDataset'),
198
+ num_workers=2)
199
+ train_cfg = dict(
200
+ eval_start=0,
201
+ log_interval=100,
202
+ max_epochs=24,
203
+ save_checkpoint_interval=24,
204
+ train_log_img_interval=100,
205
+ val_interval=2,
206
+ val_log_img_interval=50,
207
+ val_type='epoch_base')
208
+ train_dataloader = dict(
209
+ batch_size=4,
210
+ dataset=dict(
211
+ data_root='./data/u4k',
212
+ max_depth=80,
213
+ min_depth=0.001,
214
+ mode='train',
215
+ split='./data/u4k/splits/train.txt',
216
+ transform_cfg=dict(
217
+ degree=1.0,
218
+ network_process_size=[
219
+ 384,
220
+ 512,
221
+ ],
222
+ random_crop=True,
223
+ random_crop_size=(
224
+ 540,
225
+ 960,
226
+ )),
227
+ type='UnrealStereo4kDataset'),
228
+ num_workers=4)
229
+ val_dataloader = dict(
230
+ batch_size=1,
231
+ dataset=dict(
232
+ data_root='./data/u4k',
233
+ max_depth=80,
234
+ min_depth=0.001,
235
+ mode='infer',
236
+ split='./data/u4k/splits/val.txt',
237
+ transform_cfg=dict(
238
+ network_process_size=[
239
+ 384,
240
+ 512,
241
+ ], random_crop_size=(
242
+ 540,
243
+ 960,
244
+ )),
245
+ type='UnrealStereo4kDataset'),
246
+ num_workers=2)
247
+ work_dir = './work_dir/coarse_pretrain'
248
+ zoe_depth_config = dict(
249
+ attractor_alpha=1000,
250
+ attractor_gamma=2,
251
+ attractor_kind='mean',
252
+ attractor_type='inv',
253
+ aug=True,
254
+ bin_centers_type='softplus',
255
+ bin_embedding_dim=128,
256
+ clip_grad=0.1,
257
+ dataset='nyu',
258
+ distributed=True,
259
+ do_resize=False,
260
+ force_keep_ar=True,
261
+ freeze_midas_bn=True,
262
+ gpu='NULL',
263
+ img_size=[
264
+ 384,
265
+ 512,
266
+ ],
267
+ inverse_midas=False,
268
+ log_images_every=0.1,
269
+ max_depth=80,
270
+ max_temp=50.0,
271
+ max_translation=100,
272
+ memory_efficient=True,
273
+ midas_model_type='DPT_BEiT_L_384',
274
+ min_depth=0.001,
275
+ min_temp=0.0212,
276
+ model='zoedepth',
277
+ n_attractors=[
278
+ 16,
279
+ 8,
280
+ 4,
281
+ 1,
282
+ ],
283
+ n_bins=64,
284
+ name='ZoeDepth',
285
+ notes='',
286
+ output_distribution='logbinomial',
287
+ prefetch=False,
288
+ pretrained_resource=
289
+ 'local::./work_dir/ZoeDepthv1_30-Dec_16-29-4e2bc436e4e1_best.pt',
290
+ print_losses=False,
291
+ project='ZoeDepth',
292
+ random_crop=False,
293
+ random_translate=False,
294
+ root='.',
295
+ save_dir='',
296
+ shared_dict='NULL',
297
+ tags='',
298
+ train_midas=True,
299
+ translate_prob=0.2,
300
+ type='ZoeDepth',
301
+ uid='NULL',
302
+ use_amp=False,
303
+ use_pretrained_midas=True,
304
+ use_shared_dict=False,
305
+ validate_every=0.25,
306
+ version_name='v1',
307
+ workers=16)
zoedepth_u4k/fine_pretrain/20240313_205222.log ADDED
The diff for this file is too large to render. See raw diff
 
zoedepth_u4k/fine_pretrain/checkpoint_24.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8915b17cf436ed3e1e4cfe9c3eecdfb806aa5dc0f66924de6e785f4a16c431e2
3
+ size 4184807669
zoedepth_u4k/fine_pretrain/config.py ADDED
@@ -0,0 +1,307 @@
1
+ collect_input_args = [
2
+ 'image_lr',
3
+ 'crops_image_hr',
4
+ 'depth_gt',
5
+ 'crop_depths',
6
+ 'bboxs',
7
+ 'image_hr',
8
+ ]
9
+ convert_syncbn = True
10
+ debug = True
11
+ env_cfg = dict(
12
+ cudnn_benchmark=True,
13
+ dist_cfg=dict(backend='nccl'),
14
+ mp_cfg=dict(mp_start_method='forkserver'))
15
+ find_unused_parameters = True
16
+ general_dataloader = dict(
17
+ batch_size=1,
18
+ dataset=dict(
19
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
20
+ num_workers=2)
21
+ launcher = 'pytorch'
22
+ log_name = 'fine_pretrain'
23
+ max_depth = 80
24
+ min_depth = 0.001
25
+ model = dict(
26
+ coarse_branch=dict(
27
+ attractor_alpha=1000,
28
+ attractor_gamma=2,
29
+ attractor_kind='mean',
30
+ attractor_type='inv',
31
+ aug=True,
32
+ bin_centers_type='softplus',
33
+ bin_embedding_dim=128,
34
+ clip_grad=0.1,
35
+ dataset='nyu',
36
+ distributed=True,
37
+ do_resize=False,
38
+ force_keep_ar=True,
39
+ freeze_midas_bn=True,
40
+ gpu='NULL',
41
+ img_size=[
42
+ 384,
43
+ 512,
44
+ ],
45
+ inverse_midas=False,
46
+ log_images_every=0.1,
47
+ max_depth=80,
48
+ max_temp=50.0,
49
+ max_translation=100,
50
+ memory_efficient=True,
51
+ midas_model_type='DPT_BEiT_L_384',
52
+ min_depth=0.001,
53
+ min_temp=0.0212,
54
+ model='zoedepth',
55
+ n_attractors=[
56
+ 16,
57
+ 8,
58
+ 4,
59
+ 1,
60
+ ],
61
+ n_bins=64,
62
+ name='ZoeDepth',
63
+ notes='',
64
+ output_distribution='logbinomial',
65
+ prefetch=False,
66
+ pretrained_resource=
67
+ 'local::./work_dir/ZoeDepthv1_30-Dec_16-29-4e2bc436e4e1_best.pt',
68
+ print_losses=False,
69
+ project='ZoeDepth',
70
+ random_crop=False,
71
+ random_translate=False,
72
+ root='.',
73
+ save_dir='',
74
+ shared_dict='NULL',
75
+ tags='',
76
+ train_midas=True,
77
+ translate_prob=0.2,
78
+ type='ZoeDepth',
79
+ uid='NULL',
80
+ use_amp=False,
81
+ use_pretrained_midas=True,
82
+ use_shared_dict=False,
83
+ validate_every=0.25,
84
+ version_name='v1',
85
+ workers=16),
86
+ fine_branch=dict(
87
+ attractor_alpha=1000,
88
+ attractor_gamma=2,
89
+ attractor_kind='mean',
90
+ attractor_type='inv',
91
+ aug=True,
92
+ bin_centers_type='softplus',
93
+ bin_embedding_dim=128,
94
+ clip_grad=0.1,
95
+ dataset='nyu',
96
+ distributed=True,
97
+ do_resize=False,
98
+ force_keep_ar=True,
99
+ freeze_midas_bn=True,
100
+ gpu='NULL',
101
+ img_size=[
102
+ 384,
103
+ 512,
104
+ ],
105
+ inverse_midas=False,
106
+ log_images_every=0.1,
107
+ max_depth=80,
108
+ max_temp=50.0,
109
+ max_translation=100,
110
+ memory_efficient=True,
111
+ midas_model_type='DPT_BEiT_L_384',
112
+ min_depth=0.001,
113
+ min_temp=0.0212,
114
+ model='zoedepth',
115
+ n_attractors=[
116
+ 16,
117
+ 8,
118
+ 4,
119
+ 1,
120
+ ],
121
+ n_bins=64,
122
+ name='ZoeDepth',
123
+ notes='',
124
+ output_distribution='logbinomial',
125
+ prefetch=False,
126
+ pretrained_resource=
127
+ 'local::./work_dir/ZoeDepthv1_30-Dec_16-29-4e2bc436e4e1_best.pt',
128
+ print_losses=False,
129
+ project='ZoeDepth',
130
+ random_crop=False,
131
+ random_translate=False,
132
+ root='.',
133
+ save_dir='',
134
+ shared_dict='NULL',
135
+ tags='',
136
+ train_midas=True,
137
+ translate_prob=0.2,
138
+ type='ZoeDepth',
139
+ uid='NULL',
140
+ use_amp=False,
141
+ use_pretrained_midas=True,
142
+ use_shared_dict=False,
143
+ validate_every=0.25,
144
+ version_name='v1',
145
+ workers=16),
146
+ max_depth=80,
147
+ min_depth=0.001,
148
+ sigloss=dict(type='SILogLoss'),
149
+ target='fine',
150
+ type='BaselinePretrain')
151
+ optim_wrapper = dict(
152
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
153
+ optimizer=dict(lr=0.0002, type='AdamW', weight_decay=0.01),
154
+ paramwise_cfg=dict(
155
+ bypass_duplicate=True,
156
+ custom_keys=dict(
157
+ {'fine_branch.core': dict(decay_mult=1.0, lr_mult=0.1)})))
158
+ param_scheduler = dict(
159
+ base_momentum=0.85,
160
+ cycle_momentum=True,
161
+ div_factor=1,
162
+ final_div_factor=10000,
163
+ max_momentum=0.95,
164
+ pct_start=0.5,
165
+ three_phase=False)
166
+ project = 'patchfusion'
167
+ resume = False
168
+ tags = [
169
+ 'fine',
170
+ ]
171
+ test_in_dataloader = dict(
172
+ batch_size=1,
173
+ dataset=dict(
174
+ data_root='./data/u4k',
175
+ max_depth=80,
176
+ min_depth=0.001,
177
+ mode='infer',
178
+ split='./data/u4k/splits/test.txt',
179
+ transform_cfg=dict(network_process_size=[
180
+ 384,
181
+ 512,
182
+ ]),
183
+ type='UnrealStereo4kDataset'),
184
+ num_workers=2)
185
+ test_out_dataloader = dict(
186
+ batch_size=1,
187
+ dataset=dict(
188
+ data_root='./data/u4k',
189
+ max_depth=80,
190
+ min_depth=0.001,
191
+ mode='infer',
192
+ split='./data/u4k/splits/test_out.txt',
193
+ transform_cfg=dict(network_process_size=[
194
+ 384,
195
+ 512,
196
+ ]),
197
+ type='UnrealStereo4kDataset'),
198
+ num_workers=2)
199
+ train_cfg = dict(
200
+ eval_start=0,
201
+ log_interval=100,
202
+ max_epochs=24,
203
+ save_checkpoint_interval=24,
204
+ train_log_img_interval=100,
205
+ val_interval=2,
206
+ val_log_img_interval=50,
207
+ val_type='epoch_base')
208
+ train_dataloader = dict(
209
+ batch_size=4,
210
+ dataset=dict(
211
+ data_root='./data/u4k',
212
+ max_depth=80,
213
+ min_depth=0.001,
214
+ mode='train',
215
+ split='./data/u4k/splits/train.txt',
216
+ transform_cfg=dict(
217
+ degree=1.0,
218
+ network_process_size=[
219
+ 384,
220
+ 512,
221
+ ],
222
+ random_crop=True,
223
+ random_crop_size=(
224
+ 540,
225
+ 960,
226
+ )),
227
+ type='UnrealStereo4kDataset'),
228
+ num_workers=4)
229
+ val_dataloader = dict(
230
+ batch_size=1,
231
+ dataset=dict(
232
+ data_root='./data/u4k',
233
+ max_depth=80,
234
+ min_depth=0.001,
235
+ mode='infer',
236
+ split='./data/u4k/splits/val.txt',
237
+ transform_cfg=dict(
238
+ network_process_size=[
239
+ 384,
240
+ 512,
241
+ ], random_crop_size=(
242
+ 540,
243
+ 960,
244
+ )),
245
+ type='UnrealStereo4kDataset'),
246
+ num_workers=2)
247
+ work_dir = './work_dir/fine_pretrain'
248
+ zoe_depth_config = dict(
249
+ attractor_alpha=1000,
250
+ attractor_gamma=2,
251
+ attractor_kind='mean',
252
+ attractor_type='inv',
253
+ aug=True,
254
+ bin_centers_type='softplus',
255
+ bin_embedding_dim=128,
256
+ clip_grad=0.1,
257
+ dataset='nyu',
258
+ distributed=True,
259
+ do_resize=False,
260
+ force_keep_ar=True,
261
+ freeze_midas_bn=True,
262
+ gpu='NULL',
263
+ img_size=[
264
+ 384,
265
+ 512,
266
+ ],
267
+ inverse_midas=False,
268
+ log_images_every=0.1,
269
+ max_depth=80,
270
+ max_temp=50.0,
271
+ max_translation=100,
272
+ memory_efficient=True,
273
+ midas_model_type='DPT_BEiT_L_384',
274
+ min_depth=0.001,
275
+ min_temp=0.0212,
276
+ model='zoedepth',
277
+ n_attractors=[
278
+ 16,
279
+ 8,
280
+ 4,
281
+ 1,
282
+ ],
283
+ n_bins=64,
284
+ name='ZoeDepth',
285
+ notes='',
286
+ output_distribution='logbinomial',
287
+ prefetch=False,
288
+ pretrained_resource=
289
+ 'local::./work_dir/ZoeDepthv1_30-Dec_16-29-4e2bc436e4e1_best.pt',
290
+ print_losses=False,
291
+ project='ZoeDepth',
292
+ random_crop=False,
293
+ random_translate=False,
294
+ root='.',
295
+ save_dir='',
296
+ shared_dict='NULL',
297
+ tags='',
298
+ train_midas=True,
299
+ translate_prob=0.2,
300
+ type='ZoeDepth',
301
+ uid='NULL',
302
+ use_amp=False,
303
+ use_pretrained_midas=True,
304
+ use_shared_dict=False,
305
+ validate_every=0.25,
306
+ version_name='v1',
307
+ workers=16)
zoedepth_u4k/patchfusion/20240314_171340.log ADDED
The diff for this file is too large to render. See raw diff
 
zoedepth_u4k/patchfusion/checkpoint_16.pth ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e414483120fa6b95ced56e55b3cb5e711b076f9cc1f62a5c54d1edecc5c90fab
3
+ size 1055616493
zoedepth_u4k/patchfusion/config.py ADDED
@@ -0,0 +1,305 @@
1
+ collect_input_args = [
2
+ 'image_lr',
3
+ 'crops_image_hr',
4
+ 'depth_gt',
5
+ 'crop_depths',
6
+ 'bboxs',
7
+ 'image_hr',
8
+ ]
9
+ convert_syncbn = True
10
+ debug = False
11
+ env_cfg = dict(
12
+ cudnn_benchmark=True,
13
+ dist_cfg=dict(backend='nccl'),
14
+ mp_cfg=dict(mp_start_method='forkserver'))
15
+ find_unused_parameters = True
16
+ general_dataloader = dict(
17
+ batch_size=1,
18
+ dataset=dict(
19
+ dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'),
20
+ num_workers=2)
21
+ launcher = 'pytorch'
22
+ log_name = 'patchfusion'
23
+ max_depth = 80
24
+ min_depth = 0.001
25
+ model = dict(
26
+ coarse_branch=dict(
27
+ attractor_alpha=1000,
28
+ attractor_gamma=2,
29
+ attractor_kind='mean',
30
+ attractor_type='inv',
31
+ aug=True,
32
+ bin_centers_type='softplus',
33
+ bin_embedding_dim=128,
34
+ clip_grad=0.1,
35
+ dataset='nyu',
36
+ distributed=True,
37
+ do_resize=False,
38
+ force_keep_ar=True,
39
+ freeze_midas_bn=True,
40
+ gpu='NULL',
41
+ img_size=[
42
+ 384,
43
+ 512,
44
+ ],
45
+ inverse_midas=False,
46
+ log_images_every=0.1,
47
+ max_depth=80,
48
+ max_temp=50.0,
49
+ max_translation=100,
50
+ memory_efficient=True,
51
+ midas_model_type='DPT_BEiT_L_384',
52
+ min_depth=0.001,
53
+ min_temp=0.0212,
54
+ model='zoedepth',
55
+ n_attractors=[
56
+ 16,
57
+ 8,
58
+ 4,
59
+ 1,
60
+ ],
61
+ n_bins=64,
62
+ name='ZoeDepth',
63
+ notes='',
64
+ output_distribution='logbinomial',
65
+ prefetch=False,
66
+ pretrained_resource='local::./work_dir/ZoeDepthv1.pt',
67
+ print_losses=False,
68
+ project='ZoeDepth',
69
+ random_crop=False,
70
+ random_translate=False,
71
+ root='.',
72
+ save_dir='',
73
+ shared_dict='NULL',
74
+ tags='',
75
+ train_midas=True,
76
+ translate_prob=0.2,
77
+ type='ZoeDepth',
78
+ uid='NULL',
79
+ use_amp=False,
80
+ use_pretrained_midas=True,
81
+ use_shared_dict=False,
82
+ validate_every=0.25,
83
+ version_name='v1',
84
+ workers=16),
85
+ fine_branch=dict(
86
+ attractor_alpha=1000,
87
+ attractor_gamma=2,
88
+ attractor_kind='mean',
89
+ attractor_type='inv',
90
+ aug=True,
91
+ bin_centers_type='softplus',
92
+ bin_embedding_dim=128,
93
+ clip_grad=0.1,
94
+ dataset='nyu',
95
+ distributed=True,
96
+ do_resize=False,
97
+ force_keep_ar=True,
98
+ freeze_midas_bn=True,
99
+ gpu='NULL',
100
+ img_size=[
101
+ 384,
102
+ 512,
103
+ ],
104
+ inverse_midas=False,
105
+ log_images_every=0.1,
106
+ max_depth=80,
107
+ max_temp=50.0,
108
+ max_translation=100,
109
+ memory_efficient=True,
110
+ midas_model_type='DPT_BEiT_L_384',
111
+ min_depth=0.001,
112
+ min_temp=0.0212,
113
+ model='zoedepth',
114
+ n_attractors=[
115
+ 16,
116
+ 8,
117
+ 4,
118
+ 1,
119
+ ],
120
+ n_bins=64,
121
+ name='ZoeDepth',
122
+ notes='',
123
+ output_distribution='logbinomial',
124
+ prefetch=False,
125
+ pretrained_resource='local::./work_dir/ZoeDepthv1.pt',
126
+ print_losses=False,
127
+ project='ZoeDepth',
128
+ random_crop=False,
129
+ random_translate=False,
130
+ root='.',
131
+ save_dir='',
132
+ shared_dict='NULL',
133
+ tags='',
134
+ train_midas=True,
135
+ translate_prob=0.2,
136
+ type='ZoeDepth',
137
+ uid='NULL',
138
+ use_amp=False,
139
+ use_pretrained_midas=True,
140
+ use_shared_dict=False,
141
+ validate_every=0.25,
142
+ version_name='v1',
143
+ workers=16),
144
+ guided_fusion=dict(g2l=True, n_channels=5, type='GuidedFusionPatchFusion'),
145
+ max_depth=80,
146
+ min_depth=0.001,
147
+ pretrain_model=[
148
+ './work_dir/coarse_pretrain/checkpoint_24.pth',
149
+ './work_dir/fine_pretrain/checkpoint_24.pth',
150
+ ],
151
+ sigloss=dict(type='SILogLoss'),
152
+ type='PatchFusion')
153
+ optim_wrapper = dict(
154
+ clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'),
155
+ optimizer=dict(lr=0.0001, type='AdamW', weight_decay=0.001),
156
+ paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict()))
157
+ param_scheduler = dict(
158
+ base_momentum=0.85,
159
+ cycle_momentum=True,
160
+ div_factor=10,
161
+ final_div_factor=10000,
162
+ max_momentum=0.95,
163
+ pct_start=0.25,
164
+ three_phase=False)
165
+ project = 'patchfusion'
166
+ resume = False
167
+ tags = [
168
+ 'patchfusion',
169
+ ]
170
+ test_in_dataloader = dict(
171
+ batch_size=1,
172
+ dataset=dict(
173
+ data_root='./data/u4k',
174
+ max_depth=80,
175
+ min_depth=0.001,
176
+ mode='infer',
177
+ split='./data/u4k/splits/test.txt',
178
+ transform_cfg=dict(network_process_size=[
179
+ 384,
180
+ 512,
181
+ ]),
182
+ type='UnrealStereo4kDataset'),
183
+ num_workers=2)
184
+ test_out_dataloader = dict(
185
+ batch_size=1,
186
+ dataset=dict(
187
+ data_root='./data/u4k',
188
+ max_depth=80,
189
+ min_depth=0.001,
190
+ mode='infer',
191
+ split='./data/u4k/splits/test_out.txt',
192
+ transform_cfg=dict(network_process_size=[
193
+ 384,
194
+ 512,
195
+ ]),
196
+ type='UnrealStereo4kDataset'),
197
+ num_workers=2)
198
+ train_cfg = dict(
199
+ eval_start=0,
200
+ log_interval=100,
201
+ max_epochs=16,
202
+ save_checkpoint_interval=16,
203
+ train_log_img_interval=500,
204
+ val_interval=2,
205
+ val_log_img_interval=10,
206
+ val_type='epoch_base')
207
+ train_dataloader = dict(
208
+ batch_size=4,
209
+ dataset=dict(
210
+ data_root='./data/u4k',
211
+ max_depth=80,
212
+ min_depth=0.001,
213
+ mode='train',
214
+ split='./data/u4k/splits/train.txt',
215
+ transform_cfg=dict(
216
+ degree=1.0,
217
+ network_process_size=[
218
+ 384,
219
+ 512,
220
+ ],
221
+ random_crop=True,
222
+ random_crop_size=(
223
+ 540,
224
+ 960,
225
+ )),
226
+ type='UnrealStereo4kDataset'),
227
+ num_workers=4)
228
+ val_dataloader = dict(
229
+ batch_size=1,
230
+ dataset=dict(
231
+ data_root='./data/u4k',
232
+ max_depth=80,
233
+ min_depth=0.001,
234
+ mode='infer',
235
+ split='./data/u4k/splits/val.txt',
236
+ transform_cfg=dict(
237
+ network_process_size=[
238
+ 384,
239
+ 512,
240
+ ], random_crop_size=(
241
+ 540,
242
+ 960,
243
+ )),
244
+ type='UnrealStereo4kDataset'),
245
+ num_workers=2)
246
+ work_dir = './work_dir/patchfusion'
247
+ zoe_depth_config = dict(
248
+ attractor_alpha=1000,
249
+ attractor_gamma=2,
250
+ attractor_kind='mean',
251
+ attractor_type='inv',
252
+ aug=True,
253
+ bin_centers_type='softplus',
254
+ bin_embedding_dim=128,
255
+ clip_grad=0.1,
256
+ dataset='nyu',
257
+ distributed=True,
258
+ do_resize=False,
259
+ force_keep_ar=True,
260
+ freeze_midas_bn=True,
261
+ gpu='NULL',
262
+ img_size=[
263
+ 384,
264
+ 512,
265
+ ],
266
+ inverse_midas=False,
267
+ log_images_every=0.1,
268
+ max_depth=80,
269
+ max_temp=50.0,
270
+ max_translation=100,
271
+ memory_efficient=True,
272
+ midas_model_type='DPT_BEiT_L_384',
273
+ min_depth=0.001,
274
+ min_temp=0.0212,
275
+ model='zoedepth',
276
+ n_attractors=[
277
+ 16,
278
+ 8,
279
+ 4,
280
+ 1,
281
+ ],
282
+ n_bins=64,
283
+ name='ZoeDepth',
284
+ notes='',
285
+ output_distribution='logbinomial',
286
+ prefetch=False,
287
+ pretrained_resource='local::./work_dir/ZoeDepthv1.pt',
288
+ print_losses=False,
289
+ project='ZoeDepth',
290
+ random_crop=False,
291
+ random_translate=False,
292
+ root='.',
293
+ save_dir='',
294
+ shared_dict='NULL',
295
+ tags='',
296
+ train_midas=True,
297
+ translate_prob=0.2,
298
+ type='ZoeDepth',
299
+ uid='NULL',
300
+ use_amp=False,
301
+ use_pretrained_midas=True,
302
+ use_shared_dict=False,
303
+ validate_every=0.25,
304
+ version_name='v1',
305
+ workers=16)