diff --git "a/depthanything_vitl_u4k/patchfusion/20240315_175237.log" "b/depthanything_vitl_u4k/patchfusion/20240315_175237.log" new file mode 100644--- /dev/null +++ "b/depthanything_vitl_u4k/patchfusion/20240315_175237.log" @@ -0,0 +1,1604 @@ +2024/03/15 17:52:47 - patchstitcher - INFO - +------------------------------------------------------------ +System environment: + sys.platform: linux + Python: 3.8.18 | packaged by conda-forge | (default, Oct 10 2023, 15:44:36) [GCC 12.3.0] + CUDA available: True + numpy_random_seed: 621 + GPU 0,1,2,3: NVIDIA A100-SXM4-80GB + CUDA_HOME: /sw/rl9g/cuda/11.8/rl9_binary + NVCC: Cuda compilation tools, release 11.8, V11.8.89 + GCC: gcc (GCC) 11.3.1 20220421 (Red Hat 11.3.1-2) + PyTorch: 2.1.2 + PyTorch compiling details: PyTorch built with: + - GCC 9.3 + - C++ Version: 201703 + - Intel(R) oneAPI Math Kernel Library Version 2022.1-Product Build 20220311 for Intel(R) 64 architecture applications + - Intel(R) MKL-DNN v3.1.1 (Git Hash 64f6bcbcbab628e96f33a62c3e975f8535a7bde4) + - OpenMP 201511 (a.k.a. OpenMP 4.5) + - LAPACK is enabled (usually provided by MKL) + - NNPACK is enabled + - CPU capability usage: AVX2 + - CUDA Runtime 11.8 + - NVCC architecture flags: -gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_61,code=sm_61;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86;-gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_90,code=sm_90;-gencode;arch=compute_37,code=compute_37 + - CuDNN 8.7 + - Magma 2.6.1 + - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=11.8, CUDNN_VERSION=8.7.0, CXX_COMPILER=/opt/rh/devtoolset-9/root/usr/bin/c++, CXX_FLAGS= -D_GLIBCXX_USE_CXX11_ABI=0 -fabi-version=11 -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -DNDEBUG -DUSE_KINETO -DLIBKINETO_NOROCTRACER -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wall -Wextra -Werror=return-type -Werror=non-virtual-dtor -Werror=bool-operation -Wnarrowing -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-unused-parameter -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-stringop-overflow -Wno-psabi -Wno-error=pedantic -Wno-error=old-style-cast -Wno-invalid-partial-specialization -Wno-unused-private-field -Wno-aligned-allocation-unavailable -Wno-missing-braces -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Werror=cast-function-type -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_DISABLE_GPU_ASSERTS=ON, TORCH_VERSION=2.1.2, USE_CUDA=ON, USE_CUDNN=ON, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF, + + TorchVision: 0.16.2 + OpenCV: 4.8.1 + MMEngine: 0.10.2 + +Runtime environment: + cudnn_benchmark: True + mp_cfg: {'mp_start_method': 'forkserver'} + dist_cfg: {'backend': 'nccl'} + seed: 621 + Distributed launcher: pytorch + Distributed training: True + GPU number: 4 +------------------------------------------------------------ + +2024/03/15 17:52:48 - patchstitcher - INFO - Config: +collect_input_args = [ + 'image_lr', + 'crops_image_hr', + 'depth_gt', + 'crop_depths', + 'bboxs', + 'image_hr', +] +convert_syncbn = True +debug = False +env_cfg = dict( + 
cudnn_benchmark=True, + dist_cfg=dict(backend='nccl'), + mp_cfg=dict(mp_start_method='forkserver')) +find_unused_parameters = True +general_dataloader = dict( + batch_size=1, + dataset=dict( + dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'), + num_workers=2) +launcher = 'pytorch' +log_name = 'patchfusion' +max_depth = 80 +min_depth = 0.001 +model = dict( + coarse_branch=dict( + attractor_alpha=1000, + attractor_gamma=2, + attractor_kind='mean', + attractor_type='inv', + aug=True, + bin_centers_type='softplus', + bin_embedding_dim=128, + clip_grad=0.1, + dataset='nyu', + depth_anything=True, + distributed=True, + do_resize=False, + force_keep_ar=True, + freeze_midas_bn=True, + gpu='NULL', + img_size=[ + 392, + 518, + ], + inverse_midas=False, + log_images_every=0.1, + max_depth=80, + max_temp=50.0, + max_translation=100, + memory_efficient=True, + midas_model_type='vitl', + min_depth=0.001, + min_temp=0.0212, + model='zoedepth', + n_attractors=[ + 16, + 8, + 4, + 1, + ], + n_bins=64, + name='ZoeDepth', + notes='', + output_distribution='logbinomial', + prefetch=False, + pretrained_resource='local::./work_dir/DepthAnything_vitl.pt', + print_losses=False, + project='ZoeDepth', + random_crop=False, + random_translate=False, + root='.', + save_dir='', + shared_dict='NULL', + tags='', + train_midas=True, + translate_prob=0.2, + type='DA-ZoeDepth', + uid='NULL', + use_amp=False, + use_pretrained_midas=True, + use_shared_dict=False, + validate_every=0.25, + version_name='v1', + workers=16), + fine_branch=dict( + attractor_alpha=1000, + attractor_gamma=2, + attractor_kind='mean', + attractor_type='inv', + aug=True, + bin_centers_type='softplus', + bin_embedding_dim=128, + clip_grad=0.1, + dataset='nyu', + depth_anything=True, + distributed=True, + do_resize=False, + force_keep_ar=True, + freeze_midas_bn=True, + gpu='NULL', + img_size=[ + 392, + 518, + ], + inverse_midas=False, + log_images_every=0.1, + max_depth=80, + max_temp=50.0, + max_translation=100, + memory_efficient=True, + midas_model_type='vitl', + min_depth=0.001, + min_temp=0.0212, + model='zoedepth', + n_attractors=[ + 16, + 8, + 4, + 1, + ], + n_bins=64, + name='ZoeDepth', + notes='', + output_distribution='logbinomial', + prefetch=False, + pretrained_resource='local::./work_dir/DepthAnything_vitl.pt', + print_losses=False, + project='ZoeDepth', + random_crop=False, + random_translate=False, + root='.', + save_dir='', + shared_dict='NULL', + tags='', + train_midas=True, + translate_prob=0.2, + type='DA-ZoeDepth', + uid='NULL', + use_amp=False, + use_pretrained_midas=True, + use_shared_dict=False, + validate_every=0.25, + version_name='v1', + workers=16), + guided_fusion=dict( + g2l=True, + in_channels=[ + 32, + 256, + 256, + 256, + 256, + 256, + ], + n_channels=5, + num_patches=[ + 203056, + 66304, + 16576, + 4144, + 1036, + 266, + ], + patch_process_shape=( + 392, + 518, + ), + type='GuidedFusionPatchFusion'), + max_depth=80, + min_depth=0.001, + patch_process_shape=( + 392, + 518, + ), + pretrain_model=[ + './work_dir/depthanything_vitl_u4k/coarse_pretrain/checkpoint_24.pth', + './work_dir/depthanything_vitl_u4k/fine_pretrain/checkpoint_24.pth', + ], + sigloss=dict(type='SILogLoss'), + type='PatchFusion') +optim_wrapper = dict( + clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'), + optimizer=dict(lr=0.0001, type='AdamW', weight_decay=0.001), + paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict())) +param_scheduler = dict( + base_momentum=0.85, + cycle_momentum=True, + div_factor=10, + 
final_div_factor=10000, + max_momentum=0.95, + pct_start=0.25, + three_phase=False) +project = 'patchfusion' +tags = [ + 'patchfusion', + 'da', + 'vitl', +] +test_in_dataloader = dict( + batch_size=1, + dataset=dict( + data_root='./data/u4k', + max_depth=80, + min_depth=0.001, + mode='infer', + split='./data/u4k/splits/test.txt', + transform_cfg=dict(network_process_size=[ + 384, + 512, + ]), + type='UnrealStereo4kDataset'), + num_workers=2) +test_out_dataloader = dict( + batch_size=1, + dataset=dict( + data_root='./data/u4k', + max_depth=80, + min_depth=0.001, + mode='infer', + split='./data/u4k/splits/test_out.txt', + transform_cfg=dict(network_process_size=[ + 384, + 512, + ]), + type='UnrealStereo4kDataset'), + num_workers=2) +train_cfg = dict( + eval_start=0, + log_interval=100, + max_epochs=16, + save_checkpoint_interval=16, + train_log_img_interval=500, + val_interval=2, + val_log_img_interval=50, + val_type='epoch_base') +train_dataloader = dict( + batch_size=4, + dataset=dict( + data_root='./data/u4k', + max_depth=80, + min_depth=0.001, + mode='train', + resize_mode='depth-anything', + split='./data/u4k/splits/train.txt', + transform_cfg=dict( + degree=1.0, network_process_size=[ + 392, + 518, + ], random_crop=True), + type='UnrealStereo4kDataset'), + num_workers=4) +val_dataloader = dict( + batch_size=1, + dataset=dict( + data_root='./data/u4k', + max_depth=80, + min_depth=0.001, + mode='infer', + resize_mode='depth-anything', + split='./data/u4k/splits/val.txt', + transform_cfg=dict(degree=1.0, network_process_size=[ + 392, + 518, + ]), + type='UnrealStereo4kDataset'), + num_workers=2) +work_dir = './work_dir/depthanything_vitl_u4k/patchfusion' +zoe_depth_config = dict( + attractor_alpha=1000, + attractor_gamma=2, + attractor_kind='mean', + attractor_type='inv', + aug=True, + bin_centers_type='softplus', + bin_embedding_dim=128, + clip_grad=0.1, + dataset='nyu', + depth_anything=True, + distributed=True, + do_resize=False, + force_keep_ar=True, + freeze_midas_bn=True, + gpu='NULL', + img_size=[ + 392, + 518, + ], + inverse_midas=False, + log_images_every=0.1, + max_depth=80, + max_temp=50.0, + max_translation=100, + memory_efficient=True, + midas_model_type='vitl', + min_depth=0.001, + min_temp=0.0212, + model='zoedepth', + n_attractors=[ + 16, + 8, + 4, + 1, + ], + n_bins=64, + name='ZoeDepth', + notes='', + output_distribution='logbinomial', + prefetch=False, + pretrained_resource='local::./work_dir/DepthAnything_vitl.pt', + print_losses=False, + project='ZoeDepth', + random_crop=False, + random_translate=False, + root='.', + save_dir='', + shared_dict='NULL', + tags='', + train_midas=True, + translate_prob=0.2, + type='DA-ZoeDepth', + uid='NULL', + use_amp=False, + use_pretrained_midas=True, + use_shared_dict=False, + validate_every=0.25, + version_name='v1', + workers=16) + +2024/03/15 17:52:52 - patchstitcher - INFO - Loading deepnet from local::./work_dir/DepthAnything_vitl.pt +2024/03/15 17:52:52 - patchstitcher - INFO - Current zoedepth.core.prep.resizer is +2024/03/15 17:52:52 - patchstitcher - INFO - Loading coarse_branch from ./work_dir/depthanything_vitl_u4k/coarse_pretrain/checkpoint_24.pth +2024/03/15 17:52:53 - patchstitcher - INFO - +2024/03/15 17:52:57 - patchstitcher - INFO - Loading deepnet from local::./work_dir/DepthAnything_vitl.pt +2024/03/15 17:52:58 - patchstitcher - INFO - Current zoedepth.core.prep.resizer is +2024/03/15 17:52:58 - patchstitcher - INFO - Loading fine_branch from ./work_dir/depthanything_vitl_u4k/fine_pretrain/checkpoint_24.pth 
+2024/03/15 17:52:58 - patchstitcher - INFO - +2024/03/15 17:52:59 - patchstitcher - INFO - DistributedDataParallel( + (module): PatchFusion( + (coarse_branch): ZoeDepth( + (core): DepthAnythingCore( + (core): DPT_DINOv2( + (pretrained): DinoVisionTransformer( + (patch_embed): PatchEmbed( + (proj): Conv2d(3, 1024, kernel_size=(14, 14), stride=(14, 14)) + (norm): Identity() + ) + (blocks): ModuleList( + (0-23): 24 x NestedTensorBlock( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MemEffAttention( + (qkv): Linear(in_features=1024, out_features=3072, bias=True) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (ls1): LayerScale() + (drop_path1): Identity() + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU(approximate='none') + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ls2): LayerScale() + (drop_path2): Identity() + ) + ) + (norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (head): Identity() + ) + (depth_head): DPTHead( + (projects): ModuleList( + (0): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1)) + (1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1)) + (2-3): 2 x Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + ) + (resize_layers): ModuleList( + (0): ConvTranspose2d(256, 256, kernel_size=(4, 4), stride=(4, 4)) + (1): ConvTranspose2d(512, 512, kernel_size=(2, 2), stride=(2, 2)) + (2): Identity() + (3): Conv2d(1024, 1024, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) + ) + (scratch): Module( + (layer1_rn): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer2_rn): Conv2d(512, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer3_rn): Conv2d(1024, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer4_rn): Conv2d(1024, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (refinenet1): FeatureFusionBlock( + (out_conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet2): FeatureFusionBlock( + (out_conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): 
FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet3): FeatureFusionBlock( + (out_conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet4): FeatureFusionBlock( + (out_conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (output_conv1): Conv2d(256, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (output_conv2): Sequential( + (0): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(32, 1, kernel_size=(1, 1), stride=(1, 1)) + (3): ReLU(inplace=True) + (4): Identity() + ) + ) + ) + ) + ) + (conv2): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (seed_bin_regressor): SeedBinRegressorUnnormed( + (_net): Sequential( + (0): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (seed_projector): Projector( + (_net): Sequential( + (0): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + (projectors): ModuleList( + (0-3): 4 x Projector( + (_net): Sequential( + (0): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + ) + (attractors): ModuleList( + (0): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 16, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (1): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 8, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (2): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 4, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (3): 
AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 1, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + ) + (conditional_log_binomial): ConditionalLogBinomial( + (log_binomial_transform): LogBinomial() + (mlp): Sequential( + (0): Conv2d(161, 80, kernel_size=(1, 1), stride=(1, 1)) + (1): GELU(approximate='none') + (2): Conv2d(80, 4, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + ) + (fine_branch): ZoeDepth( + (core): DepthAnythingCore( + (core): DPT_DINOv2( + (pretrained): DinoVisionTransformer( + (patch_embed): PatchEmbed( + (proj): Conv2d(3, 1024, kernel_size=(14, 14), stride=(14, 14)) + (norm): Identity() + ) + (blocks): ModuleList( + (0-23): 24 x NestedTensorBlock( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MemEffAttention( + (qkv): Linear(in_features=1024, out_features=3072, bias=True) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (ls1): LayerScale() + (drop_path1): Identity() + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): GELU(approximate='none') + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ls2): LayerScale() + (drop_path2): Identity() + ) + ) + (norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (head): Identity() + ) + (depth_head): DPTHead( + (projects): ModuleList( + (0): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1)) + (1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1)) + (2-3): 2 x Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + ) + (resize_layers): ModuleList( + (0): ConvTranspose2d(256, 256, kernel_size=(4, 4), stride=(4, 4)) + (1): ConvTranspose2d(512, 512, kernel_size=(2, 2), stride=(2, 2)) + (2): Identity() + (3): Conv2d(1024, 1024, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) + ) + (scratch): Module( + (layer1_rn): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer2_rn): Conv2d(512, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer3_rn): Conv2d(1024, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer4_rn): Conv2d(1024, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (refinenet1): FeatureFusionBlock( + (out_conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet2): FeatureFusionBlock( + (out_conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): 
Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet3): FeatureFusionBlock( + (out_conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet4): FeatureFusionBlock( + (out_conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (output_conv1): Conv2d(256, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (output_conv2): Sequential( + (0): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(32, 1, kernel_size=(1, 1), stride=(1, 1)) + (3): ReLU(inplace=True) + (4): Identity() + ) + ) + ) + ) + ) + (conv2): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (seed_bin_regressor): SeedBinRegressorUnnormed( + (_net): Sequential( + (0): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (seed_projector): Projector( + (_net): Sequential( + (0): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + (projectors): ModuleList( + (0-3): 4 x Projector( + (_net): Sequential( + (0): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + ) + (attractors): ModuleList( + (0): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 16, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (1): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), 
stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 8, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (2): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 4, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (3): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 1, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + ) + (conditional_log_binomial): ConditionalLogBinomial( + (log_binomial_transform): LogBinomial() + (mlp): Sequential( + (0): Conv2d(161, 80, kernel_size=(1, 1), stride=(1, 1)) + (1): GELU(approximate='none') + (2): Conv2d(80, 4, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + ) + (sigloss): SILogLoss() + (fusion_conv_list): ModuleList( + (0-4): 5 x Conv2d(512, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (5): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + ) + (guided_fusion): GuidedFusionPatchFusion( + (inc): DoubleConv( + (double_conv): Sequential( + (0): Conv2d(5, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (1): SyncBatchNorm(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + (3): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (4): SyncBatchNorm(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (5): ReLU(inplace=True) + ) + ) + (down_conv_list): ModuleList( + (0): Down( + (maxpool_conv): Sequential( + (0): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False) + (1): DoubleConv( + (double_conv): Sequential( + (0): Conv2d(32, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (1): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + (3): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (4): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (5): ReLU(inplace=True) + ) + ) + ) + ) + (1-4): 4 x Down( + (maxpool_conv): Sequential( + (0): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False) + (1): DoubleConv( + (double_conv): Sequential( + (0): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (1): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + (3): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (4): SyncBatchNorm(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (5): ReLU(inplace=True) + ) + ) + ) + ) + ) + (up_conv_list): ModuleList( + (0-3): 4 x Upv1( + (conv): DoubleConvWOBN( + (double_conv): Sequential( + (0): Conv2d(768, 768, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(768, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (3): ReLU(inplace=True) + ) + ) + ) + (4): Upv1( + (conv): DoubleConvWOBN( + (double_conv): Sequential( + (0): Conv2d(544, 544, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(544, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (3): ReLU(inplace=True) + ) + ) + ) + ) + (g2l_att): ModuleList() + (g2l_list): ModuleList( + 
(0-1): 2 x G2LFusion( + (g2l_layer): G2LBasicLayer( + (blocks): ModuleList( + (0-3): 4 x SwinTransformerBlock( + (norm1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (attn): WindowAttention( + dim=256, window_size=(12, 12), num_heads=32 + (qkv): Linear(in_features=256, out_features=768, bias=True) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=256, out_features=256, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + (softmax): Softmax(dim=-1) + ) + (drop_path): Identity() + (norm2): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=256, out_features=1024, bias=True) + (act): GELU(approximate='none') + (fc2): Linear(in_features=1024, out_features=256, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + ) + ) + (g2l_layer_norm): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (embed_proj): Conv2d(1, 256, kernel_size=(1, 1), stride=(1, 1)) + ) + (2-3): 2 x G2LFusion( + (g2l_layer): G2LBasicLayer( + (blocks): ModuleList( + (0-2): 3 x SwinTransformerBlock( + (norm1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (attn): WindowAttention( + dim=256, window_size=(12, 12), num_heads=16 + (qkv): Linear(in_features=256, out_features=768, bias=True) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=256, out_features=256, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + (softmax): Softmax(dim=-1) + ) + (drop_path): Identity() + (norm2): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=256, out_features=1024, bias=True) + (act): GELU(approximate='none') + (fc2): Linear(in_features=1024, out_features=256, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + ) + ) + (g2l_layer_norm): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (embed_proj): Conv2d(1, 256, kernel_size=(1, 1), stride=(1, 1)) + ) + (4): G2LFusion( + (g2l_layer): G2LBasicLayer( + (blocks): ModuleList( + (0-1): 2 x SwinTransformerBlock( + (norm1): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (attn): WindowAttention( + dim=256, window_size=(12, 12), num_heads=8 + (qkv): Linear(in_features=256, out_features=768, bias=True) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=256, out_features=256, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + (softmax): Softmax(dim=-1) + ) + (drop_path): Identity() + (norm2): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=256, out_features=1024, bias=True) + (act): GELU(approximate='none') + (fc2): Linear(in_features=1024, out_features=256, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + ) + ) + (g2l_layer_norm): LayerNorm((256,), eps=1e-05, elementwise_affine=True) + (embed_proj): Conv2d(1, 256, kernel_size=(1, 1), stride=(1, 1)) + ) + (5): G2LFusion( + (g2l_layer): G2LBasicLayer( + (blocks): ModuleList( + (0-1): 2 x SwinTransformerBlock( + (norm1): LayerNorm((32,), eps=1e-05, elementwise_affine=True) + (attn): WindowAttention( + dim=32, window_size=(12, 12), num_heads=8 + (qkv): Linear(in_features=32, out_features=96, bias=True) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=32, out_features=32, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + (softmax): Softmax(dim=-1) + ) + (drop_path): Identity() + (norm2): LayerNorm((32,), eps=1e-05, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=32, out_features=128, bias=True) + (act): 
GELU(approximate='none') + (fc2): Linear(in_features=128, out_features=32, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + ) + ) + (g2l_layer_norm): LayerNorm((32,), eps=1e-05, elementwise_affine=True) + (embed_proj): Conv2d(1, 32, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + (convs): ModuleList( + (0-4): 5 x DoubleConvWOBN( + (double_conv): Sequential( + (0): Conv2d(512, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (3): ReLU(inplace=True) + ) + ) + (5): DoubleConvWOBN( + (double_conv): Sequential( + (0): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (3): ReLU(inplace=True) + ) + ) + ) + ) + (seed_bin_regressor): SeedBinRegressorUnnormed( + (_net): Sequential( + (0): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (seed_projector): Projector( + (_net): Sequential( + (0): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + (projectors): ModuleList( + (0-3): 4 x Projector( + (_net): Sequential( + (0): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + ) + (attractors): ModuleList( + (0): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 16, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (1): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 8, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (2): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 4, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (3): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 1, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + ) + (conditional_log_binomial): ConditionalLogBinomial( + (log_binomial_transform): LogBinomial() + (mlp): Sequential( + (0): Conv2d(161, 80, kernel_size=(1, 1), stride=(1, 1)) + (1): GELU(approximate='none') + (2): Conv2d(80, 4, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + ) +) +2024/03/15 17:53:06 - patchstitcher - INFO - successfully init trainer +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.fusion_conv_list.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.fusion_conv_list.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.fusion_conv_list.1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.fusion_conv_list.1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.fusion_conv_list.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.fusion_conv_list.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: 
module.fusion_conv_list.3.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.fusion_conv_list.3.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.fusion_conv_list.4.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.fusion_conv_list.4.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.fusion_conv_list.5.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.fusion_conv_list.5.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.inc.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.inc.double_conv.1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.inc.double_conv.1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.inc.double_conv.3.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.inc.double_conv.4.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.inc.double_conv.4.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.0.maxpool_conv.1.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.0.maxpool_conv.1.double_conv.1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.0.maxpool_conv.1.double_conv.1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.0.maxpool_conv.1.double_conv.3.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.0.maxpool_conv.1.double_conv.4.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.0.maxpool_conv.1.double_conv.4.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.1.maxpool_conv.1.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.1.maxpool_conv.1.double_conv.1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.1.maxpool_conv.1.double_conv.1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.1.maxpool_conv.1.double_conv.3.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.1.maxpool_conv.1.double_conv.4.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.1.maxpool_conv.1.double_conv.4.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.2.maxpool_conv.1.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.2.maxpool_conv.1.double_conv.1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.2.maxpool_conv.1.double_conv.1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.2.maxpool_conv.1.double_conv.3.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.2.maxpool_conv.1.double_conv.4.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: 
module.guided_fusion.down_conv_list.2.maxpool_conv.1.double_conv.4.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.3.maxpool_conv.1.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.3.maxpool_conv.1.double_conv.1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.3.maxpool_conv.1.double_conv.1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.3.maxpool_conv.1.double_conv.3.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.3.maxpool_conv.1.double_conv.4.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.3.maxpool_conv.1.double_conv.4.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.4.maxpool_conv.1.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.4.maxpool_conv.1.double_conv.1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.4.maxpool_conv.1.double_conv.1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.4.maxpool_conv.1.double_conv.3.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.4.maxpool_conv.1.double_conv.4.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.4.maxpool_conv.1.double_conv.4.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.0.conv.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.0.conv.double_conv.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.0.conv.double_conv.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.0.conv.double_conv.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.1.conv.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.1.conv.double_conv.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.1.conv.double_conv.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.1.conv.double_conv.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.2.conv.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.2.conv.double_conv.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.2.conv.double_conv.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.2.conv.double_conv.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.3.conv.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.3.conv.double_conv.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.3.conv.double_conv.2.weight +2024/03/15 
17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.3.conv.double_conv.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.4.conv.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.4.conv.double_conv.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.4.conv.double_conv.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.4.conv.double_conv.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.absolute_pos_embed +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.norm2.bias 
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: 
module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer_norm.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer_norm.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.embed_proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.embed_proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.absolute_pos_embed +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: 
module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.norm2.bias +2024/03/15 17:53:06 - 
patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer_norm.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer_norm.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.embed_proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.embed_proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.absolute_pos_embed +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - 
training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer_norm.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer_norm.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.embed_proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.embed_proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.absolute_pos_embed +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: 
module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.attn.qkv.bias +2024/03/15 17:53:06 - 
patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer_norm.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer_norm.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.embed_proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.embed_proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.absolute_pos_embed +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: 
module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer_norm.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer_norm.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.embed_proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.embed_proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.absolute_pos_embed +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: 
module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.norm1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.norm1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.attn.relative_position_bias_table +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.attn.qkv.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.attn.qkv.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.attn.proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.attn.proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.norm2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.norm2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.mlp.fc1.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.mlp.fc1.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.mlp.fc2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.mlp.fc2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer_norm.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer_norm.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.embed_proj.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.embed_proj.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.0.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.0.double_conv.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.0.double_conv.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.0.double_conv.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.1.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.1.double_conv.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.1.double_conv.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.1.double_conv.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.2.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.2.double_conv.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.2.double_conv.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.2.double_conv.2.bias +2024/03/15 17:53:06 - patchstitcher 
- INFO - training param: module.guided_fusion.convs.3.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.3.double_conv.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.3.double_conv.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.3.double_conv.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.4.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.4.double_conv.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.4.double_conv.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.4.double_conv.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.5.double_conv.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.5.double_conv.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.5.double_conv.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.guided_fusion.convs.5.double_conv.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.seed_bin_regressor._net.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.seed_bin_regressor._net.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.seed_bin_regressor._net.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.seed_bin_regressor._net.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.seed_projector._net.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.seed_projector._net.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.seed_projector._net.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.seed_projector._net.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.0._net.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.0._net.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.0._net.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.0._net.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.1._net.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.1._net.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.1._net.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.1._net.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.2._net.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.2._net.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.2._net.2.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.2._net.2.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.3._net.0.weight +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.3._net.0.bias +2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.projectors.3._net.2.weight +2024/03/15 17:53:06 - patchstitcher - 
INFO - training param: module.projectors.3._net.2.bias
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.0._net.0.weight
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.0._net.0.bias
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.0._net.2.weight
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.0._net.2.bias
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.1._net.0.weight
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.1._net.0.bias
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.1._net.2.weight
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.1._net.2.bias
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.2._net.0.weight
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.2._net.0.bias
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.2._net.2.weight
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.2._net.2.bias
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.3._net.0.weight
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.3._net.0.bias
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.3._net.2.weight
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.attractors.3._net.2.bias
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.conditional_log_binomial.mlp.0.weight
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.conditional_log_binomial.mlp.0.bias
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.conditional_log_binomial.mlp.2.weight
+2024/03/15 17:53:06 - patchstitcher - INFO - training param: module.conditional_log_binomial.mlp.2.bias
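Note on the parameter listing above: the entries name the submodules whose weights receive gradient updates in this run (the guided-fusion blocks, the seed bin regressor and projector, the per-level projectors and attractors, and the conditional log-binomial head), each carrying a "module." prefix because the model is wrapped in DistributedDataParallel. A minimal sketch of how such a listing is typically produced from a PyTorch module is shown below; the helper name log_trainable_params is illustrative and not part of the PatchFusion code.

    import logging

    import torch.nn as nn

    def log_trainable_params(model: nn.Module, logger: logging.Logger) -> int:
        """Log every parameter the optimizer will update and return their total count."""
        n_trainable = 0
        for name, param in model.named_parameters():
            if param.requires_grad:
                # DistributedDataParallel adds the leading "module." seen in the log.
                logger.info("training param: %s", name)
                n_trainable += param.numel()
        return n_trainable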
+2024/03/15 17:57:49 - patchstitcher - INFO - Epoch: [01/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 6.651620864868164 - sig_loss: 6.651620864868164
+2024/03/15 18:01:09 - patchstitcher - INFO - Epoch: [01/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 3.1729226112365723 - sig_loss: 3.1729226112365723
+2024/03/15 18:04:30 - patchstitcher - INFO - Epoch: [01/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 6.4036359786987305 - sig_loss: 6.4036359786987305
+2024/03/15 18:07:51 - patchstitcher - INFO - Epoch: [01/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.5950872898101807 - sig_loss: 1.5950872898101807
+2024/03/15 18:14:41 - patchstitcher - INFO - Epoch: [02/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.5556060075759888 - sig_loss: 0.5556060075759888
+2024/03/15 18:18:02 - patchstitcher - INFO - Epoch: [02/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.4476476907730103 - sig_loss: 1.4476476907730103
+2024/03/15 18:21:23 - patchstitcher - INFO - Epoch: [02/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.7769662141799927 - sig_loss: 0.7769662141799927
+2024/03/15 18:24:44 - patchstitcher - INFO - Epoch: [02/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.0473182201385498 - sig_loss: 1.0473182201385498
+2024/03/15 18:28:21 - patchstitcher - INFO - Evaluation Summary:
++-----------+----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+| a1        | a2       | a3        | abs_rel   | rmse      | log_10    | rmse_log  | silog     | sq_rel    | see       |
++-----------+----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+| 0.9604923 | 0.992972 | 0.9974152 | 0.0768845 | 1.3931862 | 0.0329559 | 0.1041152 | 8.8729713 | 0.1381766 | 1.0569959 |
++-----------+----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+2024/03/15 18:31:46 - patchstitcher - INFO - Epoch: [03/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.40465447306632996 - sig_loss: 0.40465447306632996
+2024/03/15 18:35:08 - patchstitcher - INFO - Epoch: [03/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.1749241352081299 - sig_loss: 1.1749241352081299
+2024/03/15 18:38:29 - patchstitcher - INFO - Epoch: [03/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.2686662673950195 - sig_loss: 1.2686662673950195
+2024/03/15 18:41:50 - patchstitcher - INFO - Epoch: [03/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.711976170539856 - sig_loss: 0.711976170539856
+2024/03/15 18:47:46 - patchstitcher - INFO - Epoch: [04/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.5024728775024414 - sig_loss: 0.5024728775024414
+2024/03/15 18:51:07 - patchstitcher - INFO - Epoch: [04/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.527965784072876 - sig_loss: 0.527965784072876
+2024/03/15 18:54:28 - patchstitcher - INFO - Epoch: [04/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8440424203872681 - sig_loss: 0.8440424203872681
+2024/03/15 18:57:48 - patchstitcher - INFO - Epoch: [04/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.26558583974838257 - sig_loss: 0.26558583974838257
+2024/03/15 19:01:10 - patchstitcher - INFO - Evaluation Summary:
++-----------+----------+-----------+----------+----------+-----------+-----------+---------+-----------+-----------+
+| a1        | a2       | a3        | abs_rel  | rmse     | log_10    | rmse_log  | silog   | sq_rel    | see       |
++-----------+----------+-----------+----------+----------+-----------+-----------+---------+-----------+-----------+
+| 0.9812026 | 0.994635 | 0.9978743 | 0.059769 | 1.174001 | 0.0255657 | 0.0845318 | 7.16214 | 0.1029762 | 0.9235216 |
++-----------+----------+-----------+----------+----------+-----------+-----------+---------+-----------+-----------+
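The evaluation summaries report the standard monocular depth metrics: the threshold accuracies a1/a2/a3 (fraction of pixels where max(gt/pred, pred/gt) falls below 1.25, 1.25^2 and 1.25^3), absolute and squared relative error, RMSE, log10 error, RMSE in log space, the scale-invariant log error (silog), plus an edge-quality column (see) specific to this codebase. A rough sketch of how the first columns are commonly computed from already-masked prediction and ground-truth arrays is given below; the exact masking, cropping and the definition of the see column follow the repository's evaluation code and are not reproduced here.

    import numpy as np

    def depth_metrics(pred: np.ndarray, gt: np.ndarray) -> dict:
        """Common depth metrics on flattened, valid-pixel-only arrays."""
        thresh = np.maximum(gt / pred, pred / gt)
        err = gt - pred
        return {
            "a1": float((thresh < 1.25).mean()),
            "a2": float((thresh < 1.25 ** 2).mean()),
            "a3": float((thresh < 1.25 ** 3).mean()),
            "abs_rel": float((np.abs(err) / gt).mean()),
            "sq_rel": float(((err ** 2) / gt).mean()),
            "rmse": float(np.sqrt((err ** 2).mean())),
            "rmse_log": float(np.sqrt(((np.log(gt) - np.log(pred)) ** 2).mean())),
            "log_10": float(np.abs(np.log10(gt) - np.log10(pred)).mean()),
        }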
+2024/03/15 19:04:36 - patchstitcher - INFO - Epoch: [05/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.6908500790596008 - sig_loss: 0.6908500790596008
+2024/03/15 19:07:57 - patchstitcher - INFO - Epoch: [05/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.33774641156196594 - sig_loss: 0.33774641156196594
+2024/03/15 19:11:17 - patchstitcher - INFO - Epoch: [05/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.6475552916526794 - sig_loss: 0.6475552916526794
+2024/03/15 19:14:38 - patchstitcher - INFO - Epoch: [05/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.7433701753616333 - sig_loss: 0.7433701753616333
+2024/03/15 19:20:34 - patchstitcher - INFO - Epoch: [06/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.999753475189209 - sig_loss: 0.999753475189209
+2024/03/15 19:23:55 - patchstitcher - INFO - Epoch: [06/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8219634890556335 - sig_loss: 0.8219634890556335
+2024/03/15 19:27:15 - patchstitcher - INFO - Epoch: [06/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.4194517433643341 - sig_loss: 0.4194517433643341
+2024/03/15 19:30:36 - patchstitcher - INFO - Epoch: [06/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.7954123616218567 - sig_loss: 0.7954123616218567
+2024/03/15 19:33:58 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+----------+-----------+
+| a1        | a2        | a3        | abs_rel  | rmse      | log_10    | rmse_log  | silog     | sq_rel   | see       |
++-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+----------+-----------+
+| 0.9570371 | 0.9941043 | 0.9978197 | 0.101201 | 1.2812451 | 0.0417833 | 0.1185188 | 8.3019764 | 0.123725 | 1.0220292 |
++-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+----------+-----------+
+2024/03/15 19:37:26 - patchstitcher - INFO - Epoch: [07/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.47829562425613403 - sig_loss: 0.47829562425613403
+2024/03/15 19:40:47 - patchstitcher - INFO - Epoch: [07/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.0946928262710571 - sig_loss: 1.0946928262710571
+2024/03/15 19:44:07 - patchstitcher - INFO - Epoch: [07/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.653084397315979 - sig_loss: 0.653084397315979
+2024/03/15 19:47:28 - patchstitcher - INFO - Epoch: [07/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.49332454800605774 - sig_loss: 0.49332454800605774
+2024/03/15 19:53:23 - patchstitcher - INFO - Epoch: [08/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.4712163209915161 - sig_loss: 0.4712163209915161
+2024/03/15 19:56:43 - patchstitcher - INFO - Epoch: [08/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.852905809879303 - sig_loss: 0.852905809879303
+2024/03/15 20:00:04 - patchstitcher - INFO - Epoch: [08/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.7322371602058411 - sig_loss: 0.7322371602058411
+2024/03/15 20:03:24 - patchstitcher - INFO - Epoch: [08/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.8382786512374878 - sig_loss: 0.8382786512374878
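In every step entry above the Total Loss equals sig_loss, so this scale-invariant log style term (as the name sig_loss suggests) is the only objective being optimized at this stage. A minimal sketch of a SILog loss in its usual formulation is shown below, assuming a validity mask on the ground-truth depth; the exact variance weighting and output scaling used by the repository's own loss may differ.

    import torch

    def silog_loss(pred: torch.Tensor, target: torch.Tensor,
                   beta: float = 0.15, eps: float = 1e-6) -> torch.Tensor:
        """Scale-invariant log loss computed over valid (positive-depth) pixels."""
        mask = target > eps
        g = torch.log(pred[mask] + eps) - torch.log(target[mask] + eps)
        # var(g) penalizes relative error; the beta * mean(g)^2 term keeps some
        # sensitivity to the absolute scale of the prediction.
        dg = torch.var(g) + beta * torch.mean(g) ** 2
        return torch.sqrt(dg)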
+2024/03/15 20:06:46 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+
+| a1        | a2        | a3        | abs_rel   | rmse      | log_10   | rmse_log  | silog     | sq_rel    | see       |
++-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+
+| 0.9800802 | 0.9948133 | 0.9979888 | 0.0667494 | 1.1031989 | 0.027872 | 0.0859998 | 6.1991318 | 0.0983083 | 0.9544899 |
++-----------+-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+
+2024/03/15 20:10:12 - patchstitcher - INFO - Epoch: [09/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.6576782464981079 - sig_loss: 0.6576782464981079
+2024/03/15 20:13:33 - patchstitcher - INFO - Epoch: [09/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.3798004686832428 - sig_loss: 0.3798004686832428
+2024/03/15 20:16:53 - patchstitcher - INFO - Epoch: [09/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.27597683668136597 - sig_loss: 0.27597683668136597
+2024/03/15 20:20:14 - patchstitcher - INFO - Epoch: [09/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.4274792969226837 - sig_loss: 0.4274792969226837
+2024/03/15 20:26:10 - patchstitcher - INFO - Epoch: [10/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.28685206174850464 - sig_loss: 0.28685206174850464
+2024/03/15 20:29:30 - patchstitcher - INFO - Epoch: [10/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.6088235378265381 - sig_loss: 0.6088235378265381
+2024/03/15 20:32:51 - patchstitcher - INFO - Epoch: [10/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8431264758110046 - sig_loss: 0.8431264758110046
+2024/03/15 20:36:11 - patchstitcher - INFO - Epoch: [10/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.4401911497116089 - sig_loss: 0.4401911497116089
+2024/03/15 20:39:33 - patchstitcher - INFO - Evaluation Summary:
++-----------+----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+----------+
+| a1        | a2       | a3        | abs_rel   | rmse      | log_10    | rmse_log  | silog    | sq_rel    | see      |
++-----------+----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+----------+
+| 0.9842327 | 0.995032 | 0.9980113 | 0.0420645 | 1.0650526 | 0.0181017 | 0.0686697 | 6.248743 | 0.0888892 | 0.887989 |
++-----------+----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+----------+
+2024/03/15 20:42:58 - patchstitcher - INFO - Epoch: [11/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.4226158559322357 - sig_loss: 0.4226158559322357
+2024/03/15 20:46:19 - patchstitcher - INFO - Epoch: [11/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.5998555421829224 - sig_loss: 0.5998555421829224
+2024/03/15 20:49:39 - patchstitcher - INFO - Epoch: [11/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8624639511108398 - sig_loss: 0.8624639511108398
+2024/03/15 20:52:59 - patchstitcher - INFO - Epoch: [11/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.5691335797309875 - sig_loss: 0.5691335797309875
+2024/03/15 20:58:54 - patchstitcher - INFO - Epoch: [12/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.3543495535850525 - sig_loss: 0.3543495535850525
+2024/03/15 21:02:14 - patchstitcher - INFO - Epoch: [12/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.17586150765419006 - sig_loss: 0.17586150765419006
+2024/03/15 21:05:35 - patchstitcher - INFO - Epoch: [12/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8585277795791626 - sig_loss: 0.8585277795791626
+2024/03/15 21:08:55 - patchstitcher - INFO - Epoch: [12/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.39675745368003845 - sig_loss: 0.39675745368003845
+2024/03/15 21:12:17 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+| a1        | a2        | a3        | abs_rel   | rmse      | log_10    | rmse_log  | silog     | sq_rel    | see       |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+| 0.9845838 | 0.9949521 | 0.9980043 | 0.0384732 | 1.0613158 | 0.0166137 | 0.0663726 | 6.1042054 | 0.0890013 | 0.8891889 |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
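Because the file alternates step losses and evaluation tables in a fixed format, convergence can be tracked directly from the log text. A small illustrative parser for the step-loss lines above is sketched below; the regex is tailored to this log layout and is not part of the training code.

    import re

    LOSS_RE = re.compile(
        r"Epoch: \[(\d+)/\d+\] - Step: \[(\d+)/\d+\].*Total Loss: ([\d.]+)"
    )

    def parse_step_losses(log_path: str):
        """Yield (epoch, step, total_loss) tuples from a patchstitcher training log."""
        with open(log_path) as f:
            for line in f:
                match = LOSS_RE.search(line)
                if match:
                    yield int(match.group(1)), int(match.group(2)), float(match.group(3))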
+2024/03/15 21:15:43 - patchstitcher - INFO - Epoch: [13/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.4550189971923828 - sig_loss: 1.4550189971923828
+2024/03/15 21:19:03 - patchstitcher - INFO - Epoch: [13/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.4285925626754761 - sig_loss: 0.4285925626754761
+2024/03/15 21:22:24 - patchstitcher - INFO - Epoch: [13/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.5604073405265808 - sig_loss: 0.5604073405265808
+2024/03/15 21:25:45 - patchstitcher - INFO - Epoch: [13/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.7231113314628601 - sig_loss: 0.7231113314628601
+2024/03/15 21:31:38 - patchstitcher - INFO - Epoch: [14/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.22688111662864685 - sig_loss: 0.22688111662864685
+2024/03/15 21:34:58 - patchstitcher - INFO - Epoch: [14/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.4432392120361328 - sig_loss: 0.4432392120361328
+2024/03/15 21:38:19 - patchstitcher - INFO - Epoch: [14/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.5041521191596985 - sig_loss: 0.5041521191596985
+2024/03/15 21:41:40 - patchstitcher - INFO - Epoch: [14/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.6649416089057922 - sig_loss: 0.6649416089057922
+2024/03/15 21:45:01 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+----------+
+| a1        | a2        | a3        | abs_rel   | rmse     | log_10    | rmse_log  | silog     | sq_rel    | see      |
++-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+----------+
+| 0.9851979 | 0.9949358 | 0.9979829 | 0.0377008 | 1.029974 | 0.0165044 | 0.0661251 | 6.0282001 | 0.0796015 | 0.812903 |
++-----------+-----------+-----------+-----------+----------+-----------+-----------+-----------+-----------+----------+
+2024/03/15 21:48:25 - patchstitcher - INFO - Epoch: [15/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.41866496205329895 - sig_loss: 0.41866496205329895
+2024/03/15 21:51:46 - patchstitcher - INFO - Epoch: [15/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8762396574020386 - sig_loss: 0.8762396574020386
+2024/03/15 21:55:06 - patchstitcher - INFO - Epoch: [15/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 2.038621664047241 - sig_loss: 2.038621664047241
+2024/03/15 21:58:27 - patchstitcher - INFO - Epoch: [15/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.2506561875343323 - sig_loss: 0.2506561875343323
+2024/03/15 22:04:22 - patchstitcher - INFO - Epoch: [16/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.37795141339302063 - sig_loss: 0.37795141339302063
+2024/03/15 22:07:42 - patchstitcher - INFO - Epoch: [16/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.24302588403224945 - sig_loss: 0.24302588403224945
+2024/03/15 22:11:03 - patchstitcher - INFO - Epoch: [16/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.4261029362678528 - sig_loss: 0.4261029362678528
+2024/03/15 22:14:24 - patchstitcher - INFO - Epoch: [16/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.648262858390808 - sig_loss: 1.648262858390808
+2024/03/15 22:17:45 - patchstitcher - INFO - Evaluation Summary:
++-----------+----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+| a1        | a2       | a3        | abs_rel   | rmse      | log_10    | rmse_log  | silog     | sq_rel    | see       |
++-----------+----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+| 0.9852235 | 0.994962 | 0.9979886 | 0.0368362 | 1.0233804 | 0.0160672 | 0.0651711 | 5.9528971 | 0.0792726 | 0.8155836 |
++-----------+----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+2024/03/15 22:17:45 - patchstitcher - INFO - Saving ckp, but use the inner get_save_dict function to get model_dict
+2024/03/15 22:17:45 - patchstitcher - INFO - For saving space. Would you like to save the base model several times? :>
+2024/03/15 22:17:46 - patchstitcher - INFO - save checkpoint_16.pth at ./work_dir/depthanything_vitl_u4k/patchfusion
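The closing messages show that the checkpoint is assembled through the model's own get_save_dict helper rather than a plain state_dict dump, so that the base model is not written out again with every checkpoint, as the second message hints. A sketch of that idea is given below; the prefix filtering and the function name save_fusion_checkpoint are illustrative assumptions, not the repository's actual implementation.

    import torch

    def save_fusion_checkpoint(model, path,
                               skip_prefixes=("coarse_branch.", "fine_branch.")):
        """Persist a slimmed state dict, dropping submodules assumed to be stored elsewhere."""
        # Unwrap DistributedDataParallel if present.
        state = model.module.state_dict() if hasattr(model, "module") else model.state_dict()
        slim = {k: v for k, v in state.items() if not k.startswith(skip_prefixes)}
        torch.save({"model_state_dict": slim}, path)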