diff --git "a/depthanything_vitb_u4k/patchfusion/20240315_193032.log" "b/depthanything_vitb_u4k/patchfusion/20240315_193032.log" new file mode 100644--- /dev/null +++ "b/depthanything_vitb_u4k/patchfusion/20240315_193032.log" @@ -0,0 +1,1606 @@ +2024/03/15 19:30:41 - patchstitcher - INFO - +------------------------------------------------------------ +System environment: + sys.platform: linux + Python: 3.8.18 | packaged by conda-forge | (default, Oct 10 2023, 15:44:36) [GCC 12.3.0] + CUDA available: True + numpy_random_seed: 621 + GPU 0,1,2,3: NVIDIA A100-SXM4-80GB + CUDA_HOME: /sw/rl9g/cuda/11.8/rl9_binary + NVCC: Cuda compilation tools, release 11.8, V11.8.89 + GCC: gcc (GCC) 11.3.1 20220421 (Red Hat 11.3.1-2) + PyTorch: 2.1.2 + PyTorch compiling details: PyTorch built with: + - GCC 9.3 + - C++ Version: 201703 + - Intel(R) oneAPI Math Kernel Library Version 2022.1-Product Build 20220311 for Intel(R) 64 architecture applications + - Intel(R) MKL-DNN v3.1.1 (Git Hash 64f6bcbcbab628e96f33a62c3e975f8535a7bde4) + - OpenMP 201511 (a.k.a. OpenMP 4.5) + - LAPACK is enabled (usually provided by MKL) + - NNPACK is enabled + - CPU capability usage: AVX2 + - CUDA Runtime 11.8 + - NVCC architecture flags: -gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_61,code=sm_61;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86;-gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_90,code=sm_90;-gencode;arch=compute_37,code=compute_37 + - CuDNN 8.7 + - Magma 2.6.1 + - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=11.8, CUDNN_VERSION=8.7.0, CXX_COMPILER=/opt/rh/devtoolset-9/root/usr/bin/c++, CXX_FLAGS= -D_GLIBCXX_USE_CXX11_ABI=0 -fabi-version=11 -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -DNDEBUG -DUSE_KINETO -DLIBKINETO_NOROCTRACER -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wall -Wextra -Werror=return-type -Werror=non-virtual-dtor -Werror=bool-operation -Wnarrowing -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-unused-parameter -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-stringop-overflow -Wno-psabi -Wno-error=pedantic -Wno-error=old-style-cast -Wno-invalid-partial-specialization -Wno-unused-private-field -Wno-aligned-allocation-unavailable -Wno-missing-braces -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Werror=cast-function-type -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_DISABLE_GPU_ASSERTS=ON, TORCH_VERSION=2.1.2, USE_CUDA=ON, USE_CUDNN=ON, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF, + + TorchVision: 0.16.2 + OpenCV: 4.8.1 + MMEngine: 0.10.2 + +Runtime environment: + cudnn_benchmark: True + mp_cfg: {'mp_start_method': 'forkserver'} + dist_cfg: {'backend': 'nccl'} + seed: 621 + Distributed launcher: pytorch + Distributed training: True + GPU number: 4 +------------------------------------------------------------ + +2024/03/15 19:30:41 - patchstitcher - INFO - Config: +collect_input_args = [ + 'image_lr', + 'crops_image_hr', + 'depth_gt', + 'crop_depths', + 'bboxs', + 'image_hr', +] +convert_syncbn = True +debug = False +env_cfg = dict( + 
cudnn_benchmark=True, + dist_cfg=dict(backend='nccl'), + mp_cfg=dict(mp_start_method='forkserver')) +find_unused_parameters = True +general_dataloader = dict( + batch_size=1, + dataset=dict( + dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'), + num_workers=2) +launcher = 'pytorch' +log_name = 'patchfusion' +max_depth = 80 +min_depth = 0.001 +model = dict( + coarse_branch=dict( + attractor_alpha=1000, + attractor_gamma=2, + attractor_kind='mean', + attractor_type='inv', + aug=True, + bin_centers_type='softplus', + bin_embedding_dim=128, + clip_grad=0.1, + dataset='nyu', + depth_anything=True, + distributed=True, + do_resize=False, + force_keep_ar=True, + freeze_midas_bn=True, + gpu='NULL', + img_size=[ + 392, + 518, + ], + inverse_midas=False, + log_images_every=0.1, + max_depth=80, + max_temp=50.0, + max_translation=100, + memory_efficient=True, + midas_model_type='vitb', + min_depth=0.001, + min_temp=0.0212, + model='zoedepth', + n_attractors=[ + 16, + 8, + 4, + 1, + ], + n_bins=64, + name='ZoeDepth', + notes='', + output_distribution='logbinomial', + prefetch=False, + pretrained_resource='local::./work_dir/DepthAnything_vitb.pt', + print_losses=False, + project='ZoeDepth', + random_crop=False, + random_translate=False, + root='.', + save_dir='', + shared_dict='NULL', + tags='', + train_midas=True, + translate_prob=0.2, + type='DA-ZoeDepth', + uid='NULL', + use_amp=False, + use_pretrained_midas=True, + use_shared_dict=False, + validate_every=0.25, + version_name='v1', + workers=16), + fine_branch=dict( + attractor_alpha=1000, + attractor_gamma=2, + attractor_kind='mean', + attractor_type='inv', + aug=True, + bin_centers_type='softplus', + bin_embedding_dim=128, + clip_grad=0.1, + dataset='nyu', + depth_anything=True, + distributed=True, + do_resize=False, + force_keep_ar=True, + freeze_midas_bn=True, + gpu='NULL', + img_size=[ + 392, + 518, + ], + inverse_midas=False, + log_images_every=0.1, + max_depth=80, + max_temp=50.0, + max_translation=100, + memory_efficient=True, + midas_model_type='vitb', + min_depth=0.001, + min_temp=0.0212, + model='zoedepth', + n_attractors=[ + 16, + 8, + 4, + 1, + ], + n_bins=64, + name='ZoeDepth', + notes='', + output_distribution='logbinomial', + prefetch=False, + pretrained_resource='local::./work_dir/DepthAnything_vitb.pt', + print_losses=False, + project='ZoeDepth', + random_crop=False, + random_translate=False, + root='.', + save_dir='', + shared_dict='NULL', + tags='', + train_midas=True, + translate_prob=0.2, + type='DA-ZoeDepth', + uid='NULL', + use_amp=False, + use_pretrained_midas=True, + use_shared_dict=False, + validate_every=0.25, + version_name='v1', + workers=16), + guided_fusion=dict( + g2l=True, + in_channels=[ + 32, + 128, + 128, + 128, + 128, + 128, + ], + n_channels=5, + num_patches=[ + 203056, + 66304, + 16576, + 4144, + 1036, + 266, + ], + patch_process_shape=( + 392, + 518, + ), + type='GuidedFusionPatchFusion'), + max_depth=80, + min_depth=0.001, + patch_process_shape=( + 392, + 518, + ), + pretrain_model=[ + './work_dir/depthanything_vitb_u4k/coarse_pretrain/checkpoint_24.pth', + './work_dir/depthanything_vitb_u4k/fine_pretrain/checkpoint_24.pth', + ], + sigloss=dict(type='SILogLoss'), + type='PatchFusion') +optim_wrapper = dict( + clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'), + optimizer=dict(lr=0.0001, type='AdamW', weight_decay=0.001), + paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict())) +param_scheduler = dict( + base_momentum=0.85, + cycle_momentum=True, + div_factor=10, + 
final_div_factor=10000, + max_momentum=0.95, + pct_start=0.25, + three_phase=False) +project = 'patchfusion' +tags = [ + 'patchfusion', + 'da', + 'vitb', +] +test_in_dataloader = dict( + batch_size=1, + dataset=dict( + data_root='./data/u4k', + max_depth=80, + min_depth=0.001, + mode='infer', + split='./data/u4k/splits/test.txt', + transform_cfg=dict(network_process_size=[ + 384, + 512, + ]), + type='UnrealStereo4kDataset'), + num_workers=2) +test_out_dataloader = dict( + batch_size=1, + dataset=dict( + data_root='./data/u4k', + max_depth=80, + min_depth=0.001, + mode='infer', + split='./data/u4k/splits/test_out.txt', + transform_cfg=dict(network_process_size=[ + 384, + 512, + ]), + type='UnrealStereo4kDataset'), + num_workers=2) +train_cfg = dict( + eval_start=0, + log_interval=100, + max_epochs=16, + save_checkpoint_interval=16, + train_log_img_interval=500, + val_interval=2, + val_log_img_interval=50, + val_type='epoch_base') +train_dataloader = dict( + batch_size=4, + dataset=dict( + data_root='./data/u4k', + max_depth=80, + min_depth=0.001, + mode='train', + resize_mode='depth-anything', + split='./data/u4k/splits/train.txt', + transform_cfg=dict( + degree=1.0, network_process_size=[ + 392, + 518, + ], random_crop=True), + type='UnrealStereo4kDataset'), + num_workers=4) +val_dataloader = dict( + batch_size=1, + dataset=dict( + data_root='./data/u4k', + max_depth=80, + min_depth=0.001, + mode='infer', + resize_mode='depth-anything', + split='./data/u4k/splits/val.txt', + transform_cfg=dict(degree=1.0, network_process_size=[ + 392, + 518, + ]), + type='UnrealStereo4kDataset'), + num_workers=2) +work_dir = './work_dir/depthanything_vitb_u4k/patchfusion' +zoe_depth_config = dict( + attractor_alpha=1000, + attractor_gamma=2, + attractor_kind='mean', + attractor_type='inv', + aug=True, + bin_centers_type='softplus', + bin_embedding_dim=128, + clip_grad=0.1, + dataset='nyu', + depth_anything=True, + distributed=True, + do_resize=False, + force_keep_ar=True, + freeze_midas_bn=True, + gpu='NULL', + img_size=[ + 392, + 518, + ], + inverse_midas=False, + log_images_every=0.1, + max_depth=80, + max_temp=50.0, + max_translation=100, + memory_efficient=True, + midas_model_type='vitb', + min_depth=0.001, + min_temp=0.0212, + model='zoedepth', + n_attractors=[ + 16, + 8, + 4, + 1, + ], + n_bins=64, + name='ZoeDepth', + notes='', + output_distribution='logbinomial', + prefetch=False, + pretrained_resource='local::./work_dir/DepthAnything_vitb.pt', + print_losses=False, + project='ZoeDepth', + random_crop=False, + random_translate=False, + root='.', + save_dir='', + shared_dict='NULL', + tags='', + train_midas=True, + translate_prob=0.2, + type='DA-ZoeDepth', + uid='NULL', + use_amp=False, + use_pretrained_midas=True, + use_shared_dict=False, + validate_every=0.25, + version_name='v1', + workers=16) + +2024/03/15 19:30:43 - patchstitcher - INFO - Loading deepnet from local::./work_dir/DepthAnything_vitb.pt +2024/03/15 19:30:43 - patchstitcher - INFO - Current zoedepth.core.prep.resizer is +2024/03/15 19:30:43 - patchstitcher - INFO - Loading coarse_branch from ./work_dir/depthanything_vitb_u4k/coarse_pretrain/checkpoint_24.pth +2024/03/15 19:30:43 - patchstitcher - INFO - +2024/03/15 19:30:44 - patchstitcher - INFO - Loading deepnet from local::./work_dir/DepthAnything_vitb.pt +2024/03/15 19:30:44 - patchstitcher - INFO - Current zoedepth.core.prep.resizer is +2024/03/15 19:30:44 - patchstitcher - INFO - Loading fine_branch from ./work_dir/depthanything_vitb_u4k/fine_pretrain/checkpoint_24.pth 
+2024/03/15 19:30:44 - patchstitcher - INFO - +2024/03/15 19:30:45 - patchstitcher - INFO - DistributedDataParallel( + (module): PatchFusion( + (coarse_branch): ZoeDepth( + (core): DepthAnythingCore( + (core): DPT_DINOv2( + (pretrained): DinoVisionTransformer( + (patch_embed): PatchEmbed( + (proj): Conv2d(3, 768, kernel_size=(14, 14), stride=(14, 14)) + (norm): Identity() + ) + (blocks): ModuleList( + (0-11): 12 x NestedTensorBlock( + (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): MemEffAttention( + (qkv): Linear(in_features=768, out_features=2304, bias=True) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=768, out_features=768, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (ls1): LayerScale() + (drop_path1): Identity() + (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=768, out_features=3072, bias=True) + (act): GELU(approximate='none') + (fc2): Linear(in_features=3072, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ls2): LayerScale() + (drop_path2): Identity() + ) + ) + (norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (head): Identity() + ) + (depth_head): DPTHead( + (projects): ModuleList( + (0): Conv2d(768, 96, kernel_size=(1, 1), stride=(1, 1)) + (1): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1)) + (2): Conv2d(768, 384, kernel_size=(1, 1), stride=(1, 1)) + (3): Conv2d(768, 768, kernel_size=(1, 1), stride=(1, 1)) + ) + (resize_layers): ModuleList( + (0): ConvTranspose2d(96, 96, kernel_size=(4, 4), stride=(4, 4)) + (1): ConvTranspose2d(192, 192, kernel_size=(2, 2), stride=(2, 2)) + (2): Identity() + (3): Conv2d(768, 768, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) + ) + (scratch): Module( + (layer1_rn): Conv2d(96, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer2_rn): Conv2d(192, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer3_rn): Conv2d(384, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer4_rn): Conv2d(768, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (refinenet1): FeatureFusionBlock( + (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet2): FeatureFusionBlock( + (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + 
(activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet3): FeatureFusionBlock( + (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet4): FeatureFusionBlock( + (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (output_conv1): Conv2d(128, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (output_conv2): Sequential( + (0): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(32, 1, kernel_size=(1, 1), stride=(1, 1)) + (3): ReLU(inplace=True) + (4): Identity() + ) + ) + ) + ) + ) + (conv2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (seed_bin_regressor): SeedBinRegressorUnnormed( + (_net): Sequential( + (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (seed_projector): Projector( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + (projectors): ModuleList( + (0-3): 4 x Projector( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + ) + (attractors): ModuleList( + (0): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 16, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (1): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 8, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (2): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 4, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, 
threshold=20) + ) + ) + (3): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 1, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + ) + (conditional_log_binomial): ConditionalLogBinomial( + (log_binomial_transform): LogBinomial() + (mlp): Sequential( + (0): Conv2d(161, 80, kernel_size=(1, 1), stride=(1, 1)) + (1): GELU(approximate='none') + (2): Conv2d(80, 4, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + ) + (fine_branch): ZoeDepth( + (core): DepthAnythingCore( + (core): DPT_DINOv2( + (pretrained): DinoVisionTransformer( + (patch_embed): PatchEmbed( + (proj): Conv2d(3, 768, kernel_size=(14, 14), stride=(14, 14)) + (norm): Identity() + ) + (blocks): ModuleList( + (0-11): 12 x NestedTensorBlock( + (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (attn): MemEffAttention( + (qkv): Linear(in_features=768, out_features=2304, bias=True) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=768, out_features=768, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (ls1): LayerScale() + (drop_path1): Identity() + (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=768, out_features=3072, bias=True) + (act): GELU(approximate='none') + (fc2): Linear(in_features=3072, out_features=768, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ls2): LayerScale() + (drop_path2): Identity() + ) + ) + (norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True) + (head): Identity() + ) + (depth_head): DPTHead( + (projects): ModuleList( + (0): Conv2d(768, 96, kernel_size=(1, 1), stride=(1, 1)) + (1): Conv2d(768, 192, kernel_size=(1, 1), stride=(1, 1)) + (2): Conv2d(768, 384, kernel_size=(1, 1), stride=(1, 1)) + (3): Conv2d(768, 768, kernel_size=(1, 1), stride=(1, 1)) + ) + (resize_layers): ModuleList( + (0): ConvTranspose2d(96, 96, kernel_size=(4, 4), stride=(4, 4)) + (1): ConvTranspose2d(192, 192, kernel_size=(2, 2), stride=(2, 2)) + (2): Identity() + (3): Conv2d(768, 768, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) + ) + (scratch): Module( + (layer1_rn): Conv2d(96, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer2_rn): Conv2d(192, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer3_rn): Conv2d(384, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer4_rn): Conv2d(768, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (refinenet1): FeatureFusionBlock( + (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet2): FeatureFusionBlock( + (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(128, 128, 
kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet3): FeatureFusionBlock( + (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet4): FeatureFusionBlock( + (out_conv): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (output_conv1): Conv2d(128, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (output_conv2): Sequential( + (0): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(32, 1, kernel_size=(1, 1), stride=(1, 1)) + (3): ReLU(inplace=True) + (4): Identity() + ) + ) + ) + ) + ) + (conv2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (seed_bin_regressor): SeedBinRegressorUnnormed( + (_net): Sequential( + (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (seed_projector): Projector( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + (projectors): ModuleList( + (0-3): 4 x Projector( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + ) + (attractors): ModuleList( + (0): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 16, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (1): AttractorLayerUnnormed( + 
(_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 8, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (2): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 4, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (3): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 1, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + ) + (conditional_log_binomial): ConditionalLogBinomial( + (log_binomial_transform): LogBinomial() + (mlp): Sequential( + (0): Conv2d(161, 80, kernel_size=(1, 1), stride=(1, 1)) + (1): GELU(approximate='none') + (2): Conv2d(80, 4, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + ) + (sigloss): SILogLoss() + (fusion_conv_list): ModuleList( + (0-4): 5 x Conv2d(256, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (5): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + ) + (guided_fusion): GuidedFusionPatchFusion( + (inc): DoubleConv( + (double_conv): Sequential( + (0): Conv2d(5, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (1): SyncBatchNorm(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + (3): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (4): SyncBatchNorm(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (5): ReLU(inplace=True) + ) + ) + (down_conv_list): ModuleList( + (0): Down( + (maxpool_conv): Sequential( + (0): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False) + (1): DoubleConv( + (double_conv): Sequential( + (0): Conv2d(32, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (1): SyncBatchNorm(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + (3): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (4): SyncBatchNorm(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (5): ReLU(inplace=True) + ) + ) + ) + ) + (1-4): 4 x Down( + (maxpool_conv): Sequential( + (0): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False) + (1): DoubleConv( + (double_conv): Sequential( + (0): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (1): SyncBatchNorm(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (2): ReLU(inplace=True) + (3): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (4): SyncBatchNorm(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (5): ReLU(inplace=True) + ) + ) + ) + ) + ) + (up_conv_list): ModuleList( + (0-3): 4 x Upv1( + (conv): DoubleConvWOBN( + (double_conv): Sequential( + (0): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(384, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (3): ReLU(inplace=True) + ) + ) + ) + (4): Upv1( + (conv): DoubleConvWOBN( + (double_conv): Sequential( + (0): Conv2d(288, 288, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(288, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (3): ReLU(inplace=True) + ) + ) + ) 
+ ) + (g2l_att): ModuleList() + (g2l_list): ModuleList( + (0-1): 2 x G2LFusion( + (g2l_layer): G2LBasicLayer( + (blocks): ModuleList( + (0-3): 4 x SwinTransformerBlock( + (norm1): LayerNorm((128,), eps=1e-05, elementwise_affine=True) + (attn): WindowAttention( + dim=128, window_size=(12, 12), num_heads=32 + (qkv): Linear(in_features=128, out_features=384, bias=True) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=128, out_features=128, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + (softmax): Softmax(dim=-1) + ) + (drop_path): Identity() + (norm2): LayerNorm((128,), eps=1e-05, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=128, out_features=512, bias=True) + (act): GELU(approximate='none') + (fc2): Linear(in_features=512, out_features=128, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + ) + ) + (g2l_layer_norm): LayerNorm((128,), eps=1e-05, elementwise_affine=True) + (embed_proj): Conv2d(1, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + (2-3): 2 x G2LFusion( + (g2l_layer): G2LBasicLayer( + (blocks): ModuleList( + (0-2): 3 x SwinTransformerBlock( + (norm1): LayerNorm((128,), eps=1e-05, elementwise_affine=True) + (attn): WindowAttention( + dim=128, window_size=(12, 12), num_heads=16 + (qkv): Linear(in_features=128, out_features=384, bias=True) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=128, out_features=128, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + (softmax): Softmax(dim=-1) + ) + (drop_path): Identity() + (norm2): LayerNorm((128,), eps=1e-05, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=128, out_features=512, bias=True) + (act): GELU(approximate='none') + (fc2): Linear(in_features=512, out_features=128, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + ) + ) + (g2l_layer_norm): LayerNorm((128,), eps=1e-05, elementwise_affine=True) + (embed_proj): Conv2d(1, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + (4): G2LFusion( + (g2l_layer): G2LBasicLayer( + (blocks): ModuleList( + (0-1): 2 x SwinTransformerBlock( + (norm1): LayerNorm((128,), eps=1e-05, elementwise_affine=True) + (attn): WindowAttention( + dim=128, window_size=(12, 12), num_heads=8 + (qkv): Linear(in_features=128, out_features=384, bias=True) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=128, out_features=128, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + (softmax): Softmax(dim=-1) + ) + (drop_path): Identity() + (norm2): LayerNorm((128,), eps=1e-05, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=128, out_features=512, bias=True) + (act): GELU(approximate='none') + (fc2): Linear(in_features=512, out_features=128, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + ) + ) + (g2l_layer_norm): LayerNorm((128,), eps=1e-05, elementwise_affine=True) + (embed_proj): Conv2d(1, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + (5): G2LFusion( + (g2l_layer): G2LBasicLayer( + (blocks): ModuleList( + (0-1): 2 x SwinTransformerBlock( + (norm1): LayerNorm((32,), eps=1e-05, elementwise_affine=True) + (attn): WindowAttention( + dim=32, window_size=(12, 12), num_heads=8 + (qkv): Linear(in_features=32, out_features=96, bias=True) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=32, out_features=32, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + (softmax): Softmax(dim=-1) + ) + (drop_path): Identity() + (norm2): LayerNorm((32,), eps=1e-05, elementwise_affine=True) + (mlp): Mlp( + (fc1): 
Linear(in_features=32, out_features=128, bias=True) + (act): GELU(approximate='none') + (fc2): Linear(in_features=128, out_features=32, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + ) + ) + ) + (g2l_layer_norm): LayerNorm((32,), eps=1e-05, elementwise_affine=True) + (embed_proj): Conv2d(1, 32, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + (convs): ModuleList( + (0-4): 5 x DoubleConvWOBN( + (double_conv): Sequential( + (0): Conv2d(256, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (3): ReLU(inplace=True) + ) + ) + (5): DoubleConvWOBN( + (double_conv): Sequential( + (0): Conv2d(64, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (3): ReLU(inplace=True) + ) + ) + ) + ) + (seed_bin_regressor): SeedBinRegressorUnnormed( + (_net): Sequential( + (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (seed_projector): Projector( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + (projectors): ModuleList( + (0-3): 4 x Projector( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + ) + (attractors): ModuleList( + (0): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 16, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (1): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 8, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (2): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 4, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (3): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 1, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + ) + (conditional_log_binomial): ConditionalLogBinomial( + (log_binomial_transform): LogBinomial() + (mlp): Sequential( + (0): Conv2d(161, 80, kernel_size=(1, 1), stride=(1, 1)) + (1): GELU(approximate='none') + (2): Conv2d(80, 4, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + ) +) +2024/03/15 19:30:51 - patchstitcher - INFO - successfully init trainer +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.fusion_conv_list.0.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.fusion_conv_list.0.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.fusion_conv_list.1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.fusion_conv_list.1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.fusion_conv_list.2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.fusion_conv_list.2.bias 
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.fusion_conv_list.3.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.fusion_conv_list.3.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.fusion_conv_list.4.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.fusion_conv_list.4.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.fusion_conv_list.5.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.fusion_conv_list.5.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.inc.double_conv.0.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.inc.double_conv.1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.inc.double_conv.1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.inc.double_conv.3.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.inc.double_conv.4.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.inc.double_conv.4.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.0.maxpool_conv.1.double_conv.0.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.0.maxpool_conv.1.double_conv.1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.0.maxpool_conv.1.double_conv.1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.0.maxpool_conv.1.double_conv.3.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.0.maxpool_conv.1.double_conv.4.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.0.maxpool_conv.1.double_conv.4.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.1.maxpool_conv.1.double_conv.0.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.1.maxpool_conv.1.double_conv.1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.1.maxpool_conv.1.double_conv.1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.1.maxpool_conv.1.double_conv.3.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.1.maxpool_conv.1.double_conv.4.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.1.maxpool_conv.1.double_conv.4.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.2.maxpool_conv.1.double_conv.0.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.2.maxpool_conv.1.double_conv.1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.2.maxpool_conv.1.double_conv.1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.2.maxpool_conv.1.double_conv.3.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.2.maxpool_conv.1.double_conv.4.weight +2024/03/15 19:30:51 - patchstitcher - INFO 
- training param: module.guided_fusion.down_conv_list.2.maxpool_conv.1.double_conv.4.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.3.maxpool_conv.1.double_conv.0.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.3.maxpool_conv.1.double_conv.1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.3.maxpool_conv.1.double_conv.1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.3.maxpool_conv.1.double_conv.3.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.3.maxpool_conv.1.double_conv.4.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.3.maxpool_conv.1.double_conv.4.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.4.maxpool_conv.1.double_conv.0.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.4.maxpool_conv.1.double_conv.1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.4.maxpool_conv.1.double_conv.1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.4.maxpool_conv.1.double_conv.3.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.4.maxpool_conv.1.double_conv.4.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.down_conv_list.4.maxpool_conv.1.double_conv.4.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.0.conv.double_conv.0.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.0.conv.double_conv.0.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.0.conv.double_conv.2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.0.conv.double_conv.2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.1.conv.double_conv.0.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.1.conv.double_conv.0.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.1.conv.double_conv.2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.1.conv.double_conv.2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.2.conv.double_conv.0.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.2.conv.double_conv.0.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.2.conv.double_conv.2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.2.conv.double_conv.2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.3.conv.double_conv.0.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.3.conv.double_conv.0.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: 
module.guided_fusion.up_conv_list.3.conv.double_conv.2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.3.conv.double_conv.2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.4.conv.double_conv.0.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.4.conv.double_conv.0.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.4.conv.double_conv.2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.up_conv_list.4.conv.double_conv.2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.absolute_pos_embed +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.norm1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.norm1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.attn.relative_position_bias_table +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.attn.qkv.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.attn.qkv.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.attn.proj.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.attn.proj.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.norm2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.norm2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.mlp.fc1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.mlp.fc1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.mlp.fc2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.0.mlp.fc2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.norm1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.norm1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.attn.relative_position_bias_table +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.attn.qkv.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.attn.qkv.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.attn.proj.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.attn.proj.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.norm2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training 
param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.norm2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.mlp.fc1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.mlp.fc1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.mlp.fc2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.1.mlp.fc2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.norm1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.norm1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.attn.relative_position_bias_table +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.attn.qkv.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.attn.qkv.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.attn.proj.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.attn.proj.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.norm2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.norm2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.mlp.fc1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.mlp.fc1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.mlp.fc2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.2.mlp.fc2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.norm1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.norm1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.attn.relative_position_bias_table +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.attn.qkv.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.attn.qkv.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.attn.proj.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.attn.proj.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.norm2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.norm2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.mlp.fc1.weight +2024/03/15 
19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.mlp.fc1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.mlp.fc2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer.blocks.3.mlp.fc2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer_norm.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.g2l_layer_norm.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.embed_proj.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.0.embed_proj.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.absolute_pos_embed +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.norm1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.norm1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.attn.relative_position_bias_table +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.attn.qkv.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.attn.qkv.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.attn.proj.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.attn.proj.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.norm2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.norm2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.mlp.fc1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.mlp.fc1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.mlp.fc2.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.0.mlp.fc2.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.norm1.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.norm1.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.attn.relative_position_bias_table +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.attn.qkv.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.attn.qkv.bias +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.attn.proj.weight +2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.attn.proj.bias +2024/03/15 19:30:51 - patchstitcher - 
INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.norm2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.norm2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.mlp.fc1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.mlp.fc1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.mlp.fc2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.1.mlp.fc2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.norm1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.norm1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.attn.relative_position_bias_table
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.attn.qkv.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.attn.qkv.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.attn.proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.attn.proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.norm2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.norm2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.mlp.fc1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.mlp.fc1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.mlp.fc2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.2.mlp.fc2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.norm1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.norm1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.attn.relative_position_bias_table
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.attn.qkv.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.attn.qkv.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.attn.proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.attn.proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.norm2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.norm2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.mlp.fc1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.mlp.fc1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.mlp.fc2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer.blocks.3.mlp.fc2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer_norm.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.g2l_layer_norm.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.embed_proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.1.embed_proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.absolute_pos_embed
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.norm1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.norm1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.attn.relative_position_bias_table
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.attn.qkv.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.attn.qkv.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.attn.proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.attn.proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.norm2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.norm2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.mlp.fc1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.mlp.fc1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.mlp.fc2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.0.mlp.fc2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.norm1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.norm1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.attn.relative_position_bias_table
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.attn.qkv.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.attn.qkv.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.attn.proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.attn.proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.norm2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.norm2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.mlp.fc1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.mlp.fc1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.mlp.fc2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.1.mlp.fc2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.norm1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.norm1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.attn.relative_position_bias_table
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.attn.qkv.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.attn.qkv.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.attn.proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.attn.proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.norm2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.norm2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.mlp.fc1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.mlp.fc1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.mlp.fc2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer.blocks.2.mlp.fc2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer_norm.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.g2l_layer_norm.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.embed_proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.2.embed_proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.absolute_pos_embed
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.norm1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.norm1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.attn.relative_position_bias_table
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.attn.qkv.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.attn.qkv.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.attn.proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.attn.proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.norm2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.norm2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.mlp.fc1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.mlp.fc1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.mlp.fc2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.0.mlp.fc2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.norm1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.norm1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.attn.relative_position_bias_table
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.attn.qkv.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.attn.qkv.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.attn.proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.attn.proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.norm2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.norm2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.mlp.fc1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.mlp.fc1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.mlp.fc2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.1.mlp.fc2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.norm1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.norm1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.attn.relative_position_bias_table
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.attn.qkv.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.attn.qkv.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.attn.proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.attn.proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.norm2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.norm2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.mlp.fc1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.mlp.fc1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.mlp.fc2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer.blocks.2.mlp.fc2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer_norm.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.g2l_layer_norm.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.embed_proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.3.embed_proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.absolute_pos_embed
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.norm1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.norm1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.attn.relative_position_bias_table
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.attn.qkv.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.attn.qkv.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.attn.proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.attn.proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.norm2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.norm2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.mlp.fc1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.mlp.fc1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.mlp.fc2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.0.mlp.fc2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.norm1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.norm1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.attn.relative_position_bias_table
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.attn.qkv.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.attn.qkv.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.attn.proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.attn.proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.norm2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.norm2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.mlp.fc1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.mlp.fc1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.mlp.fc2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer.blocks.1.mlp.fc2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer_norm.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.g2l_layer_norm.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.embed_proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.4.embed_proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.absolute_pos_embed
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.norm1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.norm1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.attn.relative_position_bias_table
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.attn.qkv.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.attn.qkv.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.attn.proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.attn.proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.norm2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.norm2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.mlp.fc1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.mlp.fc1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.mlp.fc2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.0.mlp.fc2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.norm1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.norm1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.attn.relative_position_bias_table
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.attn.qkv.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.attn.qkv.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.attn.proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.attn.proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.norm2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.norm2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.mlp.fc1.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.mlp.fc1.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.mlp.fc2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer.blocks.1.mlp.fc2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer_norm.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.g2l_layer_norm.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.embed_proj.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.g2l_list.5.embed_proj.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.0.double_conv.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.0.double_conv.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.0.double_conv.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.0.double_conv.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.1.double_conv.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.1.double_conv.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.1.double_conv.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.1.double_conv.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.2.double_conv.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.2.double_conv.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.2.double_conv.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.2.double_conv.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.3.double_conv.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.3.double_conv.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.3.double_conv.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.3.double_conv.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.4.double_conv.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.4.double_conv.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.4.double_conv.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.4.double_conv.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.5.double_conv.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.5.double_conv.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.5.double_conv.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.guided_fusion.convs.5.double_conv.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.seed_bin_regressor._net.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.seed_bin_regressor._net.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.seed_bin_regressor._net.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.seed_bin_regressor._net.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.seed_projector._net.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.seed_projector._net.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.seed_projector._net.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.seed_projector._net.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.0._net.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.0._net.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.0._net.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.0._net.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.1._net.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.1._net.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.1._net.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.1._net.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.2._net.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.2._net.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.2._net.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.2._net.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.3._net.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.3._net.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.3._net.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.projectors.3._net.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.0._net.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.0._net.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.0._net.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.0._net.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.1._net.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.1._net.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.1._net.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.1._net.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.2._net.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.2._net.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.2._net.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.2._net.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.3._net.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.3._net.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.3._net.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.attractors.3._net.2.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.conditional_log_binomial.mlp.0.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.conditional_log_binomial.mlp.0.bias
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.conditional_log_binomial.mlp.2.weight
+2024/03/15 19:30:51 - patchstitcher - INFO - training param: module.conditional_log_binomial.mlp.2.bias
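The listing above enumerates every tensor that will receive gradients in this run. As a minimal illustrative sketch (assuming standard PyTorch logging; the function name is hypothetical and this is not the project's actual logging code), a per-parameter listing like this is typically produced by iterating named_parameters() and printing only entries with requires_grad=True; the leading "module." prefix on every name comes from wrapping the model in DistributedDataParallel:

import logging
import torch.nn as nn

logger = logging.getLogger('patchstitcher')  # assumed logger name, matching the prefix in this log

def log_trainable_params(model: nn.Module) -> None:
    # One INFO line per parameter that will be optimized; DDP wrapping adds the 'module.' prefix.
    for name, param in model.named_parameters():
        if param.requires_grad:
            logger.info('training param: %s', name)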
+2024/03/15 19:33:45 - patchstitcher - INFO - Epoch: [01/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 8.196772575378418 - sig_loss: 8.196772575378418
+2024/03/15 19:35:51 - patchstitcher - INFO - Epoch: [01/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 4.177845478057861 - sig_loss: 4.177845478057861
+2024/03/15 19:38:00 - patchstitcher - INFO - Epoch: [01/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 4.080796241760254 - sig_loss: 4.080796241760254
+2024/03/15 19:40:09 - patchstitcher - INFO - Epoch: [01/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.0100841522216797 - sig_loss: 1.0100841522216797
+2024/03/15 19:44:11 - patchstitcher - INFO - Epoch: [02/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.1344835758209229 - sig_loss: 1.1344835758209229
+2024/03/15 19:46:18 - patchstitcher - INFO - Epoch: [02/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.766528308391571 - sig_loss: 0.766528308391571
+2024/03/15 19:48:26 - patchstitcher - INFO - Epoch: [02/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.742828369140625 - sig_loss: 0.742828369140625
+2024/03/15 19:50:31 - patchstitcher - INFO - Epoch: [02/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.0008141994476318 - sig_loss: 1.0008141994476318
+2024/03/15 19:52:41 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+|    a1     |    a2     |    a3     |  abs_rel  |   rmse    |  log_10   | rmse_log  |   silog   |  sq_rel   |    see    |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+| 0.9690564 | 0.9931927 | 0.997483  | 0.0683934 | 1.4234179 | 0.0297297 | 0.0982972 | 8.7126409 | 0.1563224 | 1.2264686 |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+2024/03/15 19:54:56 - patchstitcher - INFO - Epoch: [03/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.9230748414993286 - sig_loss: 1.9230748414993286
+2024/03/15 19:57:02 - patchstitcher - INFO - Epoch: [03/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.6135638952255249 - sig_loss: 0.6135638952255249
+2024/03/15 19:59:09 - patchstitcher - INFO - Epoch: [03/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.49874502420425415 - sig_loss: 0.49874502420425415
+2024/03/15 20:01:17 - patchstitcher - INFO - Epoch: [03/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.9610685110092163 - sig_loss: 0.9610685110092163
+2024/03/15 20:05:08 - patchstitcher - INFO - Epoch: [04/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.9942363500595093 - sig_loss: 0.9942363500595093
+2024/03/15 20:07:14 - patchstitcher - INFO - Epoch: [04/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8103439807891846 - sig_loss: 0.8103439807891846
+2024/03/15 20:09:22 - patchstitcher - INFO - Epoch: [04/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.4848376214504242 - sig_loss: 0.4848376214504242
+2024/03/15 20:11:31 - patchstitcher - INFO - Epoch: [04/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.39297956228256226 - sig_loss: 0.39297956228256226
+2024/03/15 20:13:33 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+|    a1     |    a2     |    a3     |  abs_rel  |   rmse    |  log_10   | rmse_log  |   silog   |  sq_rel   |    see    |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+| 0.9792909 | 0.9939313 | 0.9976017 | 0.0472044 | 1.221259  | 0.0211389 | 0.079348  | 7.1940729 | 0.1013146 | 0.9066877 |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+2024/03/15 20:15:49 - patchstitcher - INFO - Epoch: [05/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.346112996339798 - sig_loss: 0.346112996339798
+2024/03/15 20:17:56 - patchstitcher - INFO - Epoch: [05/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.37177300453186035 - sig_loss: 0.37177300453186035
+2024/03/15 20:20:03 - patchstitcher - INFO - Epoch: [05/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.19638416171073914 - sig_loss: 0.19638416171073914
+2024/03/15 20:22:15 - patchstitcher - INFO - Epoch: [05/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.31483086943626404 - sig_loss: 0.31483086943626404
+2024/03/15 20:26:06 - patchstitcher - INFO - Epoch: [06/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.4689038395881653 - sig_loss: 0.4689038395881653
+2024/03/15 20:28:15 - patchstitcher - INFO - Epoch: [06/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.7205864191055298 - sig_loss: 0.7205864191055298
+2024/03/15 20:30:20 - patchstitcher - INFO - Epoch: [06/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.3387361168861389 - sig_loss: 0.3387361168861389
+2024/03/15 20:32:31 - patchstitcher - INFO - Epoch: [06/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.17904606461524963 - sig_loss: 0.17904606461524963
+2024/03/15 20:34:33 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+|    a1     |    a2     |    a3     |  abs_rel  |   rmse    |  log_10   | rmse_log  |   silog   |  sq_rel   |    see    |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+| 0.9811088 | 0.9944261 | 0.9976145 | 0.0587846 | 1.1488307 | 0.0250373 | 0.0835342 | 6.7835254 | 0.0985873 | 0.9090285 |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+2024/03/15 20:36:45 - patchstitcher - INFO - Epoch: [07/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.598641037940979 - sig_loss: 0.598641037940979
+2024/03/15 20:38:52 - patchstitcher - INFO - Epoch: [07/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.6939734220504761 - sig_loss: 0.6939734220504761
+2024/03/15 20:40:58 - patchstitcher - INFO - Epoch: [07/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.2778879404067993 - sig_loss: 1.2778879404067993
+2024/03/15 20:43:04 - patchstitcher - INFO - Epoch: [07/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.25490495562553406 - sig_loss: 0.25490495562553406
+2024/03/15 20:46:52 - patchstitcher - INFO - Epoch: [08/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.6926709413528442 - sig_loss: 0.6926709413528442
+2024/03/15 20:49:01 - patchstitcher - INFO - Epoch: [08/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.4315345287322998 - sig_loss: 0.4315345287322998
+2024/03/15 20:51:10 - patchstitcher - INFO - Epoch: [08/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.5032333135604858 - sig_loss: 1.5032333135604858
+2024/03/15 20:53:16 - patchstitcher - INFO - Epoch: [08/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.5602918863296509 - sig_loss: 0.5602918863296509
+2024/03/15 20:55:19 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+|    a1     |    a2     |    a3     |  abs_rel  |   rmse    |  log_10   | rmse_log  |   silog   |  sq_rel   |    see    |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+| 0.981955  | 0.9944548 | 0.997687  | 0.0567417 | 1.1642307 | 0.0240603 | 0.0811943 | 6.7784523 | 0.1035022 | 0.9762238 |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+2024/03/15 20:57:34 - patchstitcher - INFO - Epoch: [09/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.4373475909233093 - sig_loss: 0.4373475909233093
+2024/03/15 20:59:40 - patchstitcher - INFO - Epoch: [09/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.5699481964111328 - sig_loss: 1.5699481964111328
+2024/03/15 21:01:47 - patchstitcher - INFO - Epoch: [09/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.3727685213088989 - sig_loss: 0.3727685213088989
+2024/03/15 21:03:55 - patchstitcher - INFO - Epoch: [09/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.8527534008026123 - sig_loss: 0.8527534008026123
+2024/03/15 21:07:41 - patchstitcher - INFO - Epoch: [10/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.3214010000228882 - sig_loss: 0.3214010000228882
+2024/03/15 21:09:47 - patchstitcher - INFO - Epoch: [10/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.24748793244361877 - sig_loss: 0.24748793244361877
+2024/03/15 21:11:53 - patchstitcher - INFO - Epoch: [10/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.9347164630889893 - sig_loss: 0.9347164630889893
+2024/03/15 21:14:01 - patchstitcher - INFO - Epoch: [10/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.5690074563026428 - sig_loss: 0.5690074563026428
+2024/03/15 21:16:03 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+|    a1     |    a2     |    a3     |  abs_rel  |   rmse    |  log_10   | rmse_log  |   silog   |  sq_rel   |    see    |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+| 0.9828057 | 0.9944213 | 0.9976482 | 0.0431387 | 1.153271  | 0.0186824 | 0.0717582 | 6.5566115 | 0.0970193 | 0.9094797 |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+2024/03/15 21:18:19 - patchstitcher - INFO - Epoch: [11/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.4396677017211914 - sig_loss: 0.4396677017211914
+2024/03/15 21:20:26 - patchstitcher - INFO - Epoch: [11/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.0656664371490479 - sig_loss: 1.0656664371490479
+2024/03/15 21:22:32 - patchstitcher - INFO - Epoch: [11/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.9697193503379822 - sig_loss: 0.9697193503379822
+2024/03/15 21:24:40 - patchstitcher - INFO - Epoch: [11/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.3520055115222931 - sig_loss: 0.3520055115222931
+2024/03/15 21:28:27 - patchstitcher - INFO - Epoch: [12/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.5407581329345703 - sig_loss: 0.5407581329345703
+2024/03/15 21:30:35 - patchstitcher - INFO - Epoch: [12/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.402178019285202 - sig_loss: 0.402178019285202
+2024/03/15 21:32:44 - patchstitcher - INFO - Epoch: [12/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.3128345310688019 - sig_loss: 0.3128345310688019
+2024/03/15 21:34:48 - patchstitcher - INFO - Epoch: [12/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.2746131122112274 - sig_loss: 0.2746131122112274
+2024/03/15 21:36:50 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+|    a1     |    a2     |    a3     |  abs_rel  |   rmse    |  log_10   | rmse_log  |   silog   |  sq_rel   |    see    |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+| 0.9829779 | 0.9943216 | 0.9976086 | 0.0449465 | 1.1585257 | 0.0199354 | 0.0738602 | 6.4633956 | 0.0979459 | 0.8825008 |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+2024/03/15 21:39:05 - patchstitcher - INFO - Epoch: [13/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.9073178768157959 - sig_loss: 0.9073178768157959
+2024/03/15 21:41:14 - patchstitcher - INFO - Epoch: [13/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.522519052028656 - sig_loss: 0.522519052028656
+2024/03/15 21:43:21 - patchstitcher - INFO - Epoch: [13/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.48742449283599854 - sig_loss: 0.48742449283599854
+2024/03/15 21:45:26 - patchstitcher - INFO - Epoch: [13/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.6288169622421265 - sig_loss: 0.6288169622421265
+2024/03/15 21:49:14 - patchstitcher - INFO - Epoch: [14/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.3309251368045807 - sig_loss: 0.3309251368045807
+2024/03/15 21:51:19 - patchstitcher - INFO - Epoch: [14/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.2369067668914795 - sig_loss: 0.2369067668914795
+2024/03/15 21:53:27 - patchstitcher - INFO - Epoch: [14/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.31572964787483215 - sig_loss: 0.31572964787483215
+2024/03/15 21:55:33 - patchstitcher - INFO - Epoch: [14/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.4297329783439636 - sig_loss: 0.4297329783439636
+2024/03/15 21:57:35 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+|    a1     |    a2     |    a3     |  abs_rel  |   rmse    |  log_10   | rmse_log  |   silog   |  sq_rel   |    see    |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+| 0.9839865 | 0.9945975 | 0.9976648 | 0.040681  | 1.0868772 | 0.0176451 | 0.0695904 | 6.3975404 | 0.0912883 | 0.8778101 |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+2024/03/15 21:59:49 - patchstitcher - INFO - Epoch: [15/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.27064019441604614 - sig_loss: 0.27064019441604614
+2024/03/15 22:01:56 - patchstitcher - INFO - Epoch: [15/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.2585503160953522 - sig_loss: 0.2585503160953522
+2024/03/15 22:04:00 - patchstitcher - INFO - Epoch: [15/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.3952506482601166 - sig_loss: 0.3952506482601166
+2024/03/15 22:06:07 - patchstitcher - INFO - Epoch: [15/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.49016281962394714 - sig_loss: 0.49016281962394714
+2024/03/15 22:09:58 - patchstitcher - INFO - Epoch: [16/16] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.1630560159683228 - sig_loss: 1.1630560159683228
+2024/03/15 22:12:03 - patchstitcher - INFO - Epoch: [16/16] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.7704189419746399 - sig_loss: 0.7704189419746399
+2024/03/15 22:14:10 - patchstitcher - INFO - Epoch: [16/16] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.4150314927101135 - sig_loss: 0.4150314927101135
+2024/03/15 22:16:14 - patchstitcher - INFO - Epoch: [16/16] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.7451047897338867 - sig_loss: 1.7451047897338867
+2024/03/15 22:18:19 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+|    a1     |    a2     |    a3     |  abs_rel  |   rmse    |  log_10   | rmse_log  |   silog   |  sq_rel   |    see    |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+| 0.9840401 | 0.9945714 | 0.9976669 | 0.0400293 | 1.0903379 | 0.0174627 | 0.0692402 | 6.368767  | 0.0898903 | 0.8650321 |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+
+2024/03/15 22:18:19 - patchstitcher - INFO - Saving ckp, but use the inner get_save_dict fuction to get model_dict
+2024/03/15 22:18:19 - patchstitcher - INFO - For saving space. Would you like to save base model several times? :>
+2024/03/15 22:18:20 - patchstitcher - INFO - save checkpoint_16.pth at ./work_dir/depthanything_vitb_u4k/patchfusion
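For reference, the per-step "Total Loss" / "sig_loss" values and the evaluation-summary columns (a1, a2, a3, abs_rel, rmse, log_10, rmse_log, silog, sq_rel) match the scale-invariant log loss and the metrics conventionally reported for monocular depth estimation. The sketch below is an illustrative reimplementation under that assumption, not this project's own code: the function names are hypothetical, the lambda/scale constants in the loss and the factor of 100 in the silog metric are common choices rather than values confirmed from this run, and the "see" column (an edge-related error specific to this evaluation) is omitted because its definition is not shown in the log.

import torch

def silog_loss(pred, target, mask, lam=0.15, scale=10.0):
    # Scale-invariant log loss over valid pixels; pred and target are positive depth maps.
    d = torch.log(pred[mask]) - torch.log(target[mask])
    return scale * torch.sqrt((d ** 2).mean() - lam * d.mean() ** 2)

def depth_metrics(pred, target):
    # Standard monocular-depth metrics matching the table columns above (except 'see').
    thresh = torch.max(pred / target, target / pred)
    a1 = (thresh < 1.25).float().mean()
    a2 = (thresh < 1.25 ** 2).float().mean()
    a3 = (thresh < 1.25 ** 3).float().mean()
    abs_rel = ((pred - target).abs() / target).mean()
    sq_rel = (((pred - target) ** 2) / target).mean()
    rmse = torch.sqrt(((pred - target) ** 2).mean())
    log_10 = (torch.log10(pred) - torch.log10(target)).abs().mean()
    rmse_log = torch.sqrt(((torch.log(pred) - torch.log(target)) ** 2).mean())
    d = torch.log(pred) - torch.log(target)
    silog = 100 * torch.sqrt((d ** 2).mean() - d.mean() ** 2)
    return dict(a1=a1, a2=a2, a3=a3, abs_rel=abs_rel, rmse=rmse, log_10=log_10,
                rmse_log=rmse_log, silog=silog, sq_rel=sq_rel)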