diff --git "a/depthanything_vitl_u4k/fine_pretrain/20240315_140837.log" "b/depthanything_vitl_u4k/fine_pretrain/20240315_140837.log" new file mode 100644--- /dev/null +++ "b/depthanything_vitl_u4k/fine_pretrain/20240315_140837.log" @@ -0,0 +1,1195 @@ +2024/03/15 14:08:48 - patchstitcher - INFO - +------------------------------------------------------------ +System environment: + sys.platform: linux + Python: 3.8.18 | packaged by conda-forge | (default, Oct 10 2023, 15:44:36) [GCC 12.3.0] + CUDA available: True + numpy_random_seed: 621 + GPU 0,1,2,3: NVIDIA A100-SXM4-80GB + CUDA_HOME: /sw/rl9g/cuda/11.8/rl9_binary + NVCC: Cuda compilation tools, release 11.8, V11.8.89 + GCC: gcc (GCC) 11.3.1 20220421 (Red Hat 11.3.1-2) + PyTorch: 2.1.2 + PyTorch compiling details: PyTorch built with: + - GCC 9.3 + - C++ Version: 201703 + - Intel(R) oneAPI Math Kernel Library Version 2022.1-Product Build 20220311 for Intel(R) 64 architecture applications + - Intel(R) MKL-DNN v3.1.1 (Git Hash 64f6bcbcbab628e96f33a62c3e975f8535a7bde4) + - OpenMP 201511 (a.k.a. OpenMP 4.5) + - LAPACK is enabled (usually provided by MKL) + - NNPACK is enabled + - CPU capability usage: AVX2 + - CUDA Runtime 11.8 + - NVCC architecture flags: -gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_61,code=sm_61;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86;-gencode;arch=compute_37,code=sm_37;-gencode;arch=compute_90,code=sm_90;-gencode;arch=compute_37,code=compute_37 + - CuDNN 8.7 + - Magma 2.6.1 + - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=11.8, CUDNN_VERSION=8.7.0, CXX_COMPILER=/opt/rh/devtoolset-9/root/usr/bin/c++, CXX_FLAGS= -D_GLIBCXX_USE_CXX11_ABI=0 -fabi-version=11 -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -DNDEBUG -DUSE_KINETO -DLIBKINETO_NOROCTRACER -DUSE_FBGEMM -DUSE_QNNPACK -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wall -Wextra -Werror=return-type -Werror=non-virtual-dtor -Werror=bool-operation -Wnarrowing -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-unused-parameter -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-stringop-overflow -Wno-psabi -Wno-error=pedantic -Wno-error=old-style-cast -Wno-invalid-partial-specialization -Wno-unused-private-field -Wno-aligned-allocation-unavailable -Wno-missing-braces -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Werror=cast-function-type -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_DISABLE_GPU_ASSERTS=ON, TORCH_VERSION=2.1.2, USE_CUDA=ON, USE_CUDNN=ON, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=ON, USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF, + + TorchVision: 0.16.2 + OpenCV: 4.8.1 + MMEngine: 0.10.2 + +Runtime environment: + cudnn_benchmark: True + mp_cfg: {'mp_start_method': 'forkserver'} + dist_cfg: {'backend': 'nccl'} + seed: 621 + Distributed launcher: pytorch + Distributed training: True + GPU number: 4 +------------------------------------------------------------ + +2024/03/15 14:08:49 - patchstitcher - INFO - Config: +collect_input_args = [ + 'image_lr', + 'crops_image_hr', + 'depth_gt', + 'crop_depths', + 'bboxs', + 'image_hr', +] +convert_syncbn = True +debug = False +env_cfg = 
dict( + cudnn_benchmark=True, + dist_cfg=dict(backend='nccl'), + mp_cfg=dict(mp_start_method='forkserver')) +find_unused_parameters = True +general_dataloader = dict( + batch_size=1, + dataset=dict( + dataset_name='', gt_dir=None, rgb_image_dir='', type='ImageDataset'), + num_workers=2) +launcher = 'pytorch' +log_name = 'fine_pretrain' +max_depth = 80 +min_depth = 0.001 +model = dict( + coarse_branch=dict( + attractor_alpha=1000, + attractor_gamma=2, + attractor_kind='mean', + attractor_type='inv', + aug=True, + bin_centers_type='softplus', + bin_embedding_dim=128, + clip_grad=0.1, + dataset='nyu', + depth_anything=True, + distributed=True, + do_resize=False, + force_keep_ar=True, + freeze_midas_bn=True, + gpu='NULL', + img_size=[ + 392, + 518, + ], + inverse_midas=False, + log_images_every=0.1, + max_depth=80, + max_temp=50.0, + max_translation=100, + memory_efficient=True, + midas_model_type='vitl', + min_depth=0.001, + min_temp=0.0212, + model='zoedepth', + n_attractors=[ + 16, + 8, + 4, + 1, + ], + n_bins=64, + name='ZoeDepth', + notes='', + output_distribution='logbinomial', + prefetch=False, + pretrained_resource='local::./work_dir/DepthAnything_vitl.pt', + print_losses=False, + project='ZoeDepth', + random_crop=False, + random_translate=False, + root='.', + save_dir='', + shared_dict='NULL', + tags='', + train_midas=True, + translate_prob=0.2, + type='DA-ZoeDepth', + uid='NULL', + use_amp=False, + use_pretrained_midas=True, + use_shared_dict=False, + validate_every=0.25, + version_name='v1', + workers=16), + fine_branch=dict( + attractor_alpha=1000, + attractor_gamma=2, + attractor_kind='mean', + attractor_type='inv', + aug=True, + bin_centers_type='softplus', + bin_embedding_dim=128, + clip_grad=0.1, + dataset='nyu', + depth_anything=True, + distributed=True, + do_resize=False, + force_keep_ar=True, + freeze_midas_bn=True, + gpu='NULL', + img_size=[ + 392, + 518, + ], + inverse_midas=False, + log_images_every=0.1, + max_depth=80, + max_temp=50.0, + max_translation=100, + memory_efficient=True, + midas_model_type='vitl', + min_depth=0.001, + min_temp=0.0212, + model='zoedepth', + n_attractors=[ + 16, + 8, + 4, + 1, + ], + n_bins=64, + name='ZoeDepth', + notes='', + output_distribution='logbinomial', + prefetch=False, + pretrained_resource='local::./work_dir/DepthAnything_vitl.pt', + print_losses=False, + project='ZoeDepth', + random_crop=False, + random_translate=False, + root='.', + save_dir='', + shared_dict='NULL', + tags='', + train_midas=True, + translate_prob=0.2, + type='DA-ZoeDepth', + uid='NULL', + use_amp=False, + use_pretrained_midas=True, + use_shared_dict=False, + validate_every=0.25, + version_name='v1', + workers=16), + max_depth=80, + min_depth=0.001, + patch_process_shape=( + 392, + 518, + ), + sigloss=dict(type='SILogLoss'), + target='fine', + type='BaselinePretrain') +optim_wrapper = dict( + clip_grad=dict(max_norm=0.1, norm_type=2, type='norm'), + optimizer=dict(lr=4e-06, type='AdamW', weight_decay=0.01), + paramwise_cfg=dict(bypass_duplicate=True, custom_keys=dict())) +param_scheduler = dict( + base_momentum=0.85, + cycle_momentum=True, + div_factor=1, + final_div_factor=10000, + max_momentum=0.95, + pct_start=0.5, + three_phase=False) +project = 'patchfusion' +tags = [ + 'fine', + 'da', + 'vitl', +] +test_in_dataloader = dict( + batch_size=1, + dataset=dict( + data_root='./data/u4k', + max_depth=80, + min_depth=0.001, + mode='infer', + split='./data/u4k/splits/test.txt', + transform_cfg=dict(network_process_size=[ + 384, + 512, + ]), + 
type='UnrealStereo4kDataset'), + num_workers=2) +test_out_dataloader = dict( + batch_size=1, + dataset=dict( + data_root='./data/u4k', + max_depth=80, + min_depth=0.001, + mode='infer', + split='./data/u4k/splits/test_out.txt', + transform_cfg=dict(network_process_size=[ + 384, + 512, + ]), + type='UnrealStereo4kDataset'), + num_workers=2) +train_cfg = dict( + eval_start=0, + log_interval=100, + max_epochs=24, + save_checkpoint_interval=24, + train_log_img_interval=500, + val_interval=2, + val_log_img_interval=50, + val_type='epoch_base') +train_dataloader = dict( + batch_size=4, + dataset=dict( + data_root='./data/u4k', + max_depth=80, + min_depth=0.001, + mode='train', + resize_mode='depth-anything', + split='./data/u4k/splits/train.txt', + transform_cfg=dict( + degree=1.0, network_process_size=[ + 392, + 518, + ], random_crop=True), + type='UnrealStereo4kDataset'), + num_workers=4) +val_dataloader = dict( + batch_size=1, + dataset=dict( + data_root='./data/u4k', + max_depth=80, + min_depth=0.001, + mode='infer', + resize_mode='depth-anything', + split='./data/u4k/splits/val.txt', + transform_cfg=dict(degree=1.0, network_process_size=[ + 392, + 518, + ]), + type='UnrealStereo4kDataset'), + num_workers=2) +work_dir = './work_dir/depthanything_vitl_u4k/fine_pretrain' +zoe_depth_config = dict( + attractor_alpha=1000, + attractor_gamma=2, + attractor_kind='mean', + attractor_type='inv', + aug=True, + bin_centers_type='softplus', + bin_embedding_dim=128, + clip_grad=0.1, + dataset='nyu', + depth_anything=True, + distributed=True, + do_resize=False, + force_keep_ar=True, + freeze_midas_bn=True, + gpu='NULL', + img_size=[ + 392, + 518, + ], + inverse_midas=False, + log_images_every=0.1, + max_depth=80, + max_temp=50.0, + max_translation=100, + memory_efficient=True, + midas_model_type='vitl', + min_depth=0.001, + min_temp=0.0212, + model='zoedepth', + n_attractors=[ + 16, + 8, + 4, + 1, + ], + n_bins=64, + name='ZoeDepth', + notes='', + output_distribution='logbinomial', + prefetch=False, + pretrained_resource='local::./work_dir/DepthAnything_vitl.pt', + print_losses=False, + project='ZoeDepth', + random_crop=False, + random_translate=False, + root='.', + save_dir='', + shared_dict='NULL', + tags='', + train_midas=True, + translate_prob=0.2, + type='DA-ZoeDepth', + uid='NULL', + use_amp=False, + use_pretrained_midas=True, + use_shared_dict=False, + validate_every=0.25, + version_name='v1', + workers=16) + +2024/03/15 14:08:53 - patchstitcher - INFO - Loading deepnet from local::./work_dir/DepthAnything_vitl.pt +2024/03/15 14:08:53 - patchstitcher - INFO - Current zoedepth.core.prep.resizer is +2024/03/15 14:08:53 - patchstitcher - INFO - DistributedDataParallel( + (module): BaselinePretrain( + (fine_branch): ZoeDepth( + (core): DepthAnythingCore( + (core): DPT_DINOv2( + (pretrained): DinoVisionTransformer( + (patch_embed): PatchEmbed( + (proj): Conv2d(3, 1024, kernel_size=(14, 14), stride=(14, 14)) + (norm): Identity() + ) + (blocks): ModuleList( + (0-23): 24 x NestedTensorBlock( + (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (attn): MemEffAttention( + (qkv): Linear(in_features=1024, out_features=3072, bias=True) + (attn_drop): Dropout(p=0.0, inplace=False) + (proj): Linear(in_features=1024, out_features=1024, bias=True) + (proj_drop): Dropout(p=0.0, inplace=False) + ) + (ls1): LayerScale() + (drop_path1): Identity() + (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (mlp): Mlp( + (fc1): Linear(in_features=1024, out_features=4096, bias=True) + (act): 
GELU(approximate='none') + (fc2): Linear(in_features=4096, out_features=1024, bias=True) + (drop): Dropout(p=0.0, inplace=False) + ) + (ls2): LayerScale() + (drop_path2): Identity() + ) + ) + (norm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) + (head): Identity() + ) + (depth_head): DPTHead( + (projects): ModuleList( + (0): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1)) + (1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1)) + (2-3): 2 x Conv2d(1024, 1024, kernel_size=(1, 1), stride=(1, 1)) + ) + (resize_layers): ModuleList( + (0): ConvTranspose2d(256, 256, kernel_size=(4, 4), stride=(4, 4)) + (1): ConvTranspose2d(512, 512, kernel_size=(2, 2), stride=(2, 2)) + (2): Identity() + (3): Conv2d(1024, 1024, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) + ) + (scratch): Module( + (layer1_rn): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer2_rn): Conv2d(512, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer3_rn): Conv2d(1024, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (layer4_rn): Conv2d(1024, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (refinenet1): FeatureFusionBlock( + (out_conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet2): FeatureFusionBlock( + (out_conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet3): FeatureFusionBlock( + (out_conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (refinenet4): FeatureFusionBlock( + 
(out_conv): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (resConfUnit1): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (resConfUnit2): ResidualConvUnit( + (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (activation): ReLU() + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (skip_add): FloatFunctional( + (activation_post_process): Identity() + ) + ) + (output_conv1): Conv2d(256, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (output_conv2): Sequential( + (0): Conv2d(128, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(32, 1, kernel_size=(1, 1), stride=(1, 1)) + (3): ReLU(inplace=True) + (4): Identity() + ) + ) + ) + ) + ) + (conv2): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (seed_bin_regressor): SeedBinRegressorUnnormed( + (_net): Sequential( + (0): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (seed_projector): Projector( + (_net): Sequential( + (0): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + (projectors): ModuleList( + (0-3): 4 x Projector( + (_net): Sequential( + (0): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + ) + ) + ) + (attractors): ModuleList( + (0): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 16, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (1): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 8, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (2): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 4, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + (3): AttractorLayerUnnormed( + (_net): Sequential( + (0): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1)) + (1): ReLU(inplace=True) + (2): Conv2d(128, 1, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + ) + (conditional_log_binomial): ConditionalLogBinomial( + (log_binomial_transform): LogBinomial() + (mlp): Sequential( + (0): Conv2d(161, 80, kernel_size=(1, 1), stride=(1, 1)) + (1): GELU(approximate='none') + (2): Conv2d(80, 4, kernel_size=(1, 1), stride=(1, 1)) + (3): Softplus(beta=1, threshold=20) + ) + ) + ) + (sigloss): SILogLoss() + ) +) +2024/03/15 14:09:00 - patchstitcher - INFO - successfully init trainer +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.cls_token +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.pos_embed +2024/03/15 14:09:00 - patchstitcher - INFO - training param: 
module.fine_branch.core.core.pretrained.mask_token +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.patch_embed.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.patch_embed.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.0.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.mlp.fc2.weight +2024/03/15 
14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.1.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.2.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: 
module.fine_branch.core.core.pretrained.blocks.3.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.3.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.4.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.mlp.fc2.bias +2024/03/15 
14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.5.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.6.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.7.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: 
module.fine_branch.core.core.pretrained.blocks.7.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.8.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.9.ls2.gamma +2024/03/15 
14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.10.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.11.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training 
param: module.fine_branch.core.core.pretrained.blocks.12.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.12.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.12.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.12.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.12.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.12.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.12.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.12.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.12.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.12.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.12.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.12.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.12.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.12.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.13.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.13.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.13.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.13.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.13.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.13.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.13.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.13.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.13.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.13.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.13.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.13.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.13.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.13.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: 
module.fine_branch.core.core.pretrained.blocks.14.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.14.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.14.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.14.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.14.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.14.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.14.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.14.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.14.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.14.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.14.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.14.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.14.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.14.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.15.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.15.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.15.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.15.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.15.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.15.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.15.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.15.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.15.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.15.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.15.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.15.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.15.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.15.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: 
module.fine_branch.core.core.pretrained.blocks.16.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.16.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.16.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.16.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.16.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.16.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.16.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.16.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.16.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.16.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.16.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.16.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.16.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.16.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.17.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.17.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.17.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.17.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.17.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.17.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.17.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.17.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.17.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.17.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.17.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.17.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.17.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.17.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: 
module.fine_branch.core.core.pretrained.blocks.18.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.18.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.18.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.18.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.18.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.18.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.18.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.18.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.18.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.18.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.18.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.18.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.18.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.18.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.19.norm1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.19.norm1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.19.attn.qkv.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.19.attn.qkv.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.19.attn.proj.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.19.attn.proj.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.19.ls1.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.19.norm2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.19.norm2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.19.mlp.fc1.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.19.mlp.fc1.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.19.mlp.fc2.weight +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.19.mlp.fc2.bias +2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.19.ls2.gamma +2024/03/15 14:09:00 - patchstitcher - INFO - training param: 
module.fine_branch.core.core.pretrained.blocks.20.norm1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.20.norm1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.20.attn.qkv.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.20.attn.qkv.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.20.attn.proj.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.20.attn.proj.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.20.ls1.gamma
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.20.norm2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.20.norm2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.20.mlp.fc1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.20.mlp.fc1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.20.mlp.fc2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.20.mlp.fc2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.20.ls2.gamma
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.21.norm1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.21.norm1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.21.attn.qkv.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.21.attn.qkv.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.21.attn.proj.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.21.attn.proj.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.21.ls1.gamma
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.21.norm2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.21.norm2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.21.mlp.fc1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.21.mlp.fc1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.21.mlp.fc2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.21.mlp.fc2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.21.ls2.gamma
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.22.norm1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.22.norm1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.22.attn.qkv.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.22.attn.qkv.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.22.attn.proj.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.22.attn.proj.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.22.ls1.gamma
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.22.norm2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.22.norm2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.22.mlp.fc1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.22.mlp.fc1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.22.mlp.fc2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.22.mlp.fc2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.22.ls2.gamma
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.23.norm1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.23.norm1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.23.attn.qkv.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.23.attn.qkv.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.23.attn.proj.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.23.attn.proj.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.23.ls1.gamma
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.23.norm2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.23.norm2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.23.mlp.fc1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.23.mlp.fc1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.23.mlp.fc2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.23.mlp.fc2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.blocks.23.ls2.gamma
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.norm.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.pretrained.norm.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.0.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.0.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.3.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.projects.3.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.0.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.0.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.3.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.resize_layers.3.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.layer1_rn.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.layer2_rn.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.layer3_rn.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.layer4_rn.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.out_conv.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.out_conv.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit1.conv2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet1.resConfUnit2.conv2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.out_conv.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.out_conv.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit1.conv2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet2.resConfUnit2.conv2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.out_conv.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.out_conv.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit1.conv2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet3.resConfUnit2.conv2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.out_conv.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.out_conv.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit1.conv2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.refinenet4.resConfUnit2.conv2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv1.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv1.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv2.0.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv2.0.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv2.2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.core.core.depth_head.scratch.output_conv2.2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.conv2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.conv2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.seed_bin_regressor._net.0.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.seed_bin_regressor._net.0.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.seed_bin_regressor._net.2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.seed_bin_regressor._net.2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.seed_projector._net.0.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.seed_projector._net.0.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.seed_projector._net.2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.seed_projector._net.2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.0._net.0.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.0._net.0.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.0._net.2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.0._net.2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.1._net.0.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.1._net.0.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.1._net.2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.1._net.2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.2._net.0.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.2._net.0.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.2._net.2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.2._net.2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.3._net.0.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.3._net.0.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.3._net.2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.projectors.3._net.2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.0._net.0.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.0._net.0.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.0._net.2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.0._net.2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.1._net.0.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.1._net.0.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.1._net.2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.1._net.2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.2._net.0.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.2._net.0.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.2._net.2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.2._net.2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.3._net.0.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.3._net.0.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.3._net.2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.attractors.3._net.2.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.conditional_log_binomial.mlp.0.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.conditional_log_binomial.mlp.0.bias
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.conditional_log_binomial.mlp.2.weight
+2024/03/15 14:09:00 - patchstitcher - INFO - training param: module.fine_branch.conditional_log_binomial.mlp.2.bias
+2024/03/15 14:11:58 - patchstitcher - INFO - Epoch: [01/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 3.4369797706604004 - fine_loss: 3.4369797706604004
+2024/03/15 14:13:53 - patchstitcher - INFO - Epoch: [01/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.7487813234329224 - fine_loss: 1.7487813234329224
+2024/03/15 14:15:47 - patchstitcher - INFO - Epoch: [01/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 2.9658074378967285 - fine_loss: 2.9658074378967285
+2024/03/15 14:17:39 - patchstitcher - INFO - Epoch: [01/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.7623008489608765 - fine_loss: 1.7623008489608765
+2024/03/15 14:21:19 - patchstitcher - INFO - Epoch: [02/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 2.5338351726531982 - fine_loss: 2.5338351726531982
+2024/03/15 14:23:13 - patchstitcher - INFO - Epoch: [02/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.949808120727539 - fine_loss: 1.949808120727539
+2024/03/15 14:25:07 - patchstitcher - INFO - Epoch: [02/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.5897963047027588 - fine_loss: 1.5897963047027588
+2024/03/15 14:26:57 - patchstitcher - INFO - Epoch: [02/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.5241825580596924 - fine_loss: 1.5241825580596924
+2024/03/15 14:29:02 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+----------+-----------+------------+-----------+-----------+
+| a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
++-----------+-----------+-----------+-----------+-----------+----------+-----------+------------+-----------+-----------+
+| 0.7777531 | 0.9691491 | 0.9920921 | 0.1407966 | 2.1086142 | 0.064992 | 0.1937843 | 15.8287334 | 0.3172734 | 1.5069758 |
++-----------+-----------+-----------+-----------+-----------+----------+-----------+------------+-----------+-----------+
+2024/03/15 14:30:59 - patchstitcher - INFO - Epoch: [03/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.046777367591858 - fine_loss: 1.046777367591858
+2024/03/15 14:32:52 - patchstitcher - INFO - Epoch: [03/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.0578508377075195 - fine_loss: 1.0578508377075195
+2024/03/15 14:34:43 - patchstitcher - INFO - Epoch: [03/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.1272225379943848 - fine_loss: 1.1272225379943848
+2024/03/15 14:36:33 - patchstitcher - INFO - Epoch: [03/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.4212095737457275 - fine_loss: 1.4212095737457275
+2024/03/15 14:39:52 - patchstitcher - INFO - Epoch: [04/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 2.175830125808716 - fine_loss: 2.175830125808716
+2024/03/15 14:41:46 - patchstitcher - INFO - Epoch: [04/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.6945712566375732 - fine_loss: 1.6945712566375732
+2024/03/15 14:43:36 - patchstitcher - INFO - Epoch: [04/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.6684590578079224 - fine_loss: 1.6684590578079224
+2024/03/15 14:45:27 - patchstitcher - INFO - Epoch: [04/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.978624165058136 - fine_loss: 0.978624165058136
+2024/03/15 14:47:24 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
+| a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
+| 0.8861725 | 0.9834373 | 0.9954867 | 0.1163805 | 1.6259623 | 0.0493756 | 0.1501408 | 13.2967868 | 0.2082237 | 1.2041435 |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
+2024/03/15 14:49:20 - patchstitcher - INFO - Epoch: [05/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.0877785682678223 - fine_loss: 1.0877785682678223
+2024/03/15 14:51:13 - patchstitcher - INFO - Epoch: [05/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.627789855003357 - fine_loss: 1.627789855003357
+2024/03/15 14:53:00 - patchstitcher - INFO - Epoch: [05/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.852139949798584 - fine_loss: 1.852139949798584
+2024/03/15 14:54:56 - patchstitcher - INFO - Epoch: [05/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.4405653774738312 - fine_loss: 0.4405653774738312
+2024/03/15 14:58:15 - patchstitcher - INFO - Epoch: [06/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.5338048934936523 - fine_loss: 1.5338048934936523
+2024/03/15 15:00:08 - patchstitcher - INFO - Epoch: [06/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.6242114901542664 - fine_loss: 0.6242114901542664
+2024/03/15 15:01:57 - patchstitcher - INFO - Epoch: [06/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.961182177066803 - fine_loss: 0.961182177066803
+2024/03/15 15:03:48 - patchstitcher - INFO - Epoch: [06/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.0484611988067627 - fine_loss: 1.0484611988067627
+2024/03/15 15:05:46 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+----------+----------+
+| a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+----------+----------+
+| 0.9134527 | 0.9875949 | 0.9960728 | 0.1049193 | 1.7395931 | 0.0450357 | 0.1404951 | 12.8880463 | 0.238056 | 1.314327 |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+------------+----------+----------+
+2024/03/15 15:07:43 - patchstitcher - INFO - Epoch: [07/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.7090848684310913 - fine_loss: 0.7090848684310913
+2024/03/15 15:09:36 - patchstitcher - INFO - Epoch: [07/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 2.1976611614227295 - fine_loss: 2.1976611614227295
+2024/03/15 15:11:25 - patchstitcher - INFO - Epoch: [07/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8840272426605225 - fine_loss: 0.8840272426605225
+2024/03/15 15:13:16 - patchstitcher - INFO - Epoch: [07/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.7640607357025146 - fine_loss: 0.7640607357025146
+2024/03/15 15:16:35 - patchstitcher - INFO - Epoch: [08/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.2251319885253906 - fine_loss: 1.2251319885253906
+2024/03/15 15:18:29 - patchstitcher - INFO - Epoch: [08/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.059991717338562 - fine_loss: 1.059991717338562
+2024/03/15 15:20:19 - patchstitcher - INFO - Epoch: [08/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 2.0797297954559326 - fine_loss: 2.0797297954559326
+2024/03/15 15:22:08 - patchstitcher - INFO - Epoch: [08/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.7019829750061035 - fine_loss: 1.7019829750061035
+2024/03/15 15:24:01 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+----------+-----------+-----------+-----------+-----------+------------+----------+-----------+
+| a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
++-----------+-----------+----------+-----------+-----------+-----------+-----------+------------+----------+-----------+
+| 0.9341925 | 0.9901051 | 0.996222 | 0.0896415 | 1.4864388 | 0.0385839 | 0.1228082 | 11.3385688 | 0.174791 | 1.2037814 |
++-----------+-----------+----------+-----------+-----------+-----------+-----------+------------+----------+-----------+
+2024/03/15 15:25:57 - patchstitcher - INFO - Epoch: [09/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.8242141604423523 - fine_loss: 0.8242141604423523
+2024/03/15 15:27:48 - patchstitcher - INFO - Epoch: [09/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 2.5683438777923584 - fine_loss: 2.5683438777923584
+2024/03/15 15:29:37 - patchstitcher - INFO - Epoch: [09/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.3079029321670532 - fine_loss: 1.3079029321670532
+2024/03/15 15:31:28 - patchstitcher - INFO - Epoch: [09/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.6061334609985352 - fine_loss: 1.6061334609985352
+2024/03/15 15:34:46 - patchstitcher - INFO - Epoch: [10/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.8605632185935974 - fine_loss: 0.8605632185935974
+2024/03/15 15:36:39 - patchstitcher - INFO - Epoch: [10/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.0743176937103271 - fine_loss: 1.0743176937103271
+2024/03/15 15:38:29 - patchstitcher - INFO - Epoch: [10/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.351250410079956 - fine_loss: 1.351250410079956
+2024/03/15 15:40:23 - patchstitcher - INFO - Epoch: [10/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.0148663520812988 - fine_loss: 1.0148663520812988
+2024/03/15 15:42:18 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+----------+-----------+----------+------------+-----------+-----------+
+| a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
++-----------+-----------+-----------+-----------+----------+-----------+----------+------------+-----------+-----------+
+| 0.9474117 | 0.9913495 | 0.9966189 | 0.0881257 | 1.489531 | 0.0376722 | 0.119152 | 10.8393766 | 0.1784139 | 1.1567976 |
++-----------+-----------+-----------+-----------+----------+-----------+----------+------------+-----------+-----------+
+2024/03/15 15:44:20 - patchstitcher - INFO - Epoch: [11/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 1.1992911100387573 - fine_loss: 1.1992911100387573
+2024/03/15 15:46:09 - patchstitcher - INFO - Epoch: [11/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.1084281206130981 - fine_loss: 1.1084281206130981
+2024/03/15 15:48:04 - patchstitcher - INFO - Epoch: [11/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.6499404907226562 - fine_loss: 1.6499404907226562
+2024/03/15 15:49:54 - patchstitcher - INFO - Epoch: [11/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.508489191532135 - fine_loss: 0.508489191532135
+2024/03/15 15:53:12 - patchstitcher - INFO - Epoch: [12/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.8108782768249512 - fine_loss: 0.8108782768249512
+2024/03/15 15:55:02 - patchstitcher - INFO - Epoch: [12/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.48491033911705017 - fine_loss: 0.48491033911705017
+2024/03/15 15:56:57 - patchstitcher - INFO - Epoch: [12/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 1.0899862051010132 - fine_loss: 1.0899862051010132
+2024/03/15 15:58:51 - patchstitcher - INFO - Epoch: [12/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.8050335645675659 - fine_loss: 0.8050335645675659
+2024/03/15 16:00:51 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+----------+-----------+-----------+-----------+------------+----------+-----------+
+| a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
++-----------+-----------+-----------+----------+-----------+-----------+-----------+------------+----------+-----------+
+| 0.9395763 | 0.9908747 | 0.9968585 | 0.092961 | 1.3657981 | 0.0391972 | 0.1228719 | 10.8688248 | 0.149488 | 1.0754925 |
++-----------+-----------+-----------+----------+-----------+-----------+-----------+------------+----------+-----------+
+2024/03/15 16:02:50 - patchstitcher - INFO - Epoch: [13/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.4576977491378784 - fine_loss: 0.4576977491378784
+2024/03/15 16:04:41 - patchstitcher - INFO - Epoch: [13/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8896196484565735 - fine_loss: 0.8896196484565735
+2024/03/15 16:06:39 - patchstitcher - INFO - Epoch: [13/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.7209967374801636 - fine_loss: 0.7209967374801636
+2024/03/15 16:08:35 - patchstitcher - INFO - Epoch: [13/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.045252799987793 - fine_loss: 1.045252799987793
+2024/03/15 16:11:57 - patchstitcher - INFO - Epoch: [14/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.4737454354763031 - fine_loss: 0.4737454354763031
+2024/03/15 16:13:47 - patchstitcher - INFO - Epoch: [14/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8389743566513062 - fine_loss: 0.8389743566513062
+2024/03/15 16:15:43 - patchstitcher - INFO - Epoch: [14/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.7807110548019409 - fine_loss: 0.7807110548019409
+2024/03/15 16:17:37 - patchstitcher - INFO - Epoch: [14/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.445203959941864 - fine_loss: 0.445203959941864
+2024/03/15 16:19:33 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
+| a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
++-----------+-----------+----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
+| 0.9540785 | 0.9909157 | 0.996375 | 0.0777575 | 1.3239907 | 0.0337207 | 0.1110354 | 10.5282875 | 0.1364373 | 1.0023539 |
++-----------+-----------+----------+-----------+-----------+-----------+-----------+------------+-----------+-----------+
+2024/03/15 16:21:31 - patchstitcher - INFO - Epoch: [15/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.7916954159736633 - fine_loss: 0.7916954159736633
+2024/03/15 16:23:23 - patchstitcher - INFO - Epoch: [15/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.9854772686958313 - fine_loss: 0.9854772686958313
+2024/03/15 16:25:16 - patchstitcher - INFO - Epoch: [15/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.6816364526748657 - fine_loss: 0.6816364526748657
+2024/03/15 16:27:09 - patchstitcher - INFO - Epoch: [15/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.8884357810020447 - fine_loss: 0.8884357810020447
+2024/03/15 16:30:24 - patchstitcher - INFO - Epoch: [16/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.9741940498352051 - fine_loss: 0.9741940498352051
+2024/03/15 16:32:15 - patchstitcher - INFO - Epoch: [16/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.682526707649231 - fine_loss: 0.682526707649231
+2024/03/15 16:34:08 - patchstitcher - INFO - Epoch: [16/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.49463391304016113 - fine_loss: 0.49463391304016113
+2024/03/15 16:36:01 - patchstitcher - INFO - Epoch: [16/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.2172904014587402 - fine_loss: 1.2172904014587402
+2024/03/15 16:37:56 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+---------+-----------+-----------+
+| a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+---------+-----------+-----------+
+| 0.9618936 | 0.9925915 | 0.9967338 | 0.0710943 | 1.2761649 | 0.0314868 | 0.1050877 | 9.96655 | 0.1254682 | 0.9844736 |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+---------+-----------+-----------+
+2024/03/15 16:39:53 - patchstitcher - INFO - Epoch: [17/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.5513850450515747 - fine_loss: 0.5513850450515747
+2024/03/15 16:41:44 - patchstitcher - INFO - Epoch: [17/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.6651225686073303 - fine_loss: 0.6651225686073303
+2024/03/15 16:43:36 - patchstitcher - INFO - Epoch: [17/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.3391605615615845 - fine_loss: 0.3391605615615845
+2024/03/15 16:45:28 - patchstitcher - INFO - Epoch: [17/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.7067725658416748 - fine_loss: 0.7067725658416748
+2024/03/15 16:48:48 - patchstitcher - INFO - Epoch: [18/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.48334598541259766 - fine_loss: 0.48334598541259766
+2024/03/15 16:50:39 - patchstitcher - INFO - Epoch: [18/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.020408034324646 - fine_loss: 1.020408034324646
+2024/03/15 16:52:31 - patchstitcher - INFO - Epoch: [18/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.7528542280197144 - fine_loss: 0.7528542280197144
+2024/03/15 16:54:26 - patchstitcher - INFO - Epoch: [18/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.5858664512634277 - fine_loss: 1.5858664512634277
+2024/03/15 16:56:21 - patchstitcher - INFO - Evaluation Summary:
++-----------+----------+-----------+----------+-----------+-----------+----------+-----------+-----------+-----------+
+| a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
++-----------+----------+-----------+----------+-----------+-----------+----------+-----------+-----------+-----------+
+| 0.9683023 | 0.992745 | 0.9975154 | 0.065207 | 1.2253438 | 0.0286473 | 0.097571 | 9.2976334 | 0.1133868 | 0.9503054 |
++-----------+----------+-----------+----------+-----------+-----------+----------+-----------+-----------+-----------+
+2024/03/15 16:58:18 - patchstitcher - INFO - Epoch: [19/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.6674519777297974 - fine_loss: 0.6674519777297974
+2024/03/15 17:00:14 - patchstitcher - INFO - Epoch: [19/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.6918603181838989 - fine_loss: 0.6918603181838989
+2024/03/15 17:02:09 - patchstitcher - INFO - Epoch: [19/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.8377310633659363 - fine_loss: 0.8377310633659363
+2024/03/15 17:04:01 - patchstitcher - INFO - Epoch: [19/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.5647892355918884 - fine_loss: 0.5647892355918884
+2024/03/15 17:07:21 - patchstitcher - INFO - Epoch: [20/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.8300132751464844 - fine_loss: 0.8300132751464844
+2024/03/15 17:09:18 - patchstitcher - INFO - Epoch: [20/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.42331066727638245 - fine_loss: 0.42331066727638245
+2024/03/15 17:11:11 - patchstitcher - INFO - Epoch: [20/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.5752788782119751 - fine_loss: 0.5752788782119751
+2024/03/15 17:13:05 - patchstitcher - INFO - Epoch: [20/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.2536908984184265 - fine_loss: 0.2536908984184265
+2024/03/15 17:15:05 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+
+| a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+
+| 0.9718122 | 0.9932088 | 0.9976712 | 0.0618038 | 1.2020189 | 0.0268609 | 0.0932138 | 8.8305159 | 0.108609 | 0.9153564 |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+----------+-----------+
+2024/03/15 17:17:06 - patchstitcher - INFO - Epoch: [21/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.6014220118522644 - fine_loss: 0.6014220118522644
+2024/03/15 17:18:55 - patchstitcher - INFO - Epoch: [21/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.8239718675613403 - fine_loss: 0.8239718675613403
+2024/03/15 17:20:48 - patchstitcher - INFO - Epoch: [21/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.2863079607486725 - fine_loss: 0.2863079607486725
+2024/03/15 17:22:41 - patchstitcher - INFO - Epoch: [21/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 1.1374967098236084 - fine_loss: 1.1374967098236084
+2024/03/15 17:26:06 - patchstitcher - INFO - Epoch: [22/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.7279921770095825 - fine_loss: 0.7279921770095825
+2024/03/15 17:28:00 - patchstitcher - INFO - Epoch: [22/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.7760436534881592 - fine_loss: 1.7760436534881592
+2024/03/15 17:29:50 - patchstitcher - INFO - Epoch: [22/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.5720145106315613 - fine_loss: 0.5720145106315613
+2024/03/15 17:31:45 - patchstitcher - INFO - Epoch: [22/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.39483922719955444 - fine_loss: 0.39483922719955444
+2024/03/15 17:33:44 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+---------+-----------+
+| a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+---------+-----------+
+| 0.9754718 | 0.9935436 | 0.9978637 | 0.0557471 | 1.1464982 | 0.0244453 | 0.0875094 | 8.4613936 | 0.09984 | 0.8968383 |
++-----------+-----------+-----------+-----------+-----------+-----------+-----------+-----------+---------+-----------+
+2024/03/15 17:35:41 - patchstitcher - INFO - Epoch: [23/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.47871437668800354 - fine_loss: 0.47871437668800354
+2024/03/15 17:37:33 - patchstitcher - INFO - Epoch: [23/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 1.1419689655303955 - fine_loss: 1.1419689655303955
+2024/03/15 17:39:23 - patchstitcher - INFO - Epoch: [23/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.9582790732383728 - fine_loss: 0.9582790732383728
+2024/03/15 17:41:19 - patchstitcher - INFO - Epoch: [23/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.5557586550712585 - fine_loss: 0.5557586550712585
+2024/03/15 17:44:39 - patchstitcher - INFO - Epoch: [24/24] - Step: [00100/00475] - Time: [1/1] - Total Loss: 0.7695197463035583 - fine_loss: 0.7695197463035583
+2024/03/15 17:46:31 - patchstitcher - INFO - Epoch: [24/24] - Step: [00200/00475] - Time: [1/1] - Total Loss: 0.5754735469818115 - fine_loss: 0.5754735469818115
+2024/03/15 17:48:24 - patchstitcher - INFO - Epoch: [24/24] - Step: [00300/00475] - Time: [1/1] - Total Loss: 0.4625895321369171 - fine_loss: 0.4625895321369171
+2024/03/15 17:50:13 - patchstitcher - INFO - Epoch: [24/24] - Step: [00400/00475] - Time: [1/1] - Total Loss: 0.4534989297389984 - fine_loss: 0.4534989297389984
+2024/03/15 17:52:12 - patchstitcher - INFO - Evaluation Summary:
++-----------+-----------+----------+-----------+----------+-----------+-----------+-----------+-----------+-----------+
+| a1 | a2 | a3 | abs_rel | rmse | log_10 | rmse_log | silog | sq_rel | see |
++-----------+-----------+----------+-----------+----------+-----------+-----------+-----------+-----------+-----------+
+| 0.9761539 | 0.9936435 | 0.997857 | 0.0550249 | 1.141468 | 0.0240967 | 0.0863539 | 8.3515906 | 0.0991177 | 0.8908329 |
++-----------+-----------+----------+-----------+----------+-----------+-----------+-----------+-----------+-----------+
+2024/03/15 17:52:12 - patchstitcher - INFO - Saving ckp, but use the inner get_save_dict function to get model_dict
+2024/03/15 17:52:12 - patchstitcher - INFO - For saving space. Would you like to save base model several times? :>
+2024/03/15 17:52:15 - patchstitcher - INFO - save checkpoint_24.pth at ./work_dir/depthanything_vitl_u4k/fine_pretrain