# nnUNet_calvingfront_detection/nnunet/experiment_planning/alternative_experiment_planning/
#     target_spacing/experiment_planner_baseline_3DUNet_targetSpacingForAnisoAxis.py
# (Hugging Face raw-view residue converted to a comment: uploaded by "ho11laqe",
#  commit "init" ecf08bc, 3.62 kB — kept for provenance only.)
# Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from nnunet.experiment_planning.experiment_planner_baseline_3DUNet import ExperimentPlanner
from nnunet.paths import *
class ExperimentPlannerTargetSpacingForAnisoAxis(ExperimentPlanner):
    """Experiment planner variant that relaxes the target spacing on a
    strongly anisotropic axis.

    Instead of always taking the dataset-wide spacing percentile on every
    axis, this planner detects a low-resolution axis (much coarser spacing
    AND far fewer voxels than the other axes) and, for that axis only,
    uses a lower spacing percentile so fewer cases need heavy upsampling.
    """

    def __init__(self, folder_with_cropped_data, preprocessed_output_folder):
        super().__init__(folder_with_cropped_data, preprocessed_output_folder)
        # Distinct identifier/plans file so results don't clash with the
        # default 3D planner's preprocessed data.
        self.data_identifier = "nnUNetData_targetSpacingForAnisoAxis"
        self.plans_fname = join(self.preprocessed_output_folder,
                                "nnUNetPlans" + "targetSpacingForAnisoAxis_plans_3D.pkl")

    def get_target_spacing(self):
        """Compute the resampling target spacing for the dataset.

        By default the spacing percentile (median) over all training cases
        is used per axis: larger spacing -> smaller images -> faster
        training, smaller spacing -> larger images -> slower training.
        For very anisotropic datasets (e.g. ACDC with ~(10, 1.5, 1.5) mm)
        the median along the coarse axis causes severe interpolation
        artifacts for the thin-sliced cases, so that axis is switched to
        the 10th spacing percentile instead.

        Returns the per-axis target spacing as a numpy array.
        """
        spacings = self.dataset_properties['all_spacings']
        sizes = self.dataset_properties['all_sizes']

        stacked_spacings = np.vstack(spacings)
        target = np.percentile(stacked_spacings, self.target_spacing_percentile, 0)
        target_size = np.percentile(np.vstack(sizes), self.target_spacing_percentile, 0)
        # Physical extent per axis; currently informational only (see the
        # retired criterion further down).
        target_size_mm = np.array(target) * np.array(target_size)

        # A candidate for special treatment must satisfy all of:
        #   - one axis with much coarser spacing than the rest
        #   - that axis also has far fewer voxels than the rest
        #   - (its extent in mm is reduced as well — criterion not active)
        lowres_axis = np.argmax(target)
        inplane_axes = [ax for ax in range(len(target)) if ax != lowres_axis]
        inplane_spacings = [target[ax] for ax in inplane_axes]
        inplane_sizes = [target_size[ax] for ax in inplane_axes]

        spacing_is_aniso = target[lowres_axis] > (self.anisotropy_threshold * max(inplane_spacings))
        voxels_are_aniso = target_size[lowres_axis] * self.anisotropy_threshold < max(inplane_sizes)
        # Third criterion intentionally disabled for now:
        # median_size_in_mm = target[target_size_mm] * RESAMPLING_SEPARATE_Z_ANISOTROPY_THRESHOLD < max(target_size_mm)

        if spacing_is_aniso and voxels_are_aniso:
            # Use a low percentile of the per-case spacings on the coarse
            # axis so thin-sliced cases are not upsampled excessively.
            lowres_axis_spacings = stacked_spacings[:, lowres_axis]
            target[lowres_axis] = np.percentile(lowres_axis_spacings, 10)

        return target