INSTRUCTION
stringlengths
1
46.3k
RESPONSE
stringlengths
75
80.2k
Reserve a set of values for this execution. No other process can reserve the same set of values while the set is reserved. Acquired value set needs to be released after use to allow other processes to access it. Add tags to limit the possible value sets that this returns.
def acquire_value_set(self, *tags):
    """
    Reserve a set of values for this execution.

    No other process can reserve the same set of values while the set is
    reserved. An acquired value set needs to be released after use to
    allow other processes to access it.

    :param tags: optional tags that limit which value sets may be returned.
    :return: name of the acquired value set.
    :raises ValueError: if no matching value set could be acquired.
    """
    setname = self._acquire_value_set(*tags)
    if setname is None:
        # None from the backend means every matching set is reserved.
        # (Fixed typo in the message: "aquire" -> "acquire".)
        raise ValueError("Could not acquire a value set")
    return setname
Get a value from previously reserved value set.
def get_value_from_set(self, key):
    """
    Get a value from a previously reserved value set.

    :param key: name of the value inside the set (case-insensitive).
    :return: the value stored under ``key``.
    """
    # TODO: This should be done locally. We do not really need to call the
    # centralised server if the set is already reserved, as the data there
    # is immutable during execution.
    key = key.lower()
    if not self._remotelib:
        return _PabotLib.get_value_from_set(self, key, self._my_id)
    # Poll the remote server until the value becomes available.
    while True:
        value = self._remotelib.run_keyword(
            'get_value_from_set', [key, self._my_id], {})
        if value:
            return value
        time.sleep(0.1)
        logger.debug('waiting for a value')
Release a reserved value set so that other executions can use it also.
def release_value_set(self):
    """
    Release a reserved value set so that other executions can use it also.
    """
    if not self._remotelib:
        # Local mode: release directly through the base library.
        _PabotLib.release_value_set(self, self._my_id)
        return
    self._remotelib.run_keyword('release_value_set', [self._my_id], {})
A convenience method that installs all available hooks. If a specific module is not available on the path, it is ignored.
def install_all_patches():
    """
    A convenience method that installs all available hooks.

    If a specific module is not available on the path, it is ignored.
    """
    from . import mysqldb
    from . import psycopg2
    from . import strict_redis
    from . import sqlalchemy
    from . import tornado_http
    from . import urllib
    from . import urllib2
    from . import requests

    # Apply every client hook in one pass, in the same order as before.
    for hook_module in (mysqldb, psycopg2, strict_redis, sqlalchemy,
                        tornado_http, urllib, urllib2, requests):
        hook_module.install_patches()
Usually called from middleware to install client hooks specified in the client_hooks section of the configuration. :param patchers: a list of patchers to run. Acceptable values include: * None - installs all client patches * 'all' - installs all client patches * empty list - does not install any patches * list of function names - executes the functions
def install_patches(patchers='all'):
    """
    Usually called from middleware to install client hooks specified in the
    client_hooks section of the configuration.

    :param patchers: a list of patchers to run. Acceptable values include:
        * None - installs all client patches
        * 'all' - installs all client patches
        * empty list - does not install any patches
        * list of function names - executes the functions
    """
    if patchers is None or patchers == 'all':
        install_all_patches()
        return
    if not _valid_args(patchers):
        raise ValueError('patchers argument must be None, "all", or a list')

    for patch_func_name in patchers:
        logging.info('Loading client hook %s', patch_func_name)
        hook = _load_symbol(patch_func_name)
        logging.info('Applying client hook %s', patch_func_name)
        hook()
Install client interceptors for the patchers. :param client_interceptors: a list of client interceptors to install. Should be a list of classes
def install_client_interceptors(client_interceptors=()):
    """
    Install client interceptors for the patchers.

    :param client_interceptors: a list of client interceptors to install.
        Should be a list of classes (given as importable names).
    """
    if not _valid_args(client_interceptors):
        raise ValueError('client_interceptors argument must be a list')

    from ..http_client import ClientInterceptors

    for interceptor_name in client_interceptors:
        logging.info('Loading client interceptor %s', interceptor_name)
        interceptor_cls = _load_symbol(interceptor_name)
        logging.info('Adding client interceptor %s', interceptor_name)
        ClientInterceptors.append(interceptor_cls())
Load a symbol by name. :param str name: The name to load, specified by `module.attr`. :returns: The attribute value. :raises ValueError: if the specified module does not contain the requested attribute, or the resolved attribute is not callable.
def _load_symbol(name): """Load a symbol by name. :param str name: The name to load, specified by `module.attr`. :returns: The attribute value. If the specified module does not contain the requested attribute then `None` is returned. """ module_name, key = name.rsplit('.', 1) try: module = importlib.import_module(module_name) except ImportError as err: # it's possible the symbol is a class method module_name, class_name = module_name.rsplit('.', 1) module = importlib.import_module(module_name) cls = getattr(module, class_name, None) if cls: attr = getattr(cls, key, None) else: raise err else: attr = getattr(module, key, None) if not callable(attr): raise ValueError('%s is not callable (was %r)' % (name, attr)) return attr
Access current request context and extract current Span from it. :return: Return current span associated with the current request context. If no request context is present in thread local, or the context has no span, return None.
def get_current_span():
    """
    Access current request context and extract current Span from it.

    :return: Return current span associated with the current request
        context. If no request context is present in thread local, or the
        context has no span, return None.
    """
    # Legacy path first: the old ScopeManager-less implementation stored
    # the span on a thread-local request context; honour it for backwards
    # compatibility.
    context = RequestContextManager.current_context()
    if context is not None:
        return context.span

    scope = opentracing.tracer.scope_manager.active
    return scope.span if scope else None
Create a context manager that stores the given span in the thread-local request context. This function should only be used in single-threaded applications like Flask / uWSGI. ## Usage example in WSGI middleware: .. code-block:: python from opentracing_instrumentation.http_server import WSGIRequestWrapper from opentracing_instrumentation.http_server import before_request from opentracing_instrumentation import request_context def create_wsgi_tracing_middleware(other_wsgi): def wsgi_tracing_middleware(environ, start_response): request = WSGIRequestWrapper.from_wsgi_environ(environ) span = before_request(request=request, tracer=tracer) # Wrapper around the real start_response object to log # additional information to opentracing Span def start_response_wrapper(status, response_headers, exc_info=None): if exc_info is not None: span.log(event='exception', payload=exc_info) span.finish() return start_response(status, response_headers) with request_context.span_in_context(span): return other_wsgi(environ, start_response_wrapper) return wsgi_tracing_middleware :param span: OpenTracing Span :return: Return context manager that wraps the request context.
def span_in_context(span):
    """
    Create a context manager that stores the given span in the
    thread-local request context.

    This function should only be used in single-threaded applications like
    Flask / uWSGI — typically from WSGI middleware, wrapping the downstream
    application call:

    .. code-block:: python

        with request_context.span_in_context(span):
            return other_wsgi(environ, start_response_wrapper)

    :param span: OpenTracing Span, or None.
    :return: Return context manager that wraps the request context.
    """
    if span is None:
        # Nothing to activate: hand back a no-op Scope.
        return opentracing.Scope(None, None)
    # finish_on_close=False: the caller is responsible for finishing span.
    return opentracing.tracer.scope_manager.activate(span, False)
Create Tornado's StackContext that stores the given span in the thread-local request context. This function is intended for use in Tornado applications based on IOLoop, although will work fine in single-threaded apps like Flask, albeit with more overhead. ## Usage example in Tornado application Suppose you have a method `handle_request(request)` in the http server. Instead of calling it directly, use a wrapper: .. code-block:: python from opentracing_instrumentation import request_context @tornado.gen.coroutine def handle_request_wrapper(request, actual_handler, *args, **kwargs) request_wrapper = TornadoRequestWrapper(request=request) span = http_server.before_request(request=request_wrapper) with request_context.span_in_stack_context(span): return actual_handler(*args, **kwargs) :param span: :return: Return StackContext that wraps the request context.
def span_in_stack_context(span):
    """
    Create Tornado's StackContext that stores the given span in the
    thread-local request context. This function is intended for use
    in Tornado applications based on IOLoop, although will work fine
    in single-threaded apps like Flask, albeit with more overhead.

    ## Usage example in Tornado application

    Suppose you have a method `handle_request(request)` in the http server.
    Instead of calling it directly, use a wrapper:

    .. code-block:: python

        from opentracing_instrumentation import request_context

        @tornado.gen.coroutine
        def handle_request_wrapper(request, actual_handler, *args, **kwargs)

            request_wrapper = TornadoRequestWrapper(request=request)
            span = http_server.before_request(request=request_wrapper)

            with request_context.span_in_stack_context(span):
                return actual_handler(*args, **kwargs)

    :param span: OpenTracing Span, or None to enter a fresh stack context
        without activating any span.
    :return: Return StackContext that wraps the request context.
    """
    # TornadoScopeManager keeps its active-span storage inside the stack
    # context, so no other scope manager can work with this helper.
    if not isinstance(opentracing.tracer.scope_manager, TornadoScopeManager):
        raise RuntimeError('scope_manager is not TornadoScopeManager')

    # Enter the newly created stack context so we have
    # storage available for Span activation.
    context = tracer_stack_context()
    entered_context = _TracerEnteredStackContext(context)

    if span is None:
        # No span to activate; the caller still gets an entered context.
        return entered_context

    # finish_on_close=False: the caller finishes the span explicitly.
    opentracing.tracer.scope_manager.activate(span, False)
    assert opentracing.tracer.active_span is not None
    assert opentracing.tracer.active_span is span

    return entered_context
Creates a new local span for execution of the given `func`. The returned span is best used as a context manager, e.g. .. code-block:: python with func_span('my_function'): return my_function(...) At this time the func should be a string name. In the future this code can be enhanced to accept a real function and derive its qualified name. :param func: name of the function or method :param tags: optional tags to add to the child span :param require_active_trace: controls what to do when there is no active trace. If require_active_trace=True, then no span is created. If require_active_trace=False, a new trace is started. :return: new child span, or a dummy context manager if there is no active/current parent span
def func_span(func, tags=None, require_active_trace=False):
    """
    Creates a new local span for execution of the given `func`, best used
    as a context manager, e.g.

    .. code-block:: python

        with func_span('my_function'):
            return my_function(...)

    At this time `func` should be a string name. In the future this code
    can be enhanced to accept a real function and derive its qualified name.

    :param func: name of the function or method
    :param tags: optional tags to add to the child span
    :param require_active_trace: controls what to do when there is no
        active trace. If True, no span is created; if False, a new trace
        is started.
    :return: new child span, or a dummy context manager if there is no
        active/current parent span
    """
    parent = get_current_span()

    if require_active_trace and parent is None:
        # No trace to attach to: hand back a no-op context manager.
        @contextlib2.contextmanager
        def _noop():
            yield None

        return _noop()

    # TODO convert func to a proper name: module:class.func
    return utils.start_child_span(
        operation_name=str(func), parent=parent, tags=tags)
A decorator that enables tracing of the wrapped function or Tornado co-routine provided there is a parent span already established. .. code-block:: python @traced_function def my_function1(arg1, arg2=None) ... :param func: decorated function or Tornado co-routine :param name: optional name to use as the Span.operation_name. If not provided, func.__name__ will be used. :param on_start: an optional callback to be executed once the child span is started, but before the decorated function is called. It can be used to set any additional tags on the span, perhaps by inspecting the decorated function arguments. The callback must have a signature `(span, *args, *kwargs)`, where the last two collections are the arguments passed to the actual decorated function. .. code-block:: python def extract_call_site_tag(span, *args, *kwargs) if 'call_site_tag' in kwargs: span.set_tag('call_site_tag', kwargs['call_site_tag']) @traced_function(on_start=extract_call_site_tag) @tornado.get.coroutine def my_function(arg1, arg2=None, call_site_tag=None) ... :param require_active_trace: controls what to do when there is no active trace. If require_active_trace=True, then no span is created. If require_active_trace=False, a new trace is started. :return: returns a tracing decorator
def traced_function(func=None, name=None, on_start=None,
                    require_active_trace=False):
    """
    A decorator that enables tracing of the wrapped function or
    Tornado co-routine provided there is a parent span already established.

    .. code-block:: python

        @traced_function
        def my_function1(arg1, arg2=None)
            ...

    :param func: decorated function or Tornado co-routine
    :param name: optional name to use as the Span.operation_name.
        If not provided, func.__name__ will be used.
    :param on_start: an optional callback to be executed once the child span
        is started, but before the decorated function is called. It can be
        used to set any additional tags on the span, perhaps by inspecting
        the decorated function arguments. The callback must have a signature
        `(span, *args, *kwargs)`, where the last two collections are the
        arguments passed to the actual decorated function.

        .. code-block:: python

            def extract_call_site_tag(span, *args, *kwargs)
                if 'call_site_tag' in kwargs:
                    span.set_tag('call_site_tag', kwargs['call_site_tag'])

            @traced_function(on_start=extract_call_site_tag)
            @tornado.get.coroutine
            def my_function(arg1, arg2=None, call_site_tag=None)
                ...

    :param require_active_trace: controls what to do when there is no active
        trace. If require_active_trace=True, then no span is created.
        If require_active_trace=False, a new trace is started.
    :return: returns a tracing decorator
    """
    # Support both @traced_function and @traced_function(...) usage: when
    # called without func, return a partially-applied decorator factory.
    if func is None:
        return functools.partial(traced_function, name=name,
                                 on_start=on_start,
                                 require_active_trace=require_active_trace)

    if name:
        operation_name = name
    else:
        operation_name = func.__name__

    @functools.wraps(func)
    def decorator(*args, **kwargs):
        parent_span = get_current_span()
        if parent_span is None and require_active_trace:
            # No active trace and one is required: call through untraced.
            return func(*args, **kwargs)

        span = utils.start_child_span(
            operation_name=operation_name, parent=parent_span)
        if callable(on_start):
            on_start(span, *args, **kwargs)

        # We explicitly invoke deactivation callback for the StackContext,
        # because there are scenarios when it gets retained forever, for
        # example when a Periodic Callback is scheduled lazily while in the
        # scope of a tracing StackContext.
        with span_in_stack_context(span) as deactivate_cb:
            try:
                res = func(*args, **kwargs)
                # Tornado co-routines usually return futures, so we must
                # wait until the future is completed, in order to accurately
                # capture the function's execution time.
                if tornado.concurrent.is_future(res):
                    def done_callback(future):
                        deactivate_cb()
                        exception = future.exception()
                        if exception is not None:
                            span.log(event='exception', payload=exception)
                            span.set_tag('error', 'true')
                        span.finish()
                    res.add_done_callback(done_callback)
                else:
                    deactivate_cb()
                    span.finish()
                return res
            except Exception as e:
                deactivate_cb()
                span.log(event='exception', payload=e)
                span.set_tag('error', 'true')
                span.finish()
                raise

    return decorator
Start a new span as a child of parent_span. If parent_span is None, start a new root span. :param operation_name: operation name :param tracer: Tracer or None (defaults to opentracing.tracer) :param parent: parent Span or None :param tags: optional tags :return: new span
def start_child_span(operation_name, tracer=None, parent=None, tags=None):
    """
    Start a new span as a child of parent_span. If parent_span is None,
    start a new root span.

    :param operation_name: operation name
    :param tracer: Tracer or None (defaults to opentracing.tracer)
    :param parent: parent Span or None
    :param tags: optional tags
    :return: new span
    """
    active_tracer = tracer or opentracing.tracer
    # A missing parent yields a root span (child_of=None).
    parent_ctx = parent.context if parent else None
    return active_tracer.start_span(
        operation_name=operation_name,
        child_of=parent_ctx,
        tags=tags,
    )
Attempts to extract a tracing span from incoming request. If no tracing context is passed in the headers, or the data cannot be parsed, a new root span is started. :param request: HTTP request with `.headers` property exposed that satisfies a regular dictionary interface :param tracer: optional tracer instance to use. If not specified the global opentracing.tracer will be used. :return: returns a new, already started span.
def before_request(request, tracer=None):
    """
    Attempts to extract a tracing span from incoming request.
    If no tracing context is passed in the headers, or the data
    cannot be parsed, a new root span is started.

    :param request: HTTP request with `.headers` property exposed
        that satisfies a regular dictionary interface
    :param tracer: optional tracer instance to use. If not specified
        the global opentracing.tracer will be used.
    :return: returns a new, already started span.
    """
    if tracer is None:  # pragma: no cover
        tracer = opentracing.tracer

    # Tags must be prepared upfront, mainly because the RPC_SERVER tag has
    # to be set when starting the span, to support Zipkin's
    # one-span-per-RPC model.
    span_tags = {
        tags.SPAN_KIND: tags.SPAN_KIND_RPC_SERVER,
        tags.HTTP_URL: request.full_url,
    }
    # Only tag peer information that is actually present on the request.
    remote_ip = request.remote_ip
    caller_name = request.caller_name
    remote_port = request.remote_port
    for tag_key, tag_value in (
            (tags.PEER_HOST_IPV4, remote_ip),
            (tags.PEER_SERVICE, caller_name),
            (tags.PEER_PORT, remote_port)):
        if tag_value:
            span_tags[tag_key] = tag_value

    operation = request.operation
    try:
        carrier = dict(six.iteritems(request.headers))
        parent_ctx = tracer.extract(
            format=Format.HTTP_HEADERS, carrier=carrier)
    except Exception as e:
        # Unparseable context: log and start a new root span instead.
        logging.exception('trace extract failed: %s' % e)
        parent_ctx = None

    return tracer.start_span(
        operation_name=operation, child_of=parent_ctx, tags=span_tags)
HTTP headers are presented in WSGI environment with 'HTTP_' prefix. This method finds those headers, removes the prefix, converts underscores to dashes, and converts to lower case. :param wsgi_environ: :return: returns a dictionary of headers
def _parse_wsgi_headers(wsgi_environ): """ HTTP headers are presented in WSGI environment with 'HTTP_' prefix. This method finds those headers, removes the prefix, converts underscores to dashes, and converts to lower case. :param wsgi_environ: :return: returns a dictionary of headers """ prefix = 'HTTP_' p_len = len(prefix) # use .items() despite suspected memory pressure bc GC occasionally # collects wsgi_environ.iteritems() during iteration. headers = { key[p_len:].replace('_', '-').lower(): val for (key, val) in wsgi_environ.items() if key.startswith(prefix)} return headers
Taken from http://legacy.python.org/dev/peps/pep-3333/#url-reconstruction :return: Reconstructed URL from WSGI environment.
def full_url(self):
    """
    Taken from http://legacy.python.org/dev/peps/pep-3333/#url-reconstruction

    :return: Reconstructed URL from WSGI environment.
    """
    environ = self.wsgi_environ
    scheme = environ['wsgi.url_scheme']
    parts = [scheme, '://']

    host = environ.get('HTTP_HOST')
    if host:
        # HTTP_HOST already includes a non-default port when present.
        parts.append(host)
    else:
        parts.append(environ['SERVER_NAME'])
        # Only add the port when it differs from the scheme's default.
        default_port = '443' if scheme == 'https' else '80'
        if environ['SERVER_PORT'] != default_port:
            parts.append(':' + environ['SERVER_PORT'])

    parts.append(urllib.parse.quote(environ.get('SCRIPT_NAME', '')))
    parts.append(urllib.parse.quote(environ.get('PATH_INFO', '')))
    if environ.get('QUERY_STRING'):
        parts.append('?' + environ['QUERY_STRING'])
    return ''.join(parts)
Add interceptor to the end of the internal list. Note: Raises ``ValueError`` if interceptor does not extend ``OpenTracingInterceptor``
def append(cls, interceptor):
    """
    Add interceptor to the end of the internal list.

    Note: Raises ``ValueError`` if interceptor does not extend
    ``OpenTracingInterceptor``
    """
    # Validate first so an invalid interceptor leaves the list untouched.
    cls._check(interceptor)
    registered = cls._interceptors
    registered.append(interceptor)
Add interceptor to the given index in the internal list. Note: Raises ``ValueError`` if interceptor does not extend ``OpenTracingInterceptor``
def insert(cls, index, interceptor):
    """
    Add interceptor to the given index in the internal list.

    Note: Raises ``ValueError`` if interceptor does not extend
    ``OpenTracingInterceptor``
    """
    # Validate first so an invalid interceptor leaves the list untouched.
    cls._check(interceptor)
    registered = cls._interceptors
    registered.insert(index, interceptor)
This decorator allows you to make sure that a function is called once and only once. Note that recursive functions will still work. WARNING: Not thread-safe!!!
def singleton(func):
    """
    This decorator allows you to make sure that a function is called once
    and only once. Note that recursive functions will still work.

    WARNING: Not thread-safe!!!
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # After the first successful call every invocation is a no-op.
        if wrapper.__call_state__ == CALLED:
            return None
        result = func(*args, **kwargs)
        # State flips only after func returns, so a raising first call
        # leaves the function callable again.
        wrapper.__call_state__ = CALLED
        return result

    def reset():
        wrapper.__call_state__ = NOT_CALLED

    wrapper.reset = reset
    reset()

    # Save original func to be able to patch and restore multiple times
    # from unit tests.
    wrapper.__original_func = func
    return wrapper
A hook to be executed before HTTP request is executed. It returns a Span object that can be used as a context manager around the actual HTTP call implementation, or in case of async callback, it needs its `finish()` method to be called explicitly. :param request: request must match API defined by AbstractRequestWrapper :param current_span_extractor: function that extracts current span from some context :return: returns child tracing span encapsulating this request
def before_http_request(request, current_span_extractor):
    """
    A hook to be executed before HTTP request is executed.
    It returns a Span object that can be used as a context manager around
    the actual HTTP call implementation, or in case of async callback,
    it needs its `finish()` method to be called explicitly.

    :param request: request must match API defined by AbstractRequestWrapper
    :param current_span_extractor: function that extracts current span
        from some context
    :return: returns child tracing span encapsulating this request
    """
    span = utils.start_child_span(
        operation_name=request.operation,
        parent=current_span_extractor(),
    )

    span.set_tag(tags.SPAN_KIND, tags.SPAN_KIND_RPC_CLIENT)
    span.set_tag(tags.HTTP_URL, request.full_url)

    # Only tag peer information that is actually known.
    service_name = request.service_name
    host, port = request.host_port
    for tag_key, tag_value in (
            (tags.PEER_SERVICE, service_name),
            (tags.PEER_HOST_IPV4, host),
            (tags.PEER_PORT, port)):
        if tag_value:
            span.set_tag(tag_key, tag_value)

    # Fire interceptors before injecting trace headers.
    for interceptor in ClientInterceptors.get_interceptors():
        interceptor.process(request=request, span=span)

    try:
        carrier = {}
        opentracing.tracer.inject(span_context=span.context,
                                  format=Format.HTTP_HEADERS,
                                  carrier=carrier)
        for header_key, header_value in six.iteritems(carrier):
            request.add_header(header_key, header_value)
    except opentracing.UnsupportedFormatException:
        # Tracer cannot encode HTTP headers: send the request untraced.
        pass

    return span
Smooth an image ANTsR function: `smoothImage` Arguments --------- image Image to smooth sigma Smoothing factor. Can be scalar, in which case the same sigma is applied to each dimension, or a vector of length dim(inimage) to specify a unique smoothness for each dimension. sigma_in_physical_coordinates : boolean If true, the smoothing factor is in millimeters; if false, it is in pixels. FWHM : boolean If true, sigma is interpreted as the full-width-half-max (FWHM) of the filter, not the sigma of a Gaussian kernel. max_kernel_width : scalar Maximum kernel width Returns ------- ANTsImage Example ------- >>> import ants >>> image = ants.image_read( ants.get_ants_data('r16')) >>> simage = ants.smooth_image(image, (1.2,1.5))
def smooth_image(image, sigma, sigma_in_physical_coordinates=True, FWHM=False, max_kernel_width=32):
    """
    Smooth an image

    ANTsR function: `smoothImage`

    Arguments
    ---------
    image
        Image to smooth
    sigma
        Smoothing factor. Can be scalar, in which case the same sigma is
        applied to each dimension, or a vector of length dim(inimage) to
        specify a unique smoothness for each dimension.
    sigma_in_physical_coordinates : boolean
        If true, the smoothing factor is in millimeters; if false, it is
        in pixels.
    FWHM : boolean
        If true, sigma is interpreted as the full-width-half-max (FWHM)
        of the filter, not the sigma of a Gaussian kernel.
    max_kernel_width : scalar
        Maximum kernel width

    Returns
    -------
    ANTsImage

    Example
    -------
    >>> import ants
    >>> image = ants.image_read( ants.get_ants_data('r16'))
    >>> simage = ants.smooth_image(image, (1.2,1.5))
    """
    if image.components == 1:
        return _smooth_image_helper(
            image, sigma, sigma_in_physical_coordinates, FWHM,
            max_kernel_width)

    # Multi-channel image: smooth each channel independently, then merge.
    # Fix: the original loop reused the name `image` as its loop variable,
    # shadowing the function argument; a distinct name is used instead.
    channels = utils.split_channels(image)
    smoothed = [
        _smooth_image_helper(channel, sigma, sigma_in_physical_coordinates,
                             FWHM, max_kernel_width)
        for channel in channels
    ]
    return utils.merge_channels(smoothed)
Estimate an optimal template from an input image_list ANTsR function: N/A Arguments --------- initial_template : ANTsImage initialization for the template building image_list : ANTsImages images from which to estimate template iterations : integer number of template building iterations gradient_step : scalar for shape update gradient kwargs : keyword args extra arguments passed to ants registration Returns ------- ANTsImage Example ------- >>> import ants >>> image = ants.image_read( ants.get_ants_data('r16') , 'float') >>> image2 = ants.image_read( ants.get_ants_data('r27') , 'float') >>> image3 = ants.image_read( ants.get_ants_data('r85') , 'float') >>> timage = ants.build_template( image_list = ( image, image2, image3 ) )
def build_template( initial_template=None, image_list=None, iterations = 3, gradient_step = 0.2, **kwargs ):
    """
    Estimate an optimal template from an input image_list

    ANTsR function: N/A

    Arguments
    ---------
    initial_template : ANTsImage
        initialization for the template building
    image_list : ANTsImages
        images from which to estimate template
    iterations : integer
        number of template building iterations
    gradient_step : scalar
        for shape update gradient
    kwargs : keyword args
        extra arguments passed to ants registration

    Returns
    -------
    ANTsImage

    Example
    -------
    >>> import ants
    >>> image = ants.image_read( ants.get_ants_data('r16') , 'float')
    >>> image2 = ants.image_read( ants.get_ants_data('r27') , 'float')
    >>> image3 = ants.image_read( ants.get_ants_data('r85') , 'float')
    >>> timage = ants.build_template( image_list = ( image, image2, image3 ) )
    """
    # Equal weight for every input image.
    wt = 1.0 / len( image_list )
    if initial_template is None:
        # Bootstrap the template as the unweighted average of the inputs.
        initial_template = image_list[ 0 ] * 0
        for i in range( len( image_list ) ):
            initial_template = initial_template + image_list[ i ] * wt
    xavg = initial_template.clone()
    for i in range( iterations ):
        for k in range( len( image_list ) ):
            # Deformably register the current template to each input image,
            # accumulating the weighted average warp and warped intensity.
            w1 = registration( xavg, image_list[k], type_of_transform='SyN', **kwargs )
            if k == 0:
                wavg = iio.image_read( w1['fwdtransforms'][0] ) * wt
                xavgNew = w1['warpedmovout'] * wt
            else:
                wavg = wavg + iio.image_read( w1['fwdtransforms'][0] ) * wt
                xavgNew = xavgNew + w1['warpedmovout'] * wt
        # NOTE(review): xavgNew (the average warped intensity) is never used
        # below — only the shape update is applied to xavg. Confirm whether
        # the intensity average was meant to feed the next iteration.
        print( wavg.abs().mean() )
        # Negate and scale the mean warp so applying it moves the template
        # toward the population mean shape.
        wscl = (-1.0) * gradient_step
        wavg = wavg * wscl
        # NOTE(review): mktemp is race-prone; consider a safer tempfile API
        # (behavior-affecting, so left unchanged here).
        wavgfn = mktemp(suffix='.nii.gz')
        iio.image_write(wavg, wavgfn)
        xavg = apply_transforms( xavg, xavg, wavgfn )
    return xavg
X : ANTsImage | string | list of ANTsImage types | list of strings images to register to fixed image y : string | list of strings labels for images
def fit(self, X, y=None):
    """
    Register each moving image to the fixed image and record the results.

    X : ANTsImage | string | list of ANTsImage types | list of strings
        images to register to fixed image
    y : string | list of strings
        labels for images; defaults to positional indices when omitted
    """
    if isinstance(X, (list, tuple)):
        moving_images = X
    else:
        moving_images = [X]

    if y is None:
        # Fall back to positional indices as labels.
        moving_labels = list(range(len(moving_images)))
    else:
        moving_labels = y

    fixed_image = self.fixed_image

    self.fwdtransforms_ = {}
    self.invtransforms_ = {}
    self.warpedmovout_ = {}
    self.warpedfixout_ = {}

    for moving_image, moving_label in zip(moving_images, moving_labels):
        fit_result = interface.registration(
            fixed_image,
            moving_image,
            type_of_transform=self.type_of_transform,
            initial_transform=None,
            outprefix='',
            mask=None,
            grad_step=0.2,
            flow_sigma=3,
            total_sigma=0,
            aff_metric='mattes',
            aff_sampling=32,
            syn_metric='mattes',
            syn_sampling=32,
            reg_iterations=(40, 20, 0),
            verbose=False)
        self.fwdtransforms_[moving_label] = fit_result['fwdtransforms']
        self.invtransforms_[moving_label] = fit_result['invtransforms']
        self.warpedmovout_[moving_label] = fit_result['warpedmovout']
        self.warpedfixout_[moving_label] = fit_result['warpedfixout']

    return self
A multiple atlas voting scheme to customize labels for a new subject. This function will also perform intensity fusion. It almost directly calls the C++ in the ANTs executable so is much faster than other variants in ANTsR. One may want to normalize image intensities for each input image before passing to this function. If no labels are passed, we do intensity fusion. Note on computation time: the underlying C++ is multithreaded. You can control the number of threads by setting the environment variable ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS e.g. to use all or some of your CPUs. This will improve performance substantially. For instance, on a macbook pro from 2015, 8 cores improves speed by about 4x. ANTsR function: `jointLabelFusion` Arguments --------- target_image : ANTsImage image to be approximated target_image_mask : ANTsImage mask with value 1 atlas_list : list of ANTsImage types list containing intensity images beta : scalar weight sharpness, default to 2 rad : scalar neighborhood radius, default to 2 label_list : list of ANTsImage types (optional) list containing images with segmentation labels rho : scalar ridge penalty increases robustness to outliers but also makes image converge to average usecor : boolean employ correlation as local similarity r_search : scalar radius of search, default is 3 nonnegative : boolean constrain weights to be non-negative verbose : boolean whether to show status updates Returns ------- dictionary w/ following key/value pairs: `segmentation` : ANTsImage segmentation image `intensity` : ANTsImage intensity image `probabilityimages` : list of ANTsImage types probability map image for each label Example ------- >>> import ants >>> ref = ants.image_read( ants.get_ants_data('r16')) >>> ref = ants.resample_image(ref, (50,50),1,0) >>> ref = ants.iMath(ref,'Normalize') >>> mi = ants.image_read( ants.get_ants_data('r27')) >>> mi2 = ants.image_read( ants.get_ants_data('r30')) >>> mi3 = ants.image_read( ants.get_ants_data('r62')) >>> mi4 = 
ants.image_read( ants.get_ants_data('r64')) >>> mi5 = ants.image_read( ants.get_ants_data('r85')) >>> refmask = ants.get_mask(ref) >>> refmask = ants.iMath(refmask,'ME',2) # just to speed things up >>> ilist = [mi,mi2,mi3,mi4,mi5] >>> seglist = [None]*len(ilist) >>> for i in range(len(ilist)): >>> ilist[i] = ants.iMath(ilist[i],'Normalize') >>> mytx = ants.registration(fixed=ref , moving=ilist[i] , >>> typeofTransform = ('Affine') ) >>> mywarpedimage = ants.apply_transforms(fixed=ref,moving=ilist[i], >>> transformlist=mytx['fwdtransforms']) >>> ilist[i] = mywarpedimage >>> seg = ants.threshold_image(ilist[i],'Otsu', 3) >>> seglist[i] = ( seg ) + ants.threshold_image( seg, 1, 3 ).morphology( operation='dilate', radius=3 ) >>> r = 2 >>> pp = ants.joint_label_fusion(ref, refmask, ilist, r_search=2, >>> label_list=seglist, rad=[r]*ref.dimension ) >>> pp = ants.joint_label_fusion(ref,refmask,ilist, r_search=2, rad=[r]*ref.dimension)
def joint_label_fusion(target_image, target_image_mask, atlas_list, beta=4, rad=2,
                       label_list=None, rho=0.01, usecor=False, r_search=3,
                       nonnegative=False, verbose=False):
    """
    A multiple atlas voting scheme to customize labels for a new subject.
    This function will also perform intensity fusion. It almost directly
    calls the C++ in the ANTs executable so is much faster than other
    variants in ANTsR. One may want to normalize image intensities for each
    input image before passing to this function. If no labels are passed,
    we do intensity fusion. Note on computation time: the underlying C++
    is multithreaded. You can control the number of threads by setting the
    environment variable ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS e.g. to use all
    or some of your CPUs. This will improve performance substantially.
    For instance, on a macbook pro from 2015, 8 cores improves speed by about 4x.

    ANTsR function: `jointLabelFusion`

    Arguments
    ---------
    target_image : ANTsImage
        image to be approximated

    target_image_mask : ANTsImage
        mask with value 1

    atlas_list : list of ANTsImage types
        list containing intensity images

    beta : scalar
        weight sharpness, default to 4

    rad : scalar
        neighborhood radius, default to 2

    label_list : list of ANTsImage types (optional)
        list containing images with segmentation labels

    rho : scalar
        ridge penalty increases robustness to outliers but also
        makes image converge to average

    usecor : boolean
        employ correlation as local similarity

    r_search : scalar
        radius of search, default is 3

    nonnegative : boolean
        constrain weights to be non-negative

    verbose : boolean
        whether to show status updates

    Returns
    -------
    dictionary w/ following key/value pairs:
        `segmentation` : ANTsImage
            segmentation image

        `intensity` : ANTsImage
            intensity image

        `probabilityimages` : list of ANTsImage types
            probability map image for each label

    Example
    -------
    >>> import ants
    >>> ref = ants.image_read( ants.get_ants_data('r16'))
    >>> ref = ants.resample_image(ref, (50,50),1,0)
    >>> ref = ants.iMath(ref,'Normalize')
    >>> mi = ants.image_read( ants.get_ants_data('r27'))
    >>> mi2 = ants.image_read( ants.get_ants_data('r30'))
    >>> mi3 = ants.image_read( ants.get_ants_data('r62'))
    >>> mi4 = ants.image_read( ants.get_ants_data('r64'))
    >>> mi5 = ants.image_read( ants.get_ants_data('r85'))
    >>> refmask = ants.get_mask(ref)
    >>> refmask = ants.iMath(refmask,'ME',2) # just to speed things up
    >>> ilist = [mi,mi2,mi3,mi4,mi5]
    >>> seglist = [None]*len(ilist)
    >>> for i in range(len(ilist)):
    >>>     ilist[i] = ants.iMath(ilist[i],'Normalize')
    >>>     mytx = ants.registration(fixed=ref , moving=ilist[i] ,
    >>>         typeofTransform = ('Affine') )
    >>>     mywarpedimage = ants.apply_transforms(fixed=ref,moving=ilist[i],
    >>>         transformlist=mytx['fwdtransforms'])
    >>>     ilist[i] = mywarpedimage
    >>>     seg = ants.threshold_image(ilist[i],'Otsu', 3)
    >>>     seglist[i] = ( seg ) + ants.threshold_image( seg, 1, 3 ).morphology( operation='dilate', radius=3 )
    >>> r = 2
    >>> pp = ants.joint_label_fusion(ref, refmask, ilist, r_search=2,
    >>>                     label_list=seglist, rad=[r]*ref.dimension )
    >>> pp = ants.joint_label_fusion(ref,refmask,ilist, r_search=2, rad=[r]*ref.dimension)
    """
    segpixtype = 'unsigned int'
    # if no labels were given (or any label image is missing), do intensity
    # fusion only ("JIF" mode) and skip the segmentation outputs
    if (label_list is None) or (np.any([l is None for l in label_list])):
        doJif = True
    else:
        doJif = False

    if not doJif:
        if len(label_list) != len(atlas_list):
            raise ValueError('len(label_list) != len(atlas_list)')
        # unique labels present inside the mask; used later to map argmax
        # indices back to the original label values
        inlabs = np.sort(np.unique(label_list[0][target_image_mask != 0]))
        mymask = target_image_mask.clone()
    else:
        mymask = target_image_mask

    osegfn = mktemp(prefix='antsr', suffix='myseg.nii.gz')
    if os.path.exists(osegfn):
        os.remove(osegfn)

    # '%02d' placeholder is filled in by the C++ code, one file per label
    probs = mktemp(prefix='antsr', suffix='prob%02d.nii.gz')
    probsbase = os.path.basename(probs)
    tdir = probs.replace(probsbase, '')
    searchpattern = probsbase.replace('%02d', '*')

    mydim = target_image_mask.dimension
    if not doJif:
        # allocate output images that the C++ code writes into via pointers.
        # FIX: was label_list[1], which raised IndexError for a single atlas
        # and arbitrarily depended on the second atlas otherwise
        outimg = label_list[0].clone(segpixtype)
        outimgi = target_image * 0
        outimg_ptr = utils.get_pointer_string(outimg)
        outimgi_ptr = utils.get_pointer_string(outimgi)
        outs = '[%s,%s,%s]' % (outimg_ptr, outimgi_ptr, probs)
    else:
        outimgi = target_image * 0
        outs = utils.get_pointer_string(outimgi)

    mymask = mymask.clone(segpixtype)
    # broadcast a scalar radius to one value per image dimension
    if (not isinstance(rad, (tuple, list))) or (len(rad) == 1):
        myrad = [rad] * mydim
    else:
        myrad = rad
    if len(myrad) != mydim:
        raise ValueError('patch radius dimensionality must equal image dimensionality')
    myrad = 'x'.join([str(mr) for mr in myrad])

    vnum = 1 if verbose else 0
    nnum = 1 if nonnegative else 0

    # flags for the antsJointFusion executable
    myargs = {
        'd': mydim,
        't': target_image,
        'a': rho,
        'b': beta,
        'c': nnum,
        'p': myrad,
        'm': 'PC',
        's': r_search,
        'x': mymask,
        'o': outs,
        'v': vnum
    }

    # repeated -g/-l flags need unique dict keys; MULTINAME suffix is stripped
    # by _int_antsProcessArguments
    kct = len(myargs.keys())
    for k in range(len(atlas_list)):
        kct += 1
        myargs['g-MULTINAME-%i' % kct] = atlas_list[k]
        if not doJif:
            kct += 1
            castseg = label_list[k].clone(segpixtype)
            myargs['l-MULTINAME-%i' % kct] = castseg

    myprocessedargs = utils._int_antsProcessArguments(myargs)

    libfn = utils.get_lib_fn('antsJointFusion')
    rval = libfn(myprocessedargs)
    if rval != 0:
        print('Warning: Non-zero return from antsJointFusion')

    if doJif:
        return outimgi

    # collect per-label probability maps written by the C++ code
    probsout = glob.glob(os.path.join(tdir, '*' + searchpattern))
    probsout.sort()
    probimgs = [iio2.image_read(f) for f in probsout]

    # hard segmentation = argmax over the per-label probability maps
    segmat = iio2.images_to_matrix(probimgs, target_image_mask)
    finalsegvec = segmat.argmax(axis=0)
    finalsegvec2 = finalsegvec.copy()

    # map argmax indices (0..k-1) back to the original label values
    for i in range(finalsegvec.max() + 1):
        finalsegvec2[finalsegvec == i] = inlabs[i]

    outimg = iio2.make_image(target_image_mask, finalsegvec2)

    return {
        'segmentation': outimg,
        'intensity': outimgi,
        'probabilityimages': probimgs
    }
Create a tiled mosaic of 2D slice images from a 3D ANTsImage. ANTsR function : N/A ANTs function : `createTiledMosaic` Arguments --------- image : ANTsImage base image to visualize rgb : ANTsImage optional overlay image to display on top of base image mask : ANTsImage optional mask image alpha : float alpha value for rgb/overlay image direction : integer or string which axis to visualize options: 0, 1, 2, 'x', 'y', 'z' pad_or_crop : list of 2-tuples padding or cropping values for each dimension and each side. - to crop the X dimension, use the following: pad_or_crop = [(10,10), 0, 0] - to pad the X dimension, use the following: pad_or_crop = [(-10,-10), 0, 0] slices : list/numpy.ndarray or integer or 3-tuple if list or numpy.ndarray: slices to use if integer: number of slices to incremenet if 3-tuple: (# slices to increment, min slice, max slice) flip_slice : 2-tuple of boolean (whether to flip X direction, whether to flip Y direction) permute_axes : boolean whether to permute axes output : string output filename where mosaic image will be saved. If not given, this function will save to a temp file, then return the image as a PIL.Image object ANTs ---- -i, --input-image inputImageFilename -r, --rgb-image rgbImageFilename -x, --mask-image maskImageFilename -a, --alpha value -e, --functional-overlay [rgbImageFileName,maskImageFileName,<alpha=1>] -o, --output tiledMosaicImage -t, --tile-geometry RxC -d, --direction 0/1/2/x/y/(z) -p, --pad-or-crop padVoxelWidth [padVoxelWidth,<constantValue=0>] [lowerPadding[0]xlowerPadding[1],upperPadding[0]xupperPadding[1],constantValue] -s, --slices Slice1xSlice2xSlice3... numberOfSlicesToIncrement [numberOfSlicesToIncrement,<minSlice=0>,<maxSlice=lastSlice>] -f, --flip-slice flipXxflipY -g, --permute-axes doPermute -h CreateTiledMosaic -i OAS1_0457_MR1_mpr_n3_anon_sbj_111BrainSegmentation0N4 . nii . gz \ -r OAS1_0457_MR1_mpr_n3_anon_sbj_111CorticalThickness_hot . nii . 
gz \ -x OAS1_0457_MR1_mpr_n3_anon_sbj_111CorticalThickness_mask . nii . gz \ -o OAS1_0457_MR1_mpr_n3_anon_sbj_111_tiledMosaic . png \ -a 1.0 -t -1 x8 -d 2 -p [ -15x -50 , -15x -30 ,0] -s [2 ,100 ,160] Example ------- >>> import ants >>> image = ants.image_read(ants.get_ants_data('ch2')) >>> plt = ants.create_tiled_mosaic(image)
def create_tiled_mosaic(image, rgb=None, mask=None, overlay=None, output=None,
                        alpha=1., direction=0, pad_or_crop=None, slices=None,
                        flip_slice=None, permute_axes=False):
    """
    Create a tiled mosaic of 2D slice images from a 3D ANTsImage.

    ANTsR function : N/A
    ANTs function : `createTiledMosaic`

    NOTE(review): only `image`, `rgb` and `output` are currently forwarded to
    the CreateTiledMosaic executable; `mask`, `overlay`, `alpha`, `direction`,
    `pad_or_crop`, `slices`, `flip_slice` and `permute_axes` are accepted for
    API compatibility but not yet wired up.

    Arguments
    ---------
    image : ANTsImage
        base image to visualize

    rgb : ANTsImage
        optional overlay image to display on top of base image

    mask : ANTsImage
        optional mask image

    alpha : float
        alpha value for rgb/overlay image

    direction : integer or string
        which axis to visualize
        options: 0, 1, 2, 'x', 'y', 'z'

    pad_or_crop : list of 2-tuples
        padding or cropping values for each dimension and each side.
        - to crop the X dimension, use the following:
            pad_or_crop = [(10,10), 0, 0]
        - to pad the X dimension, use the following:
            pad_or_crop = [(-10,-10), 0, 0]

    slices : list/numpy.ndarray or integer or 3-tuple
        if list or numpy.ndarray:
            slices to use
        if integer:
            number of slices to increment
        if 3-tuple:
            (# slices to increment, min slice, max slice)

    flip_slice : 2-tuple of boolean
        (whether to flip X direction, whether to flip Y direction)

    permute_axes : boolean
        whether to permute axes

    output : string
        output filename where mosaic image will be saved.
        If not given, this function will save to a temp file,
        then return the image as a PIL.Image object

    Example
    -------
    >>> import ants
    >>> image = ants.image_read(ants.get_ants_data('ch2'))
    >>> plt = ants.create_tiled_mosaic(image)
    """
    # CreateTiledMosaic requires an unsigned char image, so rescale
    # intensities into [0, 255] first.
    if image.pixeltype != 'unsigned char':
        # FIX: numerator previously used image.max(), which mapped values to
        # [-1, 0] (then [-255, 0]) and produced a black/clipped mosaic
        image = (image - image.min()) / (image.max() - image.min())
        image = image * 255.
        image = image.clone('unsigned char')

    # remember whether we must clean up a temp output file afterwards
    output_is_temp = False
    if output is None:
        output_is_temp = True
        output = mktemp(suffix='.jpg')

    if rgb is None:
        rgb = image.clone()

    # the executable reads images from disk, so serialize the inputs
    imagepath = mktemp(suffix='.nii.gz')
    iio2.image_write(image, imagepath)
    rgbpath = mktemp(suffix='.nii.gz')
    iio2.image_write(rgb, rgbpath)

    args = {
        'i': imagepath,
        'r': rgbpath,
        'o': output
    }
    processed_args = utils._int_antsProcessArguments(args)
    libfn = utils.get_lib_fn('CreateTiledMosaic')
    libfn(processed_args)

    outimage = Image.open(output)
    if output_is_temp:
        os.remove(output)
    return outimage
Impute missing values on a numpy ndarray in a column-wise manner. ANTsR function: `antsrimpute` Arguments --------- data : numpy.ndarray data to impute method : string or float type of imputation method to use Options: mean median constant KNN BiScaler NuclearNormMinimization SoftImpute IterativeSVD value : scalar (optional) optional arguments for different methods if method == 'constant' constant value if method == 'KNN' number of nearest neighbors to use nan_value : scalar value which is interpreted as a missing value Returns ------- ndarray if ndarray was given OR pd.DataFrame if pd.DataFrame was given Example ------- >>> import ants >>> import numpy as np >>> data = np.random.randn(4,10) >>> data[2,3] = np.nan >>> data[3,5] = np.nan >>> data_imputed = ants.impute(data, 'mean') Details ------- KNN: Nearest neighbor imputations which weights samples using the mean squared difference on features for which two rows both have observed data. SoftImpute: Matrix completion by iterative soft thresholding of SVD decompositions. Inspired by the softImpute package for R, which is based on Spectral Regularization Algorithms for Learning Large Incomplete Matrices by Mazumder et. al. IterativeSVD: Matrix completion by iterative low-rank SVD decomposition. Should be similar to SVDimpute from Missing value estimation methods for DNA microarrays by Troyanskaya et. al. MICE: Reimplementation of Multiple Imputation by Chained Equations. MatrixFactorization: Direct factorization of the incomplete matrix into low-rank U and V, with an L1 sparsity penalty on the elements of U and an L2 penalty on the elements of V. Solved by gradient descent. NuclearNormMinimization: Simple implementation of Exact Matrix Completion via Convex Optimization by Emmanuel Candes and Benjamin Recht using cvxpy. Too slow for large matrices. BiScaler: Iterative estimation of row/column means and standard deviations to get doubly normalized matrix. Not guaranteed to converge but works well in practice. 
Taken from Matrix Completion and Low-Rank SVD via Fast Alternating Least Squares.
def impute(data, method='mean', value=None, nan_value=np.nan):
    """
    Impute missing values on a numpy ndarray in a column-wise manner.

    ANTsR function: `antsrimpute`

    Arguments
    ---------
    data : numpy.ndarray
        data to impute

    method : string or float
        type of imputation method to use, or a scalar constant with which
        all missing values are filled
        Options:
            mean
            median
            constant
            KNN
            BiScaler
            NuclearNormMinimization
            SoftImpute
            IterativeSVD

    value : scalar (optional)
        optional arguments for different methods
        if method == 'constant'
            constant value
        if method == 'KNN'
            number of nearest neighbors to use

    nan_value : scalar
        value which is interpreted as a missing value
        NOTE(review): currently unused - np.nan is always treated as missing

    Returns
    -------
    ndarray if ndarray was given
    OR
    pd.DataFrame if pd.DataFrame was given

    Example
    -------
    >>> import ants
    >>> import numpy as np
    >>> data = np.random.randn(4,10)
    >>> data[2,3] = np.nan
    >>> data[3,5] = np.nan
    >>> data_imputed = ants.impute(data, 'mean')

    Details
    -------
    KNN: Nearest neighbor imputations which weights samples using the mean squared
        difference on features for which two rows both have observed data.

    SoftImpute: Matrix completion by iterative soft thresholding of SVD decompositions.
        Inspired by the softImpute package for R, which is based on
        Spectral Regularization Algorithms for Learning Large Incomplete Matrices
        by Mazumder et. al.

    IterativeSVD: Matrix completion by iterative low-rank SVD decomposition.
        Should be similar to SVDimpute from Missing value estimation methods for
        DNA microarrays by Troyanskaya et. al.

    MICE: Reimplementation of Multiple Imputation by Chained Equations.

    MatrixFactorization: Direct factorization of the incomplete matrix into
        low-rank U and V, with an L1 sparsity penalty on the elements of U and
        an L2 penalty on the elements of V. Solved by gradient descent.

    NuclearNormMinimization: Simple implementation of Exact Matrix Completion
        via Convex Optimization by Emmanuel Candes and Benjamin Recht using cvxpy.
        Too slow for large matrices.

    BiScaler: Iterative estimation of row/column means and standard deviations
        to get doubly normalized matrix. Not guaranteed to converge but works
        well in practice. Taken from Matrix Completion and Low-Rank SVD via
        Fast Alternating Least Squares.
    """
    _fancyimpute_options = {'KNN', 'BiScaler', 'NuclearNormMinimization',
                            'SoftImpute', 'IterativeSVD'}

    # check for the optional dependency only when one of its methods is
    # actually requested, so the base methods never touch the import flag
    if (method in _fancyimpute_options) and (not has_fancyimpute):
        raise ValueError('You must install `fancyimpute` (pip install fancyimpute) to use this method')

    _base_options = {'mean', 'median', 'constant'}
    if (method not in _base_options) and (method not in _fancyimpute_options) and (not isinstance(method, (int, float))):
        raise ValueError('method not understood.. Use `mean`, `median`, a scalar, or an option from `fancyimpute`')

    # never mutate the caller's array
    X_incomplete = data.copy()

    if method == 'KNN':
        if value is None:
            value = 3
        X_filled = KNN(k=value, verbose=False).complete(X_incomplete)
    elif method == 'BiScaler':
        X_filled = BiScaler(verbose=False).fit_transform(X_incomplete)
    elif method == 'SoftImpute':
        X_filled = SoftImpute(verbose=False).complete(X_incomplete)
    elif method == 'IterativeSVD':
        if value is None:
            rank = min(10, X_incomplete.shape[0] - 2)
        else:
            rank = value
        X_filled = IterativeSVD(rank=rank, verbose=False).complete(X_incomplete)
    elif method == 'mean':
        col_means = np.nanmean(X_incomplete, axis=0)
        for i in range(X_incomplete.shape[1]):
            X_incomplete[:, i][np.isnan(X_incomplete[:, i])] = col_means[i]
        X_filled = X_incomplete
    elif method == 'median':
        # FIX: previously used np.nanmean, so 'median' silently behaved
        # exactly like 'mean'
        col_medians = np.nanmedian(X_incomplete, axis=0)
        for i in range(X_incomplete.shape[1]):
            X_incomplete[:, i][np.isnan(X_incomplete[:, i])] = col_medians[i]
        X_filled = X_incomplete
    elif method == 'constant':
        if value is None:
            raise ValueError('Must give `value` argument if method == constant')
        X_incomplete[np.isnan(X_incomplete)] = value
        X_filled = X_incomplete
    else:
        # scalar `method`: documented and accepted by the validation above,
        # but previously fell through every branch and raised NameError on
        # X_filled; fill missing values with the given scalar
        X_incomplete[np.isnan(X_incomplete)] = method
        X_filled = X_incomplete

    return X_filled
Resample image by spacing or number of voxels with various interpolators. Works with multi-channel images. ANTsR function: `resampleImage` Arguments --------- image : ANTsImage input image resample_params : tuple/list vector of size dimension with numeric values use_voxels : boolean True means interpret resample params as voxel counts interp_type : integer one of 0 (linear), 1 (nearest neighbor), 2 (gaussian), 3 (windowed sinc), 4 (bspline) Returns ------- ANTsImage Example ------- >>> import ants >>> fi = ants.image_read( ants.get_ants_data("r16")) >>> finn = ants.resample_image(fi,(50,60),True,0) >>> filin = ants.resample_image(fi,(1.5,1.5),False,1)
def resample_image(image, resample_params, use_voxels=False, interp_type=1): """ Resample image by spacing or number of voxels with various interpolators. Works with multi-channel images. ANTsR function: `resampleImage` Arguments --------- image : ANTsImage input image resample_params : tuple/list vector of size dimension with numeric values use_voxels : boolean True means interpret resample params as voxel counts interp_type : integer one of 0 (linear), 1 (nearest neighbor), 2 (gaussian), 3 (windowed sinc), 4 (bspline) Returns ------- ANTsImage Example ------- >>> import ants >>> fi = ants.image_read( ants.get_ants_data("r16")) >>> finn = ants.resample_image(fi,(50,60),True,0) >>> filin = ants.resample_image(fi,(1.5,1.5),False,1) """ if image.components == 1: inimage = image.clone('float') outimage = image.clone('float') rsampar = 'x'.join([str(rp) for rp in resample_params]) args = [image.dimension, inimage, outimage, rsampar, int(use_voxels), interp_type] processed_args = utils._int_antsProcessArguments(args) libfn = utils.get_lib_fn('ResampleImage') libfn(processed_args) outimage = outimage.clone(image.pixeltype) return outimage else: raise ValueError('images with more than 1 component not currently supported')
Resample image by using another image as target reference. This function uses ants.apply_transform with an identity matrix to achieve proper resampling. ANTsR function: `resampleImageToTarget` Arguments --------- image : ANTsImage image to resample target : ANTsImage image of reference, the output will be in this space interp_type : string Choice of interpolator. Supports partial matching. linear nearestNeighbor multiLabel for label images but genericlabel is preferred gaussian bSpline cosineWindowedSinc welchWindowedSinc hammingWindowedSinc lanczosWindowedSinc genericLabel use this for label images imagetype : integer choose 0/1/2/3 mapping to scalar/vector/tensor/time-series verbose : boolean print command and run verbose application of transform. kwargs : keyword arguments additional arugment passed to antsApplyTransforms C code Returns ------- ANTsImage Example ------- >>> import ants >>> fi = ants.image_read(ants.get_ants_data('r16')) >>> fi2mm = ants.resample_image(fi, (2,2), use_voxels=0, interp_type='linear') >>> resampled = ants.resample_image_to_target(fi2mm, fi, verbose=True)
def resample_image_to_target(image, target, interp_type='linear', imagetype=0, verbose=False, **kwargs):
    """
    Resample image by using another image as target reference.
    This function uses ants.apply_transform with an identity matrix
    to achieve proper resampling.

    ANTsR function: `resampleImageToTarget`

    Arguments
    ---------
    image : ANTsImage
        image to resample

    target : ANTsImage
        image of reference, the output will be in this space

    interp_type : string
        Choice of interpolator. Supports partial matching.
            linear
            nearestNeighbor
            multiLabel for label images but genericlabel is preferred
            gaussian
            bSpline
            cosineWindowedSinc
            welchWindowedSinc
            hammingWindowedSinc
            lanczosWindowedSinc
            genericLabel use this for label images

    imagetype : integer
        choose 0/1/2/3 mapping to scalar/vector/tensor/time-series

    verbose : boolean
        print command and run verbose application of transform.

    kwargs : keyword arguments
        additional argument passed to antsApplyTransforms C code
        NOTE(review): kwargs are accepted but never forwarded below - confirm

    Returns
    -------
    ANTsImage

    Example
    -------
    >>> import ants
    >>> fi = ants.image_read(ants.get_ants_data('r16'))
    >>> fi2mm = ants.resample_image(fi, (2,2), use_voxels=0, interp_type='linear')
    >>> resampled = ants.resample_image_to_target(fi2mm, fi, verbose=True)
    """
    # resampling is implemented as applying an identity transform with the
    # target image as the reference space
    fixed = target
    moving = image
    compose = None  # always None here, so the compose branches below are dead code
    transformlist = 'identity'
    interpolator = interp_type

    # legacy integer codes accepted for backward compatibility
    # NOTE(review): confirm this ordering matches the old integer convention
    interpolator_oldoptions = ("linear", "nearestNeighbor", "gaussian", "cosineWindowedSinc", "bSpline")

    if isinstance(interp_type, int):
        interpolator = interpolator_oldoptions[interp_type]

    accepted_interpolators = {"linear", "nearestNeighbor", "multiLabel", "gaussian",
                        "bSpline", "cosineWindowedSinc", "welchWindowedSinc",
                        "hammingWindowedSinc", "lanczosWindowedSinc", "genericLabel"}

    if interpolator not in accepted_interpolators:
        raise ValueError('interpolator not supported - see %s' % accepted_interpolators)

    args = [fixed, moving, transformlist, interpolator]

    if not isinstance(fixed, str):
        if isinstance(fixed, iio.ANTsImage) and isinstance(moving, iio.ANTsImage):
            inpixeltype = fixed.pixeltype
            # output buffer the C++ code writes the resampled image into
            warpedmovout = moving.clone()
            f = fixed
            m = moving
            if (moving.dimension == 4) and (fixed.dimension==3) and (imagetype==0):
                raise ValueError('Set imagetype 3 to transform time series images.')

            wmo = warpedmovout
            mytx = ['-t', 'identity']
            if compose is None:
                # standard path: write resampled image into the clone above
                args = ['-d', fixed.dimension, '-i', m, '-o', wmo, '-r', f, '-n', interpolator] + mytx

            # compose is always None here, so tfn is always 'NA' and the
            # composed-transform path never runs
            tfn = '%scomptx.nii.gz' % compose if compose is not None else 'NA'
            if compose is not None:
                mycompo = '[%s,1]' % tfn
                args = ['-d', fixed.dimension, '-i', m, '-o', mycompo, '-r', f, '-n', interpolator] + mytx

            myargs = utils._int_antsProcessArguments(args)

            # drop any bare '-' entries produced by argument processing,
            # shifting the remaining arguments down by one
            # NOTE(review): the copy skips index jj-1 as well as jj - looks
            # off-by-one; confirm intent before touching
            for jj in range(len(myargs)):
                if myargs[jj] is not None:
                    if myargs[jj] == '-':
                        myargs2 = [None]*(len(myargs)-1)
                        myargs2[:(jj-1)] = myargs[:(jj-1)]
                        myargs2[jj:(len(myargs)-1)] = myargs[(jj+1):(len(myargs))]
                        myargs = myargs2

            myverb = int(verbose)

            processed_args = myargs + ['-z', str(1), '-v', str(myverb), '--float', str(1), '-e', str(imagetype)]
            libfn = utils.get_lib_fn('antsApplyTransforms')
            libfn(processed_args)

            if compose is None:
                # restore the reference image's pixel type on the way out
                return warpedmovout.clone(inpixeltype)
            else:
                if os.path.exists(tfn):
                    return tfn
                else:
                    return None
        else:
            # non-image inputs are signalled with a sentinel return value
            return 1
    else:
        # NOTE(review): this branch references myargs before assignment and
        # would raise NameError if fixed were ever a str - confirm reachable
        processed_args = myargs + ['-z', str(1), '--float', str(1), '-e', str(imagetype)]
        libfn = utils.get_lib_fn('antsApplyTransforms')
        libfn(processed_args)
Apply ANTsTransform to data ANTsR function: `applyAntsrTransform` Arguments --------- transform : ANTsTransform transform to apply to image data : ndarray/list/tuple data to which transform will be applied data_type : string type of data Options : 'point' 'vector' 'image' reference : ANTsImage target space for transforming image kwargs : kwargs additional options passed to `apply_ants_transform_to_image` Returns ------- ANTsImage if data_type == 'point' OR tuple if data_type == 'point' or data_type == 'vector'
def apply_ants_transform(transform, data, data_type="point", reference=None, **kwargs): """ Apply ANTsTransform to data ANTsR function: `applyAntsrTransform` Arguments --------- transform : ANTsTransform transform to apply to image data : ndarray/list/tuple data to which transform will be applied data_type : string type of data Options : 'point' 'vector' 'image' reference : ANTsImage target space for transforming image kwargs : kwargs additional options passed to `apply_ants_transform_to_image` Returns ------- ANTsImage if data_type == 'point' OR tuple if data_type == 'point' or data_type == 'vector' """ return transform.apply(data, data_type, reference, **kwargs)