| idx (int64, 0–251k) | question (string, 53–3.53k chars) | target (string, 5–1.23k chars) | len_question (int64, 20–893) | len_target (int64, 3–238) |
|---|---|---|---|---|
242,300 | def get_output_docs ( self ) : if not self . parsed : self . _parse ( ) lst = [ ] for e in self . docs_list : lst . append ( e [ 'docs' ] . get_raw_docs ( ) ) return lst | Return the output docstrings once formatted | 59 | 7 |
242,301 | def compute_before_after ( self ) : if not self . parsed : self . _parse ( ) list_from = self . input_lines list_to = [ ] last = 0 for e in self . docs_list : start , end = e [ 'location' ] if start <= 0 : start , end = - start , - end list_to . extend ( list_from [ last : start + 1 ] ) else : list_to . extend ( list_f... | Compute the list of lines before and after the proposed docstring changes . | 191 | 15 |
242,302 | def diff ( self , source_path = '' , target_path = '' , which = - 1 ) : list_from , list_to = self . compute_before_after ( ) if source_path . startswith ( os . sep ) : source_path = source_path [ 1 : ] if source_path and not source_path . endswith ( os . sep ) : source_path += os . sep if target_path . startswith ( os... | Build the diff between original docstring and proposed docstring . | 227 | 12 |
242,303 | def get_patch_lines ( self , source_path , target_path ) : diff = self . diff ( source_path , target_path ) return [ "# Patch generated by Pyment v{0}\n\n" . format ( __version__ ) ] + diff | Return the diff between source_path and target_path | 58 | 11 |
242,304 | def write_patch_file ( self , patch_file , lines_to_write ) : with open ( patch_file , 'w' ) as f : f . writelines ( lines_to_write ) | Write lines_to_write to a the file called patch_file | 45 | 14 |
242,305 | def overwrite_source_file ( self , lines_to_write ) : tmp_filename = '{0}.writing' . format ( self . input_file ) ok = False try : with open ( tmp_filename , 'w' ) as fh : fh . writelines ( lines_to_write ) ok = True finally : if ok : if platform . system ( ) == 'Windows' : self . _windows_rename ( tmp_filename ) else ... | overwrite the file with line_to_write | 125 | 10 |
242,306 | def by_own_time_per_call ( stat ) : return ( - stat . own_time_per_call if stat . own_hits else - stat . own_time , by_deep_time_per_call ( stat ) ) | Sorting by exclusive elapsed time per call in descending order . | 54 | 12 |
242,307 | def result ( self ) : try : cpu_time = max ( 0 , time . clock ( ) - self . _cpu_time_started ) wall_time = max ( 0 , time . time ( ) - self . _wall_time_started ) except AttributeError : cpu_time = wall_time = 0.0 return self . stats , cpu_time , wall_time | Gets the frozen statistics to serialize by Pickle . | 82 | 12 |
242,308 | def dump ( self , dump_filename , pickle_protocol = pickle . HIGHEST_PROTOCOL ) : result = self . result ( ) with open ( dump_filename , 'wb' ) as f : pickle . dump ( ( self . __class__ , result ) , f , pickle_protocol ) | Saves the profiling result to a file | 71 | 8 |
242,309 | def make_viewer ( self , title = None , at = None ) : viewer = StatisticsViewer ( ) viewer . set_profiler_class ( self . __class__ ) stats , cpu_time , wall_time = self . result ( ) viewer . set_result ( stats , cpu_time , wall_time , title = title , at = at ) viewer . activate ( ) return viewer | Makes a statistics viewer from the profiling result . | 85 | 10 |
242,310 | def pack_msg ( method , msg , pickle_protocol = PICKLE_PROTOCOL ) : dump = io . BytesIO ( ) pickle . dump ( msg , dump , pickle_protocol ) size = dump . tell ( ) return ( struct . pack ( METHOD_STRUCT_FORMAT , method ) + struct . pack ( SIZE_STRUCT_FORMAT , size ) + dump . getvalue ( ) ) | Packs a method and message . | 95 | 7 |
242,311 | def recv ( sock , size ) : data = sock . recv ( size , socket . MSG_WAITALL ) if len ( data ) < size : raise socket . error ( ECONNRESET , 'Connection closed' ) return data | Receives exactly size bytes . This function blocks the thread . | 52 | 13 |
242,312 | def recv_msg ( sock ) : data = recv ( sock , struct . calcsize ( METHOD_STRUCT_FORMAT ) ) method , = struct . unpack ( METHOD_STRUCT_FORMAT , data ) data = recv ( sock , struct . calcsize ( SIZE_STRUCT_FORMAT ) ) size , = struct . unpack ( SIZE_STRUCT_FORMAT , data ) data = recv ( sock , size ) msg = pickle . loads ( d... | Receives a method and message from the socket . This function blocks the current thread . | 111 | 18 |
242,313 | def connected ( self , client ) : self . clients . add ( client ) self . _log_connected ( client ) self . _start_watching ( client ) self . send_msg ( client , WELCOME , ( self . pickle_protocol , __version__ ) , pickle_protocol = 0 ) profiler = self . profiler while True : try : profiler = profiler . profiler except A... | Call this method when a client connected . | 195 | 8 |
242,314 | def disconnected ( self , client ) : if client not in self . clients : # already disconnected. return self . clients . remove ( client ) self . _log_disconnected ( client ) self . _close ( client ) | Call this method when a client disconnected . | 46 | 8 |
242,315 | def get_mark ( self ) : if self . is_leaf : char = self . icon_chars [ 2 ] else : char = self . icon_chars [ int ( self . expanded ) ] return urwid . SelectableIcon ( ( 'mark' , char ) , 0 ) | Gets an expanded collapsed or leaf icon . | 62 | 9 |
242,316 | def get_path ( self ) : path = deque ( ) __ , node = self . get_focus ( ) while not node . is_root ( ) : stats = node . get_value ( ) path . appendleft ( hash ( stats ) ) node = node . get_parent ( ) return path | Gets the path to the focused statistics . Each step is a hash of statistics object . | 65 | 18 |
242,317 | def find_node ( self , node , path ) : for hash_value in path : if isinstance ( node , LeafStatisticsNode ) : break for stats in node . get_child_keys ( ) : if hash ( stats ) == hash_value : node = node . get_child_node ( stats ) break else : break return node | Finds a node by the given path from the given node . | 72 | 13 |
242,318 | def update_result ( self ) : try : if self . paused : result = self . _paused_result else : result = self . _final_result except AttributeError : self . table . update_frame ( ) return stats , cpu_time , wall_time , title , at = result self . table . set_result ( stats , cpu_time , wall_time , title , at ) | Updates the result on the table . | 86 | 8 |
242,319 | def option_getter ( type ) : option_getters = { None : ConfigParser . get , int : ConfigParser . getint , float : ConfigParser . getfloat , bool : ConfigParser . getboolean } return option_getters . get ( type , option_getters [ None ] ) | Gets an unbound method to get a configuration option as the given type . | 65 | 16 |
242,320 | def config_default ( option , default = None , type = None , section = cli . name ) : def f ( option = option , default = default , type = type , section = section ) : config = read_config ( ) if type is None and default is not None : # detect type from default. type = builtins . type ( default ) get_option = option_ge... | Guesses a default value of a CLI option from the configuration . | 114 | 13 |
242,321 | def config_flag ( option , value , default = False , section = cli . name ) : class x ( object ) : def __bool__ ( self , option = option , value = value , default = default , section = section ) : config = read_config ( ) type = builtins . type ( value ) get_option = option_getter ( type ) try : return get_option ( con... | Guesses whether a CLI flag should be turned on or off from the configuration . If the configuration option value is same with the given value it returns True . | 118 | 31 |
242,322 | def get_title ( src_name , src_type = None ) : if src_type == 'tcp' : return '{0}:{1}' . format ( * src_name ) return os . path . basename ( src_name ) | Normalizes a source name as a string to be used for viewer s title . | 55 | 16 |
242,323 | def spawn_thread ( func , * args , * * kwargs ) : thread = threading . Thread ( target = func , args = args , kwargs = kwargs ) thread . daemon = True thread . start ( ) return thread | Spawns a daemon thread . | 52 | 6 |
242,324 | def spawn ( mode , func , * args , * * kwargs ) : if mode is None : # 'threading' is the default mode. mode = 'threading' elif mode not in spawn . modes : # validate the given mode. raise ValueError ( 'Invalid spawn mode: %s' % mode ) if mode == 'threading' : return spawn_thread ( func , * args , * * kwargs ) elif mode... | Spawns a thread - like object which runs the given function concurrently . | 195 | 14 |
242,325 | def profile ( script , argv , profiler_factory , pickle_protocol , dump_filename , mono ) : filename , code , globals_ = script sys . argv [ : ] = [ filename ] + list ( argv ) __profile__ ( filename , code , globals_ , profiler_factory , pickle_protocol = pickle_protocol , dump_filename = dump_filename , mono = mono ) | Profile a Python script . | 95 | 5 |
242,326 | def live_profile ( script , argv , profiler_factory , interval , spawn , signum , pickle_protocol , mono ) : filename , code , globals_ = script sys . argv [ : ] = [ filename ] + list ( argv ) parent_sock , child_sock = socket . socketpair ( ) stderr_r_fd , stderr_w_fd = os . pipe ( ) pid = os . fork ( ) if pid : # par... | Profile a Python script continuously . | 641 | 6 |
242,327 | def view ( src , mono ) : src_type , src_name = src title = get_title ( src_name , src_type ) viewer , loop = make_viewer ( mono ) if src_type == 'dump' : time = datetime . fromtimestamp ( os . path . getmtime ( src_name ) ) with open ( src_name , 'rb' ) as f : profiler_class , ( stats , cpu_time , wall_time ) = pickle... | Inspect statistics by TUI view . | 248 | 8 |
242,328 | def timeit_profile ( stmt , number , repeat , setup , profiler_factory , pickle_protocol , dump_filename , mono , * * _ignored ) : del _ignored globals_ = { } exec_ ( setup , globals_ ) if number is None : # determine number so that 0.2 <= total time < 2.0 like timeit. dummy_profiler = profiler_factory ( ) dummy_profil... | Profile a Python statement like timeit . | 246 | 8 |
242,329 | def spread_stats ( stats , spreader = False ) : spread = spread_t ( ) if spreader else True descendants = deque ( stats ) while descendants : _stats = descendants . popleft ( ) if spreader : spread . clear ( ) yield _stats , spread else : yield _stats if spread : descendants . extend ( _stats ) | Iterates all descendant statistics under the given root statistics . | 74 | 11 |
242,330 | def own_time ( self ) : sub_time = sum ( stats . deep_time for stats in self ) return max ( 0. , self . deep_time - sub_time ) | The exclusive execution time . | 40 | 5 |
242,331 | def flatten ( cls , stats ) : flat_children = { } for _stats in spread_stats ( stats ) : key = ( _stats . name , _stats . filename , _stats . lineno , _stats . module ) try : flat_stats = flat_children [ key ] except KeyError : flat_stats = flat_children [ key ] = cls ( * key ) flat_stats . own_hits += _stats . own_hit... | Makes a flat statistics from the given statistics . | 207 | 10 |
242,332 | def requirements ( filename ) : with open ( filename ) as f : return [ x . strip ( ) for x in f . readlines ( ) if x . strip ( ) ] | Reads requirements from a file . | 37 | 7 |
242,333 | def sample ( self , frame ) : frames = self . frame_stack ( frame ) if frames : frames . pop ( ) parent_stats = self . stats for f in frames : parent_stats = parent_stats . ensure_child ( f . f_code , void ) stats = parent_stats . ensure_child ( frame . f_code , RecordingStatistics ) stats . own_hits += 1 | Samples the given frame . | 85 | 6 |
242,334 | def deferral ( ) : deferred = [ ] defer = lambda f , * a , * * k : deferred . append ( ( f , a , k ) ) try : yield defer finally : while deferred : f , a , k = deferred . pop ( ) f ( * a , * * k ) | Defers a function call when it is being required like Go . | 63 | 13 |
242,335 | def start ( self , * args , * * kwargs ) : if self . is_running ( ) : raise RuntimeError ( 'Already started' ) self . _running = self . run ( * args , * * kwargs ) try : yielded = next ( self . _running ) except StopIteration : raise TypeError ( 'run() must yield just one time' ) if yielded is not None : raise TypeErro... | Starts the instance . | 100 | 5 |
242,336 | def stop ( self ) : if not self . is_running ( ) : raise RuntimeError ( 'Not started' ) running , self . _running = self . _running , None try : next ( running ) except StopIteration : # expected. pass else : raise TypeError ( 'run() must yield just one time' ) | Stops the instance . | 69 | 5 |
242,337 | def sockets ( self ) : if self . listener is None : return self . clients else : return self . clients . union ( [ self . listener ] ) | Returns the set of the sockets . | 32 | 7 |
242,338 | def select_sockets ( self , timeout = None ) : if timeout is not None : t = time . time ( ) while True : try : ready , __ , __ = select . select ( self . sockets ( ) , ( ) , ( ) , timeout ) except ValueError : # there's fd=0 socket. pass except select . error as exc : # ignore an interrupted system call. if exc . args ... | EINTR safe version of select . It focuses on just incoming sockets . | 147 | 15 |
242,339 | def dispatch_sockets ( self , timeout = None ) : for sock in self . select_sockets ( timeout = timeout ) : if sock is self . listener : listener = sock sock , addr = listener . accept ( ) self . connected ( sock ) else : try : sock . recv ( 1 ) except socket . error as exc : if exc . errno != ECONNRESET : raise self . ... | Dispatches incoming sockets . | 90 | 6 |
242,340 | def record_entering ( self , time , code , frame_key , parent_stats ) : stats = parent_stats . ensure_child ( code , RecordingStatistics ) self . _times_entered [ ( code , frame_key ) ] = time stats . own_hits += 1 | Entered to a function call . | 62 | 7 |
242,341 | def record_leaving ( self , time , code , frame_key , parent_stats ) : try : stats = parent_stats . get_child ( code ) time_entered = self . _times_entered . pop ( ( code , frame_key ) ) except KeyError : return time_elapsed = time - time_entered stats . deep_time += max ( 0 , time_elapsed ) | Left from a function call . | 89 | 6 |
242,342 | def build_sink ( function : Callable [ ... , None ] = None , * , unpack : bool = False ) : def _build_sink ( function : Callable [ ... , None ] ) : @ wraps ( function ) def _wrapper ( * args , * * kwargs ) -> Sink : if 'unpack' in kwargs : raise TypeError ( '"unpack" has to be defined by decorator' ) return Sink ( func... | Decorator to wrap a function to return a Sink subscriber . | 138 | 14 |
242,343 | def build_map ( function : Callable [ [ Any ] , Any ] = None , unpack : bool = False ) : def _build_map ( function : Callable [ [ Any ] , Any ] ) : @ wraps ( function ) def _wrapper ( * args , * * kwargs ) -> Map : if 'unpack' in kwargs : raise TypeError ( '"unpack" has to be defined by decorator' ) return Map ( functi... | Decorator to wrap a function to return a Map operator . | 134 | 13 |
242,344 | def _trace_handler ( publisher , value , label = None ) : line = '--- %8.3f: ' % ( time ( ) - Trace . _timestamp_start ) line += repr ( publisher ) if label is None else label line += ' %r' % ( value , ) print ( line ) | Default trace handler is printing the timestamp the publisher name and the emitted value | 67 | 14 |
242,345 | def build_sink_async ( coro = None , * , mode = None , unpack : bool = False ) : _mode = mode def _build_sink_async ( coro ) : @ wraps ( coro ) def _wrapper ( * args , mode = None , * * kwargs ) -> SinkAsync : if 'unpack' in kwargs : raise TypeError ( '"unpack" has to be defined by decorator' ) if mode is None : mode =... | Decorator to wrap a coroutine to return a SinkAsync subscriber . | 178 | 16 |
242,346 | def build_accumulate ( function : Callable [ [ Any , Any ] , Tuple [ Any , Any ] ] = None , * , init : Any = NONE ) : _init = init def _build_accumulate ( function : Callable [ [ Any , Any ] , Tuple [ Any , Any ] ] ) : @ wraps ( function ) def _wrapper ( init = NONE ) -> Accumulate : init = _init if init is NONE else i... | Decorator to wrap a function to return an Accumulate operator . | 157 | 15 |
242,347 | def resolve_meta_key ( hub , key , meta ) : if key not in meta : return None value = meta [ key ] if isinstance ( value , str ) and value [ 0 ] == '>' : topic = value [ 1 : ] if topic not in hub : raise KeyError ( 'topic %s not found in hub' % topic ) return hub [ topic ] . get ( ) return value | Resolve a value when it s a string and starts with > | 86 | 13 |
242,348 | def checked_emit ( self , value : Any ) -> asyncio . Future : if not isinstance ( self . _subject , Subscriber ) : raise TypeError ( 'Topic %r has to be a subscriber' % self . _path ) value = self . cast ( value ) self . check ( value ) return self . _subject . emit ( value , who = self ) | Casting and checking in one call | 81 | 7 |
242,349 | def add_datatype ( self , name : str , datatype : DT ) : self . _datatypes [ name ] = datatype | Register the datatype with it s name | 33 | 9 |
242,350 | def cast ( self , topic , value ) : datatype_key = topic . meta . get ( 'datatype' , 'none' ) result = self . _datatypes [ datatype_key ] . cast ( topic , value ) validate_dt = topic . meta . get ( 'validate' , None ) if validate_dt : result = self . _datatypes [ validate_dt ] . cast ( topic , result ) return result | Cast a string to the value based on the datatype | 98 | 12 |
242,351 | def check ( self , topic , value ) : datatype_key = topic . meta . get ( 'datatype' , 'none' ) self . _datatypes [ datatype_key ] . check ( topic , value ) validate_dt = topic . meta . get ( 'validate' , None ) if validate_dt : self . _datatypes [ validate_dt ] . check ( topic , value ) | Checking the value if it fits into the given specification | 92 | 11 |
242,352 | def flush ( self ) : self . notify ( tuple ( self . _queue ) ) self . _queue . clear ( ) | Emits the current queue and clears the queue | 26 | 9 |
242,353 | def _periodic_callback ( self ) : try : self . notify ( self . _state ) # emit to all subscribers except Exception : # pylint: disable=broad-except self . _error_callback ( * sys . exc_info ( ) ) if self . _subscriptions : # if there are still subscriptions register next _periodic callback self . _call_later_handle = s... | Will be started on first emit | 126 | 6 |
242,354 | def build_reduce ( function : Callable [ [ Any , Any ] , Any ] = None , * , init : Any = NONE ) : _init = init def _build_reduce ( function : Callable [ [ Any , Any ] , Any ] ) : @ wraps ( function ) def _wrapper ( init = NONE ) -> Reduce : init = _init if init is NONE else init if init is NONE : raise TypeError ( 'ini... | Decorator to wrap a function to return a Reduce operator . | 135 | 13 |
242,355 | def flush ( self ) : if not self . _emit_partial and len ( self . _state ) != self . _state . maxlen : self . notify ( tuple ( self . _state ) ) self . _state . clear ( ) | Flush the queue - this will emit the current queue | 52 | 11 |
242,356 | def build_map_async ( coro = None , * , mode = None , unpack : bool = False ) : _mode = mode def _build_map_async ( coro ) : @ wraps ( coro ) def _wrapper ( * args , mode = None , * * kwargs ) -> MapAsync : if 'unpack' in kwargs : raise TypeError ( '"unpack" has to be defined by decorator' ) if mode is None : mode = MO... | Decorator to wrap a coroutine to return a MapAsync operator . | 172 | 15 |
242,357 | def _future_done ( self , future ) : try : # notify the subscribers (except result is an exception or NONE) result = future . result ( ) # may raise exception if result is not NONE : self . notify ( result ) # may also raise exception except asyncio . CancelledError : return except Exception : # pylint: disable=broad-e... | Will be called when the coroutine is done | 149 | 9 |
242,358 | def _run_coro ( self , value ) : # when LAST_DISTINCT is used only start coroutine when value changed if self . _options . mode is MODE . LAST_DISTINCT and value == self . _last_emit : self . _future = None return # store the value to be emitted for LAST_DISTINCT self . _last_emit = value # publish the start of the cor... | Start the coroutine as task | 205 | 6 |
242,359 | def build_filter ( predicate : Callable [ [ Any ] , bool ] = None , * , unpack : bool = False ) : def _build_filter ( predicate : Callable [ [ Any ] , bool ] ) : @ wraps ( predicate ) def _wrapper ( * args , * * kwargs ) -> Filter : if 'unpack' in kwargs : raise TypeError ( '"unpack" has to be defined by decorator' ) r... | Decorator to wrap a function to return a Filter operator . | 136 | 13 |
242,360 | def apply_operator_overloading ( ) : # operator overloading is (unfortunately) not working for the following # cases: # int, float, str - should return appropriate type instead of a Publisher # len - should return an integer # "x in y" - is using __bool__ which is not working with Publisher for method in ( '__lt__' , '... | Function to apply operator overloading to Publisher class | 756 | 9 |
242,361 | def assign ( self , subject ) : if not isinstance ( subject , ( Publisher , Subscriber ) ) : raise TypeError ( 'Assignee has to be Publisher or Subscriber' ) # check if not already assigned if self . _subject is not None : raise SubscriptionError ( 'Topic %r already assigned' % self . _path ) self . _subject = subject ... | Assigns the given subject to the topic | 172 | 9 |
242,362 | def freeze ( self , freeze : bool = True ) : for topic in self . _topics . values ( ) : topic . freeze ( ) self . _frozen = freeze | Freezing the hub means that each topic has to be assigned and no new topics can be created after this point . | 37 | 23 |
242,363 | def reset ( self ) : if self . _call_later_handler is not None : self . _call_later_handler . cancel ( ) self . _call_later_handler = None self . _wait_done_cb ( ) | Reseting duration for throttling | 51 | 6 |
242,364 | def build_map_threaded ( function : Callable [ [ Any ] , Any ] = None , mode = MODE . CONCURRENT , unpack : bool = False ) : _mode = mode def _build_map_threaded ( function : Callable [ [ Any ] , Any ] ) : @ wraps ( function ) def _wrapper ( * args , mode = None , * * kwargs ) -> MapThreaded : if 'unpack' in kwargs : r... | Decorator to wrap a function to return a MapThreaded operator . | 190 | 15 |
242,365 | async def _thread_coro ( self , * args ) : return await self . _loop . run_in_executor ( self . _executor , self . _function , * args ) | Coroutine called by MapAsync . It s wrapping the call of run_in_executor to run the synchronous function as thread | 43 | 27 |
242,366 | def reset ( self ) : if self . _retrigger_value is not NONE : self . notify ( self . _retrigger_value ) self . _state = self . _retrigger_value self . _next_state = self . _retrigger_value if self . _call_later_handler : self . _call_later_handler . cancel ( ) self . _call_later_handler = None | Reset the debounce time | 93 | 6 |
242,367 | def subscribe ( self , subscriber : 'Subscriber' , prepend : bool = False ) -> SubscriptionDisposable : # `subscriber in self._subscriptions` is not working because # tuple.__contains__ is using __eq__ which is overwritten and returns # a new publisher - not helpful here if any ( subscriber is s for s in self . _subscr... | Subscribing the given subscriber . | 140 | 7 |
242,368 | def unsubscribe ( self , subscriber : 'Subscriber' ) -> None : # here is a special implementation which is replacing the more # obvious one: self._subscriptions.remove(subscriber) - this will not # work because list.remove(x) is doing comparision for equality. # Applied to publishers this will return another publisher ... | Unsubscribe the given subscriber | 128 | 6 |
242,369 | def inherit_type ( self , type_cls : Type [ TInherit ] ) -> Union [ TInherit , 'Publisher' ] : self . _inherited_type = type_cls return self | enables the usage of method and attribute overloading for this publisher . | 48 | 14 |
242,370 | def _move_tuple_axes_first ( array , axis ) : # Figure out how many axes we are operating over naxis = len ( axis ) # Add remaining axes to the axis tuple axis += tuple ( i for i in range ( array . ndim ) if i not in axis ) # The new position of each axis is just in order destination = tuple ( range ( array . ndim ) ) ... | Bottleneck can only take integer axis not tuple so this function takes all the axes to be operated on and combines them into the first dimension of the array so that we can then use axis = 0 | 205 | 39 |
242,371 | def _nanmean ( array , axis = None ) : if isinstance ( axis , tuple ) : array = _move_tuple_axes_first ( array , axis = axis ) axis = 0 return bottleneck . nanmean ( array , axis = axis ) | Bottleneck nanmean function that handle tuple axis . | 55 | 10 |
242,372 | def _nanmedian ( array , axis = None ) : if isinstance ( axis , tuple ) : array = _move_tuple_axes_first ( array , axis = axis ) axis = 0 return bottleneck . nanmedian ( array , axis = axis ) | Bottleneck nanmedian function that handle tuple axis . | 57 | 11 |
242,373 | def _nanstd ( array , axis = None , ddof = 0 ) : if isinstance ( axis , tuple ) : array = _move_tuple_axes_first ( array , axis = axis ) axis = 0 return bottleneck . nanstd ( array , axis = axis , ddof = ddof ) | Bottleneck nanstd function that handle tuple axis . | 66 | 10 |
242,374 | def sigma_clip ( data , sigma = 3 , sigma_lower = None , sigma_upper = None , maxiters = 5 , cenfunc = 'median' , stdfunc = 'std' , axis = None , masked = True , return_bounds = False , copy = True ) : sigclip = SigmaClip ( sigma = sigma , sigma_lower = sigma_lower , sigma_upper = sigma_upper , maxiters = maxiters , ce... | Perform sigma - clipping on the provided data . | 155 | 11 |
242,375 | def sigma_clipped_stats ( data , mask = None , mask_value = None , sigma = 3.0 , sigma_lower = None , sigma_upper = None , maxiters = 5 , cenfunc = 'median' , stdfunc = 'std' , std_ddof = 0 , axis = None ) : if mask is not None : data = np . ma . MaskedArray ( data , mask ) if mask_value is not None : data = np . ma . ... | Calculate sigma - clipped statistics on the provided data . | 341 | 13 |
242,376 | def _sigmaclip_noaxis ( self , data , masked = True , return_bounds = False , copy = True ) : filtered_data = data . ravel ( ) # remove masked values and convert to ndarray if isinstance ( filtered_data , np . ma . MaskedArray ) : filtered_data = filtered_data . data [ ~ filtered_data . mask ] # remove invalid values g... | Sigma clip the data when axis is None . | 381 | 10 |
242,377 | def _sigmaclip_withaxis ( self , data , axis = None , masked = True , return_bounds = False , copy = True ) : # float array type is needed to insert nans into the array filtered_data = data . astype ( float ) # also makes a copy # remove invalid values bad_mask = ~ np . isfinite ( filtered_data ) if np . any ( bad_mask... | Sigma clip the data when axis is specified . | 636 | 10 |
242,378 | def do_photometry ( self , data , error = None , mask = None , method = 'exact' , subpixels = 5 , unit = None ) : data = np . asanyarray ( data ) if mask is not None : mask = np . asanyarray ( mask ) data = copy . deepcopy ( data ) # do not modify input data data [ mask ] = 0 if error is not None : # do not modify inpu... | Perform aperture photometry on the input data . | 359 | 10 |
242,379 | def _to_sky_params ( self , wcs , mode = 'all' ) : sky_params = { } x , y = np . transpose ( self . positions ) sky_params [ 'positions' ] = pixel_to_skycoord ( x , y , wcs , mode = mode ) # The aperture object must have a single value for each shape # parameter so we must use a single pixel scale for all positions. # ... | Convert the pixel aperture parameters to those for a sky aperture . | 304 | 13 |
242,380 | def _to_pixel_params ( self , wcs , mode = 'all' ) : pixel_params = { } x , y = skycoord_to_pixel ( self . positions , wcs , mode = mode ) pixel_params [ 'positions' ] = np . array ( [ x , y ] ) . transpose ( ) # The aperture object must have a single value for each shape # parameter so we must use a single pixel scale... | Convert the sky aperture parameters to those for a pixel aperture . | 355 | 13 |
242,381 | def source_properties ( data , segment_img , error = None , mask = None , background = None , filter_kernel = None , wcs = None , labels = None ) : if not isinstance ( segment_img , SegmentationImage ) : segment_img = SegmentationImage ( segment_img ) if segment_img . shape != data . shape : raise ValueError ( 'segment... | Calculate photometry and morphological properties of sources defined by a labeled segmentation image . | 320 | 19 |
242,382 | def _properties_table ( obj , columns = None , exclude_columns = None ) : # default properties columns_all = [ 'id' , 'xcentroid' , 'ycentroid' , 'sky_centroid' , 'sky_centroid_icrs' , 'source_sum' , 'source_sum_err' , 'background_sum' , 'background_mean' , 'background_at_centroid' , 'xmin' , 'xmax' , 'ymin' , 'ymax' ,... | Construct a ~astropy . table . QTable of source properties from a SourceProperties or SourceCatalog object . | 510 | 23 |
242,383 | def _total_mask ( self ) : mask = self . _segment_mask | self . _data_mask if self . _input_mask is not None : mask |= self . _input_mask return mask | Combination of the _segment_mask _input_mask and _data_mask . | 47 | 19 |
242,384 | def to_table ( self , columns = None , exclude_columns = None ) : return _properties_table ( self , columns = columns , exclude_columns = exclude_columns ) | Create a ~astropy . table . QTable of properties . | 41 | 13 |
242,385 | def data_cutout_ma ( self ) : return np . ma . masked_array ( self . _data [ self . _slice ] , mask = self . _total_mask ) | A 2D ~numpy . ma . MaskedArray cutout from the data . | 40 | 18 |
242,386 | def error_cutout_ma ( self ) : if self . _error is None : return None else : return np . ma . masked_array ( self . _error [ self . _slice ] , mask = self . _total_mask ) | A 2D ~numpy . ma . MaskedArray cutout from the input error image . | 52 | 20 |
242,387 | def background_cutout_ma ( self ) : if self . _background is None : return None else : return np . ma . masked_array ( self . _background [ self . _slice ] , mask = self . _total_mask ) | A 2D ~numpy . ma . MaskedArray cutout from the input background . | 52 | 19 |
242,388 | def coords ( self ) : yy , xx = np . nonzero ( self . data_cutout_ma ) return ( yy + self . _slice [ 0 ] . start , xx + self . _slice [ 1 ] . start ) | A tuple of two ~numpy . ndarray containing the y and x pixel coordinates of unmasked pixels within the source segment . | 53 | 28 |
242,389 | def sky_centroid ( self ) : if self . _wcs is not None : return pixel_to_skycoord ( self . xcentroid . value , self . ycentroid . value , self . _wcs , origin = 0 ) else : return None | The sky coordinates of the centroid within the source segment returned as a ~astropy . coordinates . SkyCoord object . | 55 | 25 |
242,390 | def sky_bbox_ll ( self ) : if self . _wcs is not None : return pixel_to_skycoord ( self . xmin . value - 0.5 , self . ymin . value - 0.5 , self . _wcs , origin = 0 ) else : return None | The sky coordinates of the lower - left vertex of the minimal bounding box of the source segment returned as a ~astropy . coordinates . SkyCoord object . | 63 | 33 |
242,391 | def sky_bbox_ul ( self ) : if self . _wcs is not None : return pixel_to_skycoord ( self . xmin . value - 0.5 , self . ymax . value + 0.5 , self . _wcs , origin = 0 ) else : return None | The sky coordinates of the upper - left vertex of the minimal bounding box of the source segment returned as a ~astropy . coordinates . SkyCoord object . | 63 | 33 |
242,392 | def sky_bbox_lr ( self ) : if self . _wcs is not None : return pixel_to_skycoord ( self . xmax . value + 0.5 , self . ymin . value - 0.5 , self . _wcs , origin = 0 ) else : return None | The sky coordinates of the lower - right vertex of the minimal bounding box of the source segment returned as a ~astropy . coordinates . SkyCoord object . | 63 | 33 |
242,393 | def sky_bbox_ur ( self ) : if self . _wcs is not None : return pixel_to_skycoord ( self . xmax . value + 0.5 , self . ymax . value + 0.5 , self . _wcs , origin = 0 ) else : return None | The sky coordinates of the upper - right vertex of the minimal bounding box of the source segment returned as a ~astropy . coordinates . SkyCoord object . | 63 | 33 |
242,394 | def min_value ( self ) : if self . _is_completely_masked : return np . nan * self . _data_unit else : return np . min ( self . values ) | The minimum pixel value of the data within the source segment . | 41 | 12 |
242,395 | def max_value ( self ) : if self . _is_completely_masked : return np . nan * self . _data_unit else : return np . max ( self . values ) | The maximum pixel value of the data within the source segment . | 41 | 12 |
242,396 | def source_sum ( self ) : if self . _is_completely_masked : return np . nan * self . _data_unit # table output needs unit else : return np . sum ( self . values ) | The sum of the unmasked data values within the source segment . | 46 | 14 |
242,397 | def source_sum_err ( self ) : if self . _error is not None : if self . _is_completely_masked : return np . nan * self . _error_unit # table output needs unit else : return np . sqrt ( np . sum ( self . _error_values ** 2 ) ) else : return None | The uncertainty of ~photutils . SourceProperties . source_sum propagated from the input error array . | 72 | 22 |
242,398 | def background_sum ( self ) : if self . _background is not None : if self . _is_completely_masked : return np . nan * self . _background_unit # unit for table else : return np . sum ( self . _background_values ) else : return None | The sum of background values within the source segment . | 61 | 10 |
242,399 | def background_mean ( self ) : if self . _background is not None : if self . _is_completely_masked : return np . nan * self . _background_unit # unit for table else : return np . mean ( self . _background_values ) else : return None | The mean of background values within the source segment . | 61 | 10 |